duplicity-team team mailing list archive
-
duplicity-team team
-
Mailing list archive
-
Message #04820
[Merge] lp:~mgorse/duplicity/0.8-series into lp:duplicity
Mgorse has proposed merging lp:~mgorse/duplicity/0.8-series into lp:duplicity.
Requested reviews:
duplicity-team (duplicity-team)
For more details, see:
https://code.launchpad.net/~mgorse/duplicity/0.8-series/+merge/355568
Annotate more strings in duplicity/*.py
--
Your team duplicity-team is requested to review the proposed merge of lp:~mgorse/duplicity/0.8-series into lp:duplicity.
=== modified file 'duplicity/diffdir.py'
--- duplicity/diffdir.py 2018-07-27 02:18:12 +0000
+++ duplicity/diffdir.py 2018-09-24 21:19:45 +0000
@@ -19,7 +19,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-"""
+u"""
Functions for producing signatures and deltas of directories
Note that the main processes of this module have two parts. In the
@@ -49,14 +49,14 @@
def DirSig(path_iter):
- """
+ u"""
Alias for SigTarBlockIter below
"""
return SigTarBlockIter(path_iter)
def DirFull(path_iter):
- """
+ u"""
Return a tarblock full backup of items in path_iter
A full backup is just a diff starting from nothing (it may be less
@@ -64,18 +64,18 @@
will be easy to split up the tar and make the volumes the same
sizes).
"""
- return DirDelta(path_iter, cStringIO.StringIO(""))
+ return DirDelta(path_iter, cStringIO.StringIO(u""))
def DirFull_WriteSig(path_iter, sig_outfp):
- """
+ u"""
Return full backup like above, but also write signature to sig_outfp
"""
- return DirDelta_WriteSig(path_iter, cStringIO.StringIO(""), sig_outfp)
+ return DirDelta_WriteSig(path_iter, cStringIO.StringIO(u""), sig_outfp)
def DirDelta(path_iter, dirsig_fileobj_list):
- """
+ u"""
Produce tarblock diff given dirsig_fileobj_list and pathiter
dirsig_fileobj_list should either be a tar fileobj or a list of
@@ -96,7 +96,7 @@
def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None):
- """
+ u"""
Called by get_delta_iter, report error in getting delta
"""
if new_path:
@@ -104,13 +104,13 @@
elif sig_path:
index_string = sig_path.get_relative_path()
else:
- assert 0, "Both new and sig are None for some reason"
- log.Warn(_("Error %s getting delta for %s") % (str(exc), util.fsdecode(index_string)))
+ assert 0, u"Both new and sig are None for some reason"
+ log.Warn(_(u"Error %s getting delta for %s") % (str(exc), util.fsdecode(index_string)))
return None
def get_delta_path(new_path, sig_path, sigTarFile=None):
- """
+ u"""
Return new delta_path which, when read, writes sig to sig_fileobj,
if sigTarFile is not None
"""
@@ -119,35 +119,35 @@
ti = new_path.get_tarinfo()
index = new_path.index
delta_path = new_path.get_ropath()
- log.Debug(_("Getting delta of %s and %s") % (new_path, sig_path))
+ log.Debug(_(u"Getting delta of %s and %s") % (new_path, sig_path))
def callback(sig_string):
- """
+ u"""
Callback activated when FileWithSignature read to end
"""
ti.size = len(sig_string)
- ti.name = "signature/" + "/".join(index)
+ ti.name = b"signature/" + b"/".join(index)
sigTarFile.addfile(ti, cStringIO.StringIO(sig_string))
- if new_path.isreg() and sig_path and sig_path.isreg() and sig_path.difftype == "signature":
- delta_path.difftype = "diff"
- old_sigfp = sig_path.open("rb")
- newfp = FileWithReadCounter(new_path.open("rb"))
+ if new_path.isreg() and sig_path and sig_path.isreg() and sig_path.difftype == u"signature":
+ delta_path.difftype = u"diff"
+ old_sigfp = sig_path.open(u"rb")
+ newfp = FileWithReadCounter(new_path.open(u"rb"))
if sigTarFile:
newfp = FileWithSignature(newfp, callback,
new_path.getsize())
delta_path.setfileobj(librsync.DeltaFile(old_sigfp, newfp))
else:
- delta_path.difftype = "snapshot"
+ delta_path.difftype = u"snapshot"
if sigTarFile:
- ti.name = "snapshot/" + "/".join(index)
+ ti.name = b"snapshot/" + b"/".join(index)
if not new_path.isreg():
if sigTarFile:
sigTarFile.addfile(ti)
if stats:
stats.SourceFileSize += delta_path.getsize()
else:
- newfp = FileWithReadCounter(new_path.open("rb"))
+ newfp = FileWithReadCounter(new_path.open(u"rb"))
if sigTarFile:
newfp = FileWithSignature(newfp, callback,
new_path.getsize())
@@ -158,27 +158,27 @@
def log_delta_path(delta_path, new_path=None, stats=None):
- """
+ u"""
Look at delta path and log delta. Add stats if new_path is set
"""
- if delta_path.difftype == "snapshot":
+ if delta_path.difftype == u"snapshot":
if new_path and stats:
stats.add_new_file(new_path)
- log.Info(_("A %s") %
+ log.Info(_(u"A %s") %
(util.fsdecode(delta_path.get_relative_path())),
log.InfoCode.diff_file_new,
util.escape(delta_path.get_relative_path()))
else:
if new_path and stats:
stats.add_changed_file(new_path)
- log.Info(_("M %s") %
+ log.Info(_(u"M %s") %
(util.fsdecode(delta_path.get_relative_path())),
log.InfoCode.diff_file_changed,
util.escape(delta_path.get_relative_path()))
def get_delta_iter(new_iter, sig_iter, sig_fileobj=None):
- """
+ u"""
Generate delta iter from new Path iter and sig Path iter.
For each delta path of regular file type, path.difftype with be
@@ -189,25 +189,25 @@
"""
collated = collate2iters(new_iter, sig_iter)
if sig_fileobj:
- sigTarFile = util.make_tarfile("w", sig_fileobj)
+ sigTarFile = util.make_tarfile(u"w", sig_fileobj)
else:
sigTarFile = None
for new_path, sig_path in collated:
- log.Debug(_("Comparing %s and %s") % (new_path and util.uindex(new_path.index),
- sig_path and util.uindex(sig_path.index)))
+ log.Debug(_(u"Comparing %s and %s") % (new_path and util.uindex(new_path.index),
+ sig_path and util.uindex(sig_path.index)))
if not new_path or not new_path.type:
# File doesn't exist (but ignore attempts to delete base dir;
# old versions of duplicity could have written out the sigtar in
# such a way as to fool us; LP: #929067)
if sig_path and sig_path.exists() and sig_path.index != ():
# but signature says it did
- log.Info(_("D %s") %
+ log.Info(_(u"D %s") %
(util.fsdecode(sig_path.get_relative_path())),
log.InfoCode.diff_file_deleted,
util.escape(sig_path.get_relative_path()))
if sigTarFile:
ti = ROPath(sig_path.index).get_tarinfo()
- ti.name = "deleted/" + "/".join(sig_path.index)
+ ti.name = u"deleted/" + u"/".join(sig_path.index)
sigTarFile.addfile(ti)
stats.add_deleted_file(sig_path)
yield ROPath(sig_path.index)
@@ -231,28 +231,28 @@
def sigtar2path_iter(sigtarobj):
- """
+ u"""
Convert signature tar file object open for reading into path iter
"""
- tf = util.make_tarfile("r", sigtarobj)
+ tf = util.make_tarfile(u"r", sigtarobj)
tf.debug = 1
for tarinfo in tf:
tiname = util.get_tarinfo_name(tarinfo)
- for prefix in ["signature/", "snapshot/", "deleted/"]:
+ for prefix in [b"signature/", b"snapshot/", b"deleted/"]:
if tiname.startswith(prefix):
# strip prefix and '/' from name and set it to difftype
name, difftype = tiname[len(prefix):], prefix[:-1]
break
else:
- raise DiffDirException("Bad tarinfo name %s" % (tiname,))
+ raise DiffDirException(u"Bad tarinfo name %s" % (tiname,))
- index = tuple(name.split("/"))
+ index = tuple(name.split(u"/"))
if not index[-1]:
index = index[:-1] # deal with trailing /, ""
ropath = ROPath(index)
ropath.difftype = difftype
- if difftype == "signature" or difftype == "snapshot":
+ if difftype == u"signature" or difftype == u"snapshot":
ropath.init_from_tarinfo(tarinfo)
if ropath.isreg():
ropath.setfileobj(tf.extractfile(tarinfo))
@@ -261,7 +261,7 @@
def collate2iters(riter1, riter2):
- """
+ u"""
Collate two iterators.
The elements yielded by each iterator must be have an index
@@ -305,7 +305,7 @@
def combine_path_iters(path_iter_list):
- """
+ u"""
Produce new iterator by combining the iterators in path_iter_list
This new iter will iterate every path that is in path_iter_list in
@@ -320,7 +320,7 @@
path_iter_list.reverse()
def get_triple(iter_index):
- """
+ u"""
Represent the next element as a triple, to help sorting
"""
try:
@@ -330,7 +330,7 @@
return (path.index, iter_index, path)
def refresh_triple_list(triple_list):
- """
+ u"""
Update all elements with path_index same as first element
"""
path_index = triple_list[0][0]
@@ -355,7 +355,7 @@
def DirDelta_WriteSig(path_iter, sig_infp_list, newsig_outfp):
- """
+ u"""
Like DirDelta but also write signature into sig_fileobj
Like DirDelta, sig_infp_list can be a tar fileobj or a sorted list
@@ -376,26 +376,26 @@
def get_combined_path_iter(sig_infp_list):
- """
+ u"""
Return path iter combining signatures in list of open sig files
"""
return combine_path_iters([sigtar2path_iter(x) for x in sig_infp_list])
class FileWithReadCounter:
- """
+ u"""
File-like object which also computes amount read as it is read
"""
def __init__(self, infile):
- """FileWithReadCounter initializer"""
+ u"""FileWithReadCounter initializer"""
self.infile = infile
def read(self, length=-1):
try:
buf = self.infile.read(length)
except IOError as ex:
- buf = ""
- log.Warn(_("Error %s getting delta for %s") % (str(ex), self.infile.uc_name))
+ buf = u""
+ log.Warn(_(u"Error %s getting delta for %s") % (str(ex), self.infile.uc_name))
if stats:
stats.SourceFileSize += len(buf)
return buf
@@ -405,13 +405,13 @@
class FileWithSignature:
- """
+ u"""
File-like object which also computes signature as it is read
"""
blocksize = 32 * 1024
def __init__(self, infile, callback, filelen, *extra_args):
- """
+ u"""
FileTee initializer
The object will act like infile, but whenever it is read it
@@ -442,11 +442,11 @@
class TarBlock:
- """
+ u"""
Contain information to add next file to tar
"""
def __init__(self, index, data):
- """
+ u"""
TarBlock initializer - just store data
"""
self.index = index
@@ -454,7 +454,7 @@
class TarBlockIter:
- """
+ u"""
A bit like an iterator, yield tar blocks given input iterator
Unlike an iterator, however, control over the maximum size of a
@@ -462,7 +462,7 @@
get_footer() is available.
"""
def __init__(self, input_iter):
- """
+ u"""
TarBlockIter initializer
"""
self.input_iter = input_iter
@@ -476,28 +476,28 @@
self.remember_block = None # holds block of next block
self.queued_data = None # data to return in next next() call
- def tarinfo2tarblock(self, index, tarinfo, file_data=""):
- """
+ def tarinfo2tarblock(self, index, tarinfo, file_data=b""):
+ u"""
Make tarblock out of tarinfo and file data
"""
tarinfo.size = len(file_data)
- headers = tarinfo.tobuf(errors='replace')
+ headers = tarinfo.tobuf(errors=u'replace')
blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE) # @UnusedVariable
if remainder > 0:
- filler_data = "\0" * (tarfile.BLOCKSIZE - remainder)
+ filler_data = b"\0" * (tarfile.BLOCKSIZE - remainder)
else:
- filler_data = ""
- return TarBlock(index, "%s%s%s" % (headers, file_data, filler_data))
+ filler_data = b""
+ return TarBlock(index, b"%s%s%s" % (headers, file_data, filler_data))
def process(self, val):
- """
+ u"""
Turn next value of input_iter into a TarBlock
"""
assert not self.process_waiting
XXX # Override in subclass @UndefinedVariable
def process_continued(self):
- """
+ u"""
Get more tarblocks
If processing val above would produce more than one TarBlock,
@@ -507,7 +507,7 @@
XXX # Override in subclass @UndefinedVariable
def next(self):
- """
+ u"""
Return next block and update offset
"""
if self.queued_data is not None:
@@ -539,19 +539,19 @@
return 64 * 1024
def get_previous_index(self):
- """
+ u"""
Return index of last tarblock, or None if no previous index
"""
return self.previous_index, self.previous_block
def queue_index_data(self, data):
- """
+ u"""
Next time next() is called, we will return data instead of processing
"""
self.queued_data = data
def remember_next_index(self):
- """
+ u"""
When called, remember the index of the next block iterated
"""
self.remember_next = True
@@ -559,29 +559,29 @@
self.remember_block = None
def recall_index(self):
- """
+ u"""
Retrieve index remembered with remember_next_index
"""
return self.remember_value, self.remember_block
def get_footer(self):
- """
+ u"""
Return closing string for tarfile, reset offset
"""
blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) # @UnusedVariable
self.offset = 0
- return '\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0
+ return u'\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0
def __iter__(self):
return self
class DummyBlockIter(TarBlockIter):
- """
+ u"""
TarBlockIter that does no file reading
"""
def process(self, delta_ropath):
- """
+ u"""
Get a fake tarblock from delta_ropath
"""
ti = delta_ropath.get_tarinfo()
@@ -601,28 +601,28 @@
class SigTarBlockIter(TarBlockIter):
- """
+ u"""
TarBlockIter that yields blocks of a signature tar from path_iter
"""
def process(self, path):
- """
+ u"""
Return associated signature TarBlock from path
"""
ti = path.get_tarinfo()
if path.isreg():
- sfp = librsync.SigFile(path.open("rb"),
+ sfp = librsync.SigFile(path.open(u"rb"),
get_block_size(path.getsize()))
sigbuf = sfp.read()
sfp.close()
- ti.name = "signature/" + "/".join(path.index)
+ ti.name = u"signature/" + u"/".join(path.index)
return self.tarinfo2tarblock(path.index, ti, sigbuf)
else:
- ti.name = "snapshot/" + "/".join(path.index)
+ ti.name = u"snapshot/" + u"/".join(path.index)
return self.tarinfo2tarblock(path.index, ti)
class DeltaTarBlockIter(TarBlockIter):
- """
+ u"""
TarBlockIter that yields parts of a deltatar file
Unlike SigTarBlockIter, the argument to __init__ is a
@@ -630,15 +630,15 @@
calculated.
"""
def process(self, delta_ropath):
- """
+ u"""
Get a tarblock from delta_ropath
"""
def add_prefix(tarinfo, prefix):
- """Add prefix to the name of a tarinfo file"""
- if tarinfo.name == ".":
- tarinfo.name = prefix + "/"
+ u"""Add prefix to the name of a tarinfo file"""
+ if tarinfo.name == b".":
+ tarinfo.name = prefix + b"/"
else:
- tarinfo.name = "%s/%s" % (prefix, tarinfo.name)
+ tarinfo.name = b"%s/%s" % (prefix, tarinfo.name)
ti = delta_ropath.get_tarinfo()
index = delta_ropath.index
@@ -646,29 +646,29 @@
# Return blocks of deleted files or fileless snapshots
if not delta_ropath.type or not delta_ropath.fileobj:
if not delta_ropath.type:
- add_prefix(ti, "deleted")
+ add_prefix(ti, u"deleted")
else:
- assert delta_ropath.difftype == "snapshot"
- add_prefix(ti, "snapshot")
+ assert delta_ropath.difftype == u"snapshot"
+ add_prefix(ti, b"snapshot")
return self.tarinfo2tarblock(index, ti)
# Now handle single volume block case
- fp = delta_ropath.open("rb")
+ fp = delta_ropath.open(u"rb")
data, last_block = self.get_data_block(fp)
if stats:
stats.RawDeltaSize += len(data)
if last_block:
- if delta_ropath.difftype == "snapshot":
- add_prefix(ti, "snapshot")
- elif delta_ropath.difftype == "diff":
- add_prefix(ti, "diff")
+ if delta_ropath.difftype == u"snapshot":
+ add_prefix(ti, b"snapshot")
+ elif delta_ropath.difftype == u"diff":
+ add_prefix(ti, b"diff")
else:
- assert 0, "Unknown difftype"
+ assert 0, u"Unknown difftype"
return self.tarinfo2tarblock(index, ti, data)
# Finally, do multivol snapshot or diff case
- full_name = "multivol_%s/%s" % (delta_ropath.difftype, ti.name)
- ti.name = full_name + "/1"
+ full_name = u"multivol_%s/%s" % (delta_ropath.difftype, ti.name)
+ ti.name = full_name + u"/1"
self.process_prefix = full_name
self.process_fp = fp
self.process_ropath = delta_ropath
@@ -677,26 +677,26 @@
return self.tarinfo2tarblock(index, ti, data)
def get_data_block(self, fp):
- """
+ u"""
Return pair (next data block, boolean last data block)
"""
read_size = self.get_read_size()
buf = fp.read(read_size)
if len(buf) < read_size:
if fp.close():
- raise DiffDirException("Error closing file")
+ raise DiffDirException(u"Error closing file")
return (buf, True)
else:
return (buf, False)
def process_continued(self):
- """
+ u"""
Return next volume in multivol diff or snapshot
"""
assert self.process_waiting
ropath = self.process_ropath
ti, index = ropath.get_tarinfo(), ropath.index
- ti.name = "%s/%d" % (self.process_prefix, self.process_next_vol_number)
+ ti.name = u"%s/%d" % (self.process_prefix, self.process_next_vol_number)
data, last_block = self.get_data_block(self.process_fp)
if stats:
stats.RawDeltaSize += len(data)
@@ -712,13 +712,13 @@
def write_block_iter(block_iter, out_obj):
- """
+ u"""
Write block_iter to filename, path, or file object
"""
if isinstance(out_obj, Path):
- fp = open(out_obj.name, "wb")
+ fp = open(out_obj.name, u"wb")
elif isinstance(out_obj, types.StringTypes):
- fp = open(out_obj, "wb")
+ fp = open(out_obj, u"wb")
else:
fp = out_obj
for block in block_iter:
@@ -730,7 +730,7 @@
def get_block_size(file_len):
- """
+ u"""
Return a reasonable block size to use on files of length file_len
If the block size is too big, deltas will be bigger than is
=== modified file 'duplicity/manifest.py'
--- duplicity/manifest.py 2018-09-06 11:14:11 +0000
+++ duplicity/manifest.py 2018-09-24 21:19:45 +0000
@@ -19,7 +19,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-"""Create and edit manifest for session contents"""
+u"""Create and edit manifest for session contents"""
from future_builtins import filter
@@ -32,18 +32,18 @@
class ManifestError(Exception):
- """
+ u"""
Exception raised when problem with manifest
"""
pass
class Manifest:
- """
+ u"""
List of volumes and information about each one
"""
def __init__(self, fh=None):
- """
+ u"""
Create blank Manifest
@param fh: fileobj for manifest
@@ -59,7 +59,7 @@
self.files_changed = []
def set_dirinfo(self):
- """
+ u"""
Set information about directory from globals,
and write to manifest file.
@@ -70,13 +70,13 @@
self.local_dirname = globals.local_path.name # @UndefinedVariable
if self.fh:
if self.hostname:
- self.fh.write("Hostname %s\n" % self.hostname)
+ self.fh.write(u"Hostname %s\n" % self.hostname)
if self.local_dirname:
- self.fh.write("Localdir %s\n" % Quote(self.local_dirname))
+ self.fh.write(u"Localdir %s\n" % Quote(self.local_dirname))
return self
def check_dirinfo(self):
- """
+ u"""
Return None if dirinfo is the same, otherwise error message
Does not raise an error message if hostname or local_dirname
@@ -89,41 +89,41 @@
return
if self.hostname and self.hostname != globals.hostname:
- errmsg = _("Fatal Error: Backup source host has changed.\n"
- "Current hostname: %s\n"
- "Previous hostname: %s") % (globals.hostname, self.hostname)
+ errmsg = _(u"Fatal Error: Backup source host has changed.\n"
+ u"Current hostname: %s\n"
+ u"Previous hostname: %s") % (globals.hostname, self.hostname)
code = log.ErrorCode.hostname_mismatch
- code_extra = "%s %s" % (util.escape(globals.hostname), util.escape(self.hostname))
+ code_extra = u"%s %s" % (util.escape(globals.hostname), util.escape(self.hostname))
elif (self.local_dirname and self.local_dirname != globals.local_path.name): # @UndefinedVariable
- errmsg = _("Fatal Error: Backup source directory has changed.\n"
- "Current directory: %s\n"
- "Previous directory: %s") % (globals.local_path.name, self.local_dirname) # @UndefinedVariable
+ errmsg = _(u"Fatal Error: Backup source directory has changed.\n"
+ u"Current directory: %s\n"
+ u"Previous directory: %s") % (globals.local_path.name, self.local_dirname) # @UndefinedVariable
code = log.ErrorCode.source_dir_mismatch
- code_extra = "%s %s" % (util.escape(globals.local_path.name),
- util.escape(self.local_dirname)) # @UndefinedVariable
+ code_extra = u"%s %s" % (util.escape(globals.local_path.name),
+ util.escape(self.local_dirname)) # @UndefinedVariable
else:
return
- log.FatalError(errmsg + "\n\n" +
- _("Aborting because you may have accidentally tried to "
- "backup two different data sets to the same remote "
- "location, or using the same archive directory. If "
- "this is not a mistake, use the "
- "--allow-source-mismatch switch to avoid seeing this "
- "message"), code, code_extra)
+ log.FatalError(errmsg + u"\n\n" +
+ _(u"Aborting because you may have accidentally tried to "
+ u"backup two different data sets to the same remote "
+ u"location, or using the same archive directory. If "
+ u"this is not a mistake, use the "
+ u"--allow-source-mismatch switch to avoid seeing this "
+ u"message"), code, code_extra)
def set_files_changed_info(self, files_changed):
if files_changed:
self.files_changed = files_changed
if self.fh:
- self.fh.write("Filelist %d\n" % len(self.files_changed))
+ self.fh.write(u"Filelist %d\n" % len(self.files_changed))
for fileinfo in self.files_changed:
- self.fh.write(" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0])))
+ self.fh.write(b" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0])))
def add_volume_info(self, vi):
- """
+ u"""
Add volume info vi to manifest and write to manifest
@param vi: volume info to add
@@ -134,10 +134,10 @@
vol_num = vi.volume_number
self.volume_info_dict[vol_num] = vi
if self.fh:
- self.fh.write(vi.to_string() + "\n")
+ self.fh.write(vi.to_string() + b"\n")
def del_volume_info(self, vol_num):
- """
+ u"""
Remove volume vol_num from the manifest
@param vol_num: volume number to delete
@@ -148,87 +148,87 @@
try:
del self.volume_info_dict[vol_num]
except Exception:
- raise ManifestError("Volume %d not present in manifest" % (vol_num,))
+ raise ManifestError(u"Volume %d not present in manifest" % (vol_num,))
def to_string(self):
- """
+ u"""
Return string version of self (just concatenate vi strings)
@rtype: string
@return: self in string form
"""
- result = ""
+ result = u""
if self.hostname:
- result += "Hostname %s\n" % self.hostname
+ result += b"Hostname %s\n" % self.hostname
if self.local_dirname:
- result += "Localdir %s\n" % Quote(self.local_dirname)
+ result += b"Localdir %s\n" % Quote(self.local_dirname)
- result += "Filelist %d\n" % len(self.files_changed)
+ result += b"Filelist %d\n" % len(self.files_changed)
for fileinfo in self.files_changed:
- result += " %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0]))
+ result += b" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0]))
vol_num_list = self.volume_info_dict.keys()
vol_num_list.sort()
def vol_num_to_string(vol_num):
return self.volume_info_dict[vol_num].to_string()
- result = "%s%s\n" % (result,
- "\n".join(map(vol_num_to_string, vol_num_list)))
+ result = b"%s%s\n" % (result,
+ b"\n".join(map(vol_num_to_string, vol_num_list)))
return result
__str__ = to_string
def from_string(self, s):
- """
+ u"""
Initialize self from string s, return self
"""
def get_field(fieldname):
- """
+ u"""
Return the value of a field by parsing s, or None if no field
"""
- m = re.search("(^|\\n)%s\\s(.*?)\n" % fieldname, s, re.I)
+ m = re.search(u"(^|\\n)%s\\s(.*?)\n" % fieldname, s, re.I)
if not m:
return None
else:
return Unquote(m.group(2))
- self.hostname = get_field("hostname")
- self.local_dirname = get_field("localdir")
+ self.hostname = get_field(u"hostname")
+ self.local_dirname = get_field(u"localdir")
highest_vol = 0
latest_vol = 0
- vi_regexp = re.compile("(?:^|\\n)(volume\\s.*(?:\\n.*)*?)(?=\\nvolume\\s|$)", re.I)
+ vi_regexp = re.compile(u"(?:^|\\n)(volume\\s.*(?:\\n.*)*?)(?=\\nvolume\\s|$)", re.I)
vi_iterator = vi_regexp.finditer(s)
for match in vi_iterator:
vi = VolumeInfo().from_string(match.group(1))
self.add_volume_info(vi)
latest_vol = vi.volume_number
highest_vol = max(highest_vol, latest_vol)
- log.Debug(_("Found manifest volume %s") % latest_vol)
+ log.Debug(_(u"Found manifest volume %s") % latest_vol)
# If we restarted after losing some remote volumes, the highest volume
# seen may be higher than the last volume recorded. That is, the
# manifest could contain "vol1, vol2, vol3, vol2." If so, we don't
# want to keep vol3's info.
for i in range(latest_vol + 1, highest_vol + 1):
self.del_volume_info(i)
- log.Info(_("Found %s volumes in manifest") % latest_vol)
+ log.Info(_(u"Found %s volumes in manifest") % latest_vol)
# Get file changed list - not needed if --file-changed not present
filecount = 0
if globals.file_changed is not None:
- filelist_regexp = re.compile("(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S)
+ filelist_regexp = re.compile(u"(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S)
match = filelist_regexp.search(s)
if match:
filecount = int(match.group(2))
if filecount > 0:
def parse_fileinfo(line):
fileinfo = line.strip().split()
- return (fileinfo[0], ''.join(fileinfo[1:]))
+ return (fileinfo[0], u''.join(fileinfo[1:]))
- self.files_changed = list(map(parse_fileinfo, match.group(3).split('\n')))
+ self.files_changed = list(map(parse_fileinfo, match.group(3).split(u'\n')))
if filecount != len(self.files_changed):
- log.Error(_("Manifest file '%s' is corrupt: File count says %d, File list contains %d" %
- (self.fh.base if self.fh else "", filecount, len(self.files_changed))))
+ log.Error(_(u"Manifest file '%s' is corrupt: File count says %d, File list contains %d" %
+ (self.fh.base if self.fh else u"", filecount, len(self.files_changed))))
self.corrupt_filelist = True
return self
@@ -237,7 +237,7 @@
return self.files_changed
def __eq__(self, other):
- """
+ u"""
Two manifests are equal if they contain the same volume infos
"""
vi_list1 = self.volume_info_dict.keys()
@@ -246,39 +246,39 @@
vi_list2.sort()
if vi_list1 != vi_list2:
- log.Notice(_("Manifests not equal because different volume numbers"))
+ log.Notice(_(u"Manifests not equal because different volume numbers"))
return False
for i in range(len(vi_list1)):
if not vi_list1[i] == vi_list2[i]:
- log.Notice(_("Manifests not equal because volume lists differ"))
+ log.Notice(_(u"Manifests not equal because volume lists differ"))
return False
if (self.hostname != other.hostname or
self.local_dirname != other.local_dirname):
- log.Notice(_("Manifests not equal because hosts or directories differ"))
+ log.Notice(_(u"Manifests not equal because hosts or directories differ"))
return False
return True
def __ne__(self, other):
- """
+ u"""
Defines !=. Not doing this always leads to annoying bugs...
"""
return not self.__eq__(other)
def write_to_path(self, path):
- """
+ u"""
Write string version of manifest to given path
"""
assert not path.exists()
- fout = path.open("wb")
+ fout = path.open(u"wb")
fout.write(self.to_string())
assert not fout.close()
path.setdata()
def get_containing_volumes(self, index_prefix):
- """
+ u"""
Return list of volume numbers that may contain index_prefix
"""
return filter(lambda vol_num:
@@ -287,18 +287,18 @@
class VolumeInfoError(Exception):
- """
+ u"""
Raised when there is a problem initializing a VolumeInfo from string
"""
pass
class VolumeInfo:
- """
+ u"""
Information about a single volume
"""
def __init__(self):
- """VolumeInfo initializer"""
+ u"""VolumeInfo initializer"""
self.volume_number = None
self.start_index = None
self.start_block = None
@@ -309,7 +309,7 @@
def set_info(self, vol_number,
start_index, start_block,
end_index, end_block):
- """
+ u"""
Set essential VolumeInfo information, return self
Call with starting and ending paths stored in the volume. If
@@ -325,13 +325,13 @@
return self
def set_hash(self, hash_name, data):
- """
+ u"""
Set the value of hash hash_name (e.g. "MD5") to data
"""
self.hashes[hash_name] = data
def get_best_hash(self):
- """
+ u"""
Return pair (hash_type, hash_data)
SHA1 is the best hash, and MD5 is the second best hash. None
@@ -340,59 +340,59 @@
if not self.hashes:
return None
try:
- return ("SHA1", self.hashes['SHA1'])
+ return (u"SHA1", self.hashes[u'SHA1'])
except KeyError:
pass
try:
- return ("MD5", self.hashes['MD5'])
+ return (u"MD5", self.hashes[u'MD5'])
except KeyError:
pass
return self.hashes.items()[0]
def to_string(self):
- """
+ u"""
Return nicely formatted string reporting all information
"""
def index_to_string(index):
- """Return printable version of index without any whitespace"""
+ u"""Return printable version of index without any whitespace"""
if index:
- s = "/".join(index)
+ s = b"/".join(index)
return Quote(s)
else:
- return "."
+ return b"."
- slist = ["Volume %d:" % self.volume_number]
- whitespace = " "
- slist.append("%sStartingPath %s %s" %
- (whitespace, index_to_string(self.start_index), (self.start_block or " ")))
- slist.append("%sEndingPath %s %s" %
- (whitespace, index_to_string(self.end_index), (self.end_block or " ")))
+ slist = [b"Volume %d:" % self.volume_number]
+ whitespace = b" "
+ slist.append(b"%sStartingPath %s %s" %
+ (whitespace, index_to_string(self.start_index), (self.start_block or b" ")))
+ slist.append(b"%sEndingPath %s %s" %
+ (whitespace, index_to_string(self.end_index), (self.end_block or b" ")))
for key in self.hashes:
- slist.append("%sHash %s %s" %
+ slist.append(b"%sHash %s %s" %
(whitespace, key.encode(), self.hashes[key]))
- return "\n".join(slist)
+ return b"\n".join(slist)
__str__ = to_string
def from_string(self, s):
- """
+ u"""
Initialize self from string s as created by to_string
"""
def string_to_index(s):
- """
+ u"""
Return tuple index from string
"""
s = Unquote(s)
- if s == ".":
+ if s == b".":
return ()
- return tuple(s.split("/"))
+ return tuple(s.split(b"/"))
- linelist = s.strip().split("\n")
+ linelist = s.strip().split(b"\n")
# Set volume number
- m = re.search("^Volume ([0-9]+):", linelist[0], re.I)
+ m = re.search(u"^Volume ([0-9]+):", linelist[0], re.I)
if not m:
- raise VolumeInfoError("Bad first line '%s'" % (linelist[0],))
+ raise VolumeInfoError(u"Bad first line '%s'" % (linelist[0],))
self.volume_number = int(m.group(1))
# Set other fields
@@ -402,61 +402,61 @@
line_split = line.strip().split()
field_name = line_split[0].lower()
other_fields = line_split[1:]
- if field_name == "Volume":
- log.Warn(_("Warning, found extra Volume identifier"))
+ if field_name == u"Volume":
+ log.Warn(_(u"Warning, found extra Volume identifier"))
break
- elif field_name == "startingpath":
+ elif field_name == u"startingpath":
self.start_index = string_to_index(other_fields[0])
if len(other_fields) > 1:
self.start_block = int(other_fields[1])
else:
self.start_block = None
- elif field_name == "endingpath":
+ elif field_name == u"endingpath":
self.end_index = string_to_index(other_fields[0])
if len(other_fields) > 1:
self.end_block = int(other_fields[1])
else:
self.end_block = None
- elif field_name == "hash":
+ elif field_name == u"hash":
self.set_hash(other_fields[0], other_fields[1])
if self.start_index is None or self.end_index is None:
- raise VolumeInfoError("Start or end index not set")
+ raise VolumeInfoError(u"Start or end index not set")
return self
def __eq__(self, other):
- """
+ u"""
Used in test suite
"""
if not isinstance(other, VolumeInfo):
- log.Notice(_("Other is not VolumeInfo"))
+ log.Notice(_(u"Other is not VolumeInfo"))
return None
if self.volume_number != other.volume_number:
- log.Notice(_("Volume numbers don't match"))
+ log.Notice(_(u"Volume numbers don't match"))
return None
if self.start_index != other.start_index:
- log.Notice(_("start_indicies don't match"))
+ log.Notice(_(u"start_indicies don't match"))
return None
if self.end_index != other.end_index:
- log.Notice(_("end_index don't match"))
+ log.Notice(_(u"end_index don't match"))
return None
hash_list1 = self.hashes.items()
hash_list1.sort()
hash_list2 = other.hashes.items()
hash_list2.sort()
if hash_list1 != hash_list2:
- log.Notice(_("Hashes don't match"))
+ log.Notice(_(u"Hashes don't match"))
return None
return 1
def __ne__(self, other):
- """
+ u"""
Defines !=
"""
return not self.__eq__(other)
def contains(self, index_prefix, recursive=1):
- """
+ u"""
Return true if volume might contain index
If recursive is true, then return true if any index starting
@@ -471,11 +471,11 @@
return self.start_index <= index_prefix <= self.end_index
-nonnormal_char_re = re.compile("(\\s|[\\\\\"'])")
+nonnormal_char_re = re.compile(u"(\\s|[\\\\\"'])")
def Quote(s):
- """
+ u"""
Return quoted version of s safe to put in a manifest or volume info
"""
if not nonnormal_char_re.search(s):
@@ -483,29 +483,29 @@
slist = []
for char in s:
if nonnormal_char_re.search(char):
- slist.append("\\x%02x" % ord(char))
+ slist.append(b"\\x%02x" % ord(char))
else:
slist.append(char)
- return '"%s"' % "".join(slist)
+ return b'"%s"' % u"".join(slist)
def Unquote(quoted_string):
- """
+ u"""
Return original string from quoted_string produced by above
"""
- if not quoted_string[0] == '"' or quoted_string[0] == "'":
+ if not quoted_string[0] == b'"' or quoted_string[0] == b"'":
return quoted_string
assert quoted_string[0] == quoted_string[-1]
return_list = []
i = 1 # skip initial char
while i < len(quoted_string) - 1:
char = quoted_string[i]
- if char == "\\":
+ if char == b"\\":
# quoted section
- assert quoted_string[i + 1] == "x"
+ assert quoted_string[i + 1] == b"x"
return_list.append(chr(int(quoted_string[i + 2:i + 4], 16)))
i += 4
else:
return_list.append(char)
i += 1
- return "".join(return_list)
+ return b"".join(return_list)
=== modified file 'duplicity/patchdir.py'
--- duplicity/patchdir.py 2018-07-24 11:52:33 +0000
+++ duplicity/patchdir.py 2018-09-24 21:19:45 +0000
@@ -37,7 +37,7 @@
from duplicity.path import * # @UnusedWildImport
from duplicity.lazy import * # @UnusedWildImport
-"""Functions for patching of directories"""
+u"""Functions for patching of directories"""
class PatchDirException(Exception):
@@ -45,20 +45,20 @@
def Patch(base_path, difftar_fileobj):
- """Patch given base_path and file object containing delta"""
- diff_tarfile = tarfile.TarFile("arbitrary", "r", difftar_fileobj)
+ u"""Patch given base_path and file object containing delta"""
+ diff_tarfile = tarfile.TarFile(u"arbitrary", u"r", difftar_fileobj)
patch_diff_tarfile(base_path, diff_tarfile)
assert not difftar_fileobj.close()
def Patch_from_iter(base_path, fileobj_iter, restrict_index=()):
- """Patch given base_path and iterator of delta file objects"""
+ u"""Patch given base_path and iterator of delta file objects"""
diff_tarfile = TarFile_FromFileobjs(fileobj_iter)
patch_diff_tarfile(base_path, diff_tarfile, restrict_index)
def patch_diff_tarfile(base_path, diff_tarfile, restrict_index=()):
- """Patch given Path object using delta tarfile (as in tarfile.TarFile)
+ u"""Patch given Path object using delta tarfile (as in tarfile.TarFile)
If restrict_index is set, ignore any deltas in diff_tarfile that
don't start with restrict_index.
@@ -77,12 +77,12 @@
ITR = IterTreeReducer(PathPatcher, [base_path])
for basis_path, diff_ropath in collated:
if basis_path:
- log.Info(_("Patching %s") % (util.fsdecode(basis_path.get_relative_path())),
+ log.Info(_(u"Patching %s") % (util.fsdecode(basis_path.get_relative_path())),
log.InfoCode.patch_file_patching,
util.escape(basis_path.get_relative_path()))
ITR(basis_path.index, basis_path, diff_ropath)
else:
- log.Info(_("Patching %s") % (util.fsdecode(diff_ropath.get_relative_path())),
+ log.Info(_(u"Patching %s") % (util.fsdecode(diff_ropath.get_relative_path())),
log.InfoCode.patch_file_patching,
util.escape(diff_ropath.get_relative_path()))
ITR(diff_ropath.index, basis_path, diff_ropath)
@@ -96,7 +96,7 @@
def filter_path_iter(path_iter, index):
- """Rewrite path elements of path_iter so they start with index
+ u"""Rewrite path elements of path_iter so they start with index
Discard any that doesn't start with index, and remove the index
prefix from the rest.
@@ -111,7 +111,7 @@
def difftar2path_iter(diff_tarfile):
- """Turn file-like difftarobj into iterator of ROPaths"""
+ u"""Turn file-like difftarobj into iterator of ROPaths"""
tar_iter = iter(diff_tarfile)
multivol_fileobj = None
@@ -132,7 +132,7 @@
ropath = ROPath(index)
ropath.init_from_tarinfo(tarinfo_list[0])
ropath.difftype = difftype
- if difftype == "deleted":
+ if difftype == u"deleted":
ropath.type = None
elif ropath.isreg():
if multivol:
@@ -148,61 +148,61 @@
def get_index_from_tarinfo(tarinfo):
- """Return (index, difftype, multivol) pair from tarinfo object"""
- for prefix in ["snapshot/", "diff/", "deleted/",
- "multivol_diff/", "multivol_snapshot/"]:
+ u"""Return (index, difftype, multivol) pair from tarinfo object"""
+ for prefix in [b"snapshot/", b"diff/", b"deleted/",
+ b"multivol_diff/", b"multivol_snapshot/"]:
tiname = util.get_tarinfo_name(tarinfo)
if tiname.startswith(prefix):
name = tiname[len(prefix):] # strip prefix
- if prefix.startswith("multivol"):
- if prefix == "multivol_diff/":
- difftype = "diff"
+ if prefix.startswith(u"multivol"):
+ if prefix == u"multivol_diff/":
+ difftype = u"diff"
else:
- difftype = "snapshot"
+ difftype = u"snapshot"
multivol = 1
name, num_subs = \
- re.subn("(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$",
- "\\2", tiname)
+ re.subn(b"(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$",
+ b"\\2", tiname)
if num_subs != 1:
raise PatchDirException(u"Unrecognized diff entry %s" %
util.fsdecode(tiname))
else:
difftype = prefix[:-1] # strip trailing /
name = tiname[len(prefix):]
- if name.endswith("/"):
+ if name.endswith(b"/"):
name = name[:-1] # strip trailing /'s
multivol = 0
break
else:
raise PatchDirException(u"Unrecognized diff entry %s" %
util.fsdecode(tiname))
- if name == "." or name == "":
+ if name == b"." or name == b"":
index = ()
else:
- index = tuple(name.split("/"))
- if '..' in index:
+ index = tuple(name.split(b"/"))
+ if b'..' in index:
raise PatchDirException(u"Tar entry %s contains '..'. Security "
- "violation" % util.fsdecode(tiname))
+ u"violation" % util.fsdecode(tiname))
return (index, difftype, multivol)
class Multivol_Filelike:
- """Emulate a file like object from multivols
+ u"""Emulate a file like object from multivols
Maintains a buffer about the size of a volume. When it is read()
to the end, pull in more volumes as desired.
"""
def __init__(self, tf, tar_iter, tarinfo_list, index):
- """Initializer. tf is TarFile obj, tarinfo is first tarinfo"""
+ u"""Initializer. tf is TarFile obj, tarinfo is first tarinfo"""
self.tf, self.tar_iter = tf, tar_iter
self.tarinfo_list = tarinfo_list # must store as list for write access
self.index = index
- self.buffer = ""
+ self.buffer = b""
self.at_end = 0
def read(self, length=-1):
- """Read length bytes from file"""
+ u"""Read length bytes from file"""
if length < 0:
while self.addtobuffer():
pass
@@ -218,7 +218,7 @@
return result
def addtobuffer(self):
- """Add next chunk to buffer"""
+ u"""Add next chunk to buffer"""
if self.at_end:
return None
index, difftype, multivol = get_index_from_tarinfo( # @UnusedVariable
@@ -242,24 +242,24 @@
return 1
def close(self):
- """If not at end, read remaining data"""
+ u"""If not at end, read remaining data"""
if not self.at_end:
while 1:
- self.buffer = ""
+ self.buffer = b""
if not self.addtobuffer():
break
self.at_end = 1
class PathPatcher(ITRBranch):
- """Used by DirPatch, process the given basis and diff"""
+ u"""Used by DirPatch, process the given basis and diff"""
def __init__(self, base_path):
- """Set base_path, Path of root of tree"""
+ u"""Set base_path, Path of root of tree"""
self.base_path = base_path
self.dir_diff_ropath = None
def start_process(self, index, basis_path, diff_ropath):
- """Start processing when diff_ropath is a directory"""
+ u"""Start processing when diff_ropath is a directory"""
if not (diff_ropath and diff_ropath.isdir()):
assert index == (), util.uindex(index) # should only happen for first elem
self.fast_process(index, basis_path, diff_ropath)
@@ -276,44 +276,44 @@
self.dir_diff_ropath = diff_ropath
def end_process(self):
- """Copy directory permissions when leaving tree"""
+ u"""Copy directory permissions when leaving tree"""
if self.dir_diff_ropath:
self.dir_diff_ropath.copy_attribs(self.dir_basis_path)
def can_fast_process(self, index, basis_path, diff_ropath):
- """No need to recurse if diff_ropath isn't a directory"""
+ u"""No need to recurse if diff_ropath isn't a directory"""
return not (diff_ropath and diff_ropath.isdir())
def fast_process(self, index, basis_path, diff_ropath):
- """For use when neither is a directory"""
+ u"""For use when neither is a directory"""
if not diff_ropath:
return # no change
elif not basis_path:
- if diff_ropath.difftype == "deleted":
+ if diff_ropath.difftype == u"deleted":
pass # already deleted
else:
# just copy snapshot over
diff_ropath.copy(self.base_path.new_index(index))
- elif diff_ropath.difftype == "deleted":
+ elif diff_ropath.difftype == u"deleted":
if basis_path.isdir():
basis_path.deltree()
else:
basis_path.delete()
- elif not basis_path.isreg() or (basis_path.isreg() and diff_ropath.difftype == "snapshot"):
+ elif not basis_path.isreg() or (basis_path.isreg() and diff_ropath.difftype == u"snapshot"):
if basis_path.isdir():
basis_path.deltree()
else:
basis_path.delete()
diff_ropath.copy(basis_path)
else:
- assert diff_ropath.difftype == "diff", diff_ropath.difftype
+ assert diff_ropath.difftype == u"diff", diff_ropath.difftype
basis_path.patch_with_attribs(diff_ropath)
class TarFile_FromFileobjs:
- """Like a tarfile.TarFile iterator, but read from multiple fileobjs"""
+ u"""Like a tarfile.TarFile iterator, but read from multiple fileobjs"""
def __init__(self, fileobj_iter):
- """Make new tarinfo iterator
+ u"""Make new tarinfo iterator
fileobj_iter should be an iterator of file objects opened for
reading. They will be closed at end of reading.
@@ -327,11 +327,11 @@
return self
def set_tarfile(self):
- """Set tarfile from next file object, or raise StopIteration"""
+ u"""Set tarfile from next file object, or raise StopIteration"""
if self.current_fp:
assert not self.current_fp.close()
self.current_fp = next(self.fileobj_iter)
- self.tarfile = util.make_tarfile("r", self.current_fp)
+ self.tarfile = util.make_tarfile(u"r", self.current_fp)
self.tar_iter = iter(self.tarfile)
def next(self):
@@ -345,12 +345,12 @@
return next(self.tar_iter)
def extractfile(self, tarinfo):
- """Return data associated with given tarinfo"""
+ u"""Return data associated with given tarinfo"""
return self.tarfile.extractfile(tarinfo)
def collate_iters(iter_list):
- """Collate iterators by index
+ u"""Collate iterators by index
Input is a list of n iterators each of which must iterate elements
with an index attribute. The elements must come out in increasing
@@ -371,7 +371,7 @@
elems = overflow[:]
def setrorps(overflow, elems):
- """Set the overflow and rorps list"""
+ u"""Set the overflow and rorps list"""
for i in range(iter_num):
if not overflow[i] and elems[i] is None:
try:
@@ -381,7 +381,7 @@
elems[i] = None
def getleastindex(elems):
- """Return the first index in elems, assuming elems isn't empty"""
+ u"""Return the first index in elems, assuming elems isn't empty"""
return min(map(lambda elem: elem.index, filter(lambda x: x, elems)))
def yield_tuples(iter_num, overflow, elems):
@@ -403,7 +403,7 @@
class IndexedTuple:
- """Like a tuple, but has .index (used previously by collate_iters)"""
+ u"""Like a tuple, but has .index (used previously by collate_iters)"""
def __init__(self, index, sequence):
self.index = index
self.data = tuple(sequence)
@@ -412,7 +412,7 @@
return len(self.data)
def __getitem__(self, key):
- """This only works for numerical keys (easier this way)"""
+ u"""This only works for numerical keys (easier this way)"""
return self.data[key]
def __lt__(self, other):
@@ -448,11 +448,11 @@
return None
def __str__(self):
- return "(%s).%s" % (", ".join(map(str, self.data)), self.index)
+ return u"(%s).%s" % (u", ".join(map(str, self.data)), self.index)
def normalize_ps(patch_sequence):
- """Given an sequence of ROPath deltas, remove blank and unnecessary
+    u"""Given a sequence of ROPath deltas, remove blank and unnecessary
The sequence is assumed to be in patch order (later patches apply
to earlier ones). A patch is unnecessary if a later one doesn't
@@ -467,29 +467,29 @@
if delta is not None:
# skip blank entries
result_list.insert(0, delta)
- if delta.difftype != "diff":
+ if delta.difftype != u"diff":
break
i -= 1
return result_list
def patch_seq2ropath(patch_seq):
- """Apply the patches in patch_seq, return single ropath"""
+ u"""Apply the patches in patch_seq, return single ropath"""
first = patch_seq[0]
- assert first.difftype != "diff", "First patch in sequence " \
- "%s was a diff" % patch_seq
+ assert first.difftype != u"diff", u"First patch in sequence " \
+ u"%s was a diff" % patch_seq
if not first.isreg():
# No need to bother with data if not regular file
- assert len(patch_seq) == 1, "Patch sequence isn't regular, but " \
- "has %d entries" % len(patch_seq)
+ assert len(patch_seq) == 1, u"Patch sequence isn't regular, but " \
+ u"has %d entries" % len(patch_seq)
return first.get_ropath()
- current_file = first.open("rb")
+ current_file = first.open(u"rb")
for delta_ropath in patch_seq[1:]:
- assert delta_ropath.difftype == "diff", delta_ropath.difftype
+ assert delta_ropath.difftype == u"diff", delta_ropath.difftype
if not isinstance(current_file, file):
- """
+ u"""
librsync insists on a real file object, which we create manually
by using the duplicity.tempdir to tell us where.
@@ -503,14 +503,14 @@
tempfp.seek(0)
current_file = tempfp
current_file = librsync.PatchedFile(current_file,
- delta_ropath.open("rb"))
+ delta_ropath.open(u"rb"))
result = patch_seq[-1].get_ropath()
result.setfileobj(current_file)
return result
def integrate_patch_iters(iter_list):
- """Combine a list of iterators of ropath patches
+ u"""Combine a list of iterators of ropath patches
The iter_list should be sorted in patch order, and the elements in
each iter_list need to be orderd by index. The output will be an
@@ -527,14 +527,14 @@
yield final_ropath
except Exception as e:
filename = normalized[-1].get_ropath().get_relative_path()
- log.Warn(_("Error '%s' patching %s") %
+ log.Warn(_(u"Error '%s' patching %s") %
(util.uexc(e), util.fsdecode(filename)),
log.WarningCode.cannot_process,
util.escape(filename))
def tarfiles2rop_iter(tarfile_list, restrict_index=()):
- """Integrate tarfiles of diffs into single ROPath iter
+ u"""Integrate tarfiles of diffs into single ROPath iter
Then filter out all the diffs in that index which don't start with
the restrict_index.
@@ -548,7 +548,7 @@
def Write_ROPaths(base_path, rop_iter):
- """Write out ropaths in rop_iter starting at base_path
+ u"""Write out ropaths in rop_iter starting at base_path
Returns 1 if something was actually written, 0 otherwise.
@@ -564,20 +564,20 @@
class ROPath_IterWriter(ITRBranch):
- """Used in Write_ROPaths above
+ u"""Used in Write_ROPaths above
We need to use an ITR because we have to update the
permissions/times of directories after we write the files in them.
"""
def __init__(self, base_path):
- """Set base_path, Path of root of tree"""
+ u"""Set base_path, Path of root of tree"""
self.base_path = base_path
self.dir_diff_ropath = None
self.dir_new_path = None
def start_process(self, index, ropath):
- """Write ropath. Only handles the directory case"""
+ u"""Write ropath. Only handles the directory case"""
if not ropath.isdir():
# Base may not be a directory, but rest should
assert ropath.index == (), ropath.index
@@ -596,19 +596,19 @@
self.dir_diff_ropath = ropath
def end_process(self):
- """Update information of a directory when leaving it"""
+ u"""Update information of a directory when leaving it"""
if self.dir_diff_ropath:
self.dir_diff_ropath.copy_attribs(self.dir_new_path)
def can_fast_process(self, index, ropath):
- """Can fast process (no recursion) if ropath isn't a directory"""
- log.Info(_("Writing %s of type %s") %
+ u"""Can fast process (no recursion) if ropath isn't a directory"""
+ log.Info(_(u"Writing %s of type %s") %
(util.fsdecode(ropath.get_relative_path()), ropath.type),
log.InfoCode.patch_file_writing,
- "%s %s" % (util.escape(ropath.get_relative_path()), ropath.type))
+ u"%s %s" % (util.escape(ropath.get_relative_path()), ropath.type))
return not ropath.isdir()
def fast_process(self, index, ropath):
- """Write non-directory ropath to destination"""
+ u"""Write non-directory ropath to destination"""
if ropath.exists():
ropath.copy(self.base_path.new_index(index))
=== modified file 'duplicity/path.py'
--- duplicity/path.py 2018-07-24 11:52:33 +0000
+++ duplicity/path.py 2018-09-24 21:19:45 +0000
@@ -19,7 +19,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-"""Wrapper class around a file like "/usr/bin/env"
+u"""Wrapper class around a file like "/usr/bin/env"
This class makes certain file operations more convenient and
associates stat information with filenames
@@ -53,7 +53,7 @@
class StatResult:
- """Used to emulate the output of os.stat() and related"""
+ u"""Used to emulate the output of os.stat() and related"""
# st_mode is required by the TarInfo class, but it's unclear how
# to generate it from file permissions.
st_mode = 0
@@ -64,158 +64,158 @@
class ROPath:
- """Read only Path
+ u"""Read only Path
Objects of this class doesn't represent real files, so they don't
have a name. They are required to be indexed though.
"""
def __init__(self, index, stat=None):
- """ROPath initializer"""
+ u"""ROPath initializer"""
self.opened, self.fileobj = None, None
self.index = index
self.stat, self.type = None, None
self.mode, self.devnums = None, None
def set_from_stat(self):
- """Set the value of self.type, self.mode from self.stat"""
+ u"""Set the value of self.type, self.mode from self.stat"""
if not self.stat:
self.type = None
st_mode = self.stat.st_mode
if stat.S_ISREG(st_mode):
- self.type = "reg"
+ self.type = u"reg"
elif stat.S_ISDIR(st_mode):
- self.type = "dir"
+ self.type = u"dir"
elif stat.S_ISLNK(st_mode):
- self.type = "sym"
+ self.type = u"sym"
elif stat.S_ISFIFO(st_mode):
- self.type = "fifo"
+ self.type = u"fifo"
elif stat.S_ISSOCK(st_mode):
raise PathException(util.fsdecode(self.get_relative_path()) +
u"is a socket, unsupported by tar")
- self.type = "sock"
+ self.type = u"sock"
elif stat.S_ISCHR(st_mode):
- self.type = "chr"
+ self.type = u"chr"
elif stat.S_ISBLK(st_mode):
- self.type = "blk"
+ self.type = u"blk"
else:
- raise PathException("Unknown type")
+ raise PathException(u"Unknown type")
self.mode = stat.S_IMODE(st_mode)
- if self.type in ("chr", "blk"):
+ if self.type in (u"chr", u"blk"):
try:
self.devnums = (os.major(self.stat.st_rdev),
os.minor(self.stat.st_rdev))
except:
- log.Warn(_("Warning: %s invalid devnums (0x%X), treating as (0, 0).")
+ log.Warn(_(u"Warning: %s invalid devnums (0x%X), treating as (0, 0).")
% (util.fsdecode(self.get_relative_path()), self.stat.st_rdev))
self.devnums = (0, 0)
def blank(self):
- """Black out self - set type and stat to None"""
+        u"""Blank out self - set type and stat to None"""
self.type, self.stat = None, None
def exists(self):
- """True if corresponding file exists"""
+ u"""True if corresponding file exists"""
return self.type
def isreg(self):
- """True if self corresponds to regular file"""
- return self.type == "reg"
+ u"""True if self corresponds to regular file"""
+ return self.type == u"reg"
def isdir(self):
- """True if self is dir"""
- return self.type == "dir"
+ u"""True if self is dir"""
+ return self.type == u"dir"
def issym(self):
- """True if self is sym"""
- return self.type == "sym"
+ u"""True if self is sym"""
+ return self.type == u"sym"
def isfifo(self):
- """True if self is fifo"""
- return self.type == "fifo"
+ u"""True if self is fifo"""
+ return self.type == u"fifo"
def issock(self):
- """True is self is socket"""
- return self.type == "sock"
+        u"""True if self is a socket"""
+ return self.type == u"sock"
def isdev(self):
- """True is self is a device file"""
- return self.type == "chr" or self.type == "blk"
+        u"""True if self is a device file"""
+ return self.type == u"chr" or self.type == u"blk"
def getdevloc(self):
- """Return device number path resides on"""
+ u"""Return device number path resides on"""
return self.stat.st_dev
def getsize(self):
- """Return length in bytes from stat object"""
+ u"""Return length in bytes from stat object"""
return self.stat.st_size
def getmtime(self):
- """Return mod time of path in seconds"""
+ u"""Return mod time of path in seconds"""
return int(self.stat.st_mtime)
def get_relative_path(self):
- """Return relative path, created from index"""
+ u"""Return relative path, created from index"""
if self.index:
- return "/".join(self.index)
+ return b"/".join(self.index)
else:
- return "."
+ return b"."
def getperms(self):
- """Return permissions mode, owner and group"""
+ u"""Return permissions mode, owner and group"""
s1 = self.stat
- return '%s:%s %o' % (s1.st_uid, s1.st_gid, self.mode)
+ return u'%s:%s %o' % (s1.st_uid, s1.st_gid, self.mode)
def open(self, mode):
- """Return fileobj associated with self"""
- assert mode == "rb" and self.fileobj and not self.opened, \
- "%s %s %s" % (mode, self.fileobj, self.opened)
+ u"""Return fileobj associated with self"""
+ assert mode == u"rb" and self.fileobj and not self.opened, \
+ u"%s %s %s" % (mode, self.fileobj, self.opened)
self.opened = 1
return self.fileobj
def get_data(self):
- """Return contents of associated fileobj in string"""
- fin = self.open("rb")
+ u"""Return contents of associated fileobj in string"""
+ fin = self.open(u"rb")
buf = fin.read()
assert not fin.close()
return buf
def setfileobj(self, fileobj):
- """Set file object returned by open()"""
+ u"""Set file object returned by open()"""
assert not self.fileobj
self.fileobj = fileobj
self.opened = None
def init_from_tarinfo(self, tarinfo):
- """Set data from tarinfo object (part of tarfile module)"""
+ u"""Set data from tarinfo object (part of tarfile module)"""
# Set the typepp
type = tarinfo.type
if type == tarfile.REGTYPE or type == tarfile.AREGTYPE:
- self.type = "reg"
+ self.type = u"reg"
elif type == tarfile.LNKTYPE:
- raise PathException("Hard links not supported yet")
+ raise PathException(u"Hard links not supported yet")
elif type == tarfile.SYMTYPE:
- self.type = "sym"
+ self.type = u"sym"
self.symtext = tarinfo.linkname
elif type == tarfile.CHRTYPE:
- self.type = "chr"
+ self.type = u"chr"
self.devnums = (tarinfo.devmajor, tarinfo.devminor)
elif type == tarfile.BLKTYPE:
- self.type = "blk"
+ self.type = u"blk"
self.devnums = (tarinfo.devmajor, tarinfo.devminor)
elif type == tarfile.DIRTYPE:
- self.type = "dir"
+ self.type = u"dir"
elif type == tarfile.FIFOTYPE:
- self.type = "fifo"
+ self.type = u"fifo"
else:
- raise PathException("Unknown tarinfo type %s" % (type,))
+ raise PathException(u"Unknown tarinfo type %s" % (type,))
self.mode = tarinfo.mode
self.stat = StatResult()
- """ Set user and group id
+ u""" Set user and group id
use numeric id if name lookup fails
OR
--numeric-owner is set
@@ -235,13 +235,13 @@
self.stat.st_mtime = int(tarinfo.mtime)
if self.stat.st_mtime < 0:
- log.Warn(_("Warning: %s has negative mtime, treating as 0.")
+ log.Warn(_(u"Warning: %s has negative mtime, treating as 0.")
% (tarinfo.uc_name))
self.stat.st_mtime = 0
self.stat.st_size = tarinfo.size
def get_ropath(self):
- """Return ropath copy of self"""
+ u"""Return ropath copy of self"""
new_ropath = ROPath(self.index, self.stat)
new_ropath.type, new_ropath.mode = self.type, self.mode
if self.issym():
@@ -253,7 +253,7 @@
return new_ropath
def get_tarinfo(self):
- """Generate a tarfile.TarInfo object based on self
+ u"""Generate a tarfile.TarInfo object based on self
Doesn't set size based on stat, because we may want to replace
data wiht other stream. Size should be set separately by
@@ -262,11 +262,11 @@
"""
ti = tarfile.TarInfo()
if self.index:
- ti.name = "/".join(self.index)
+ ti.name = b"/".join(self.index)
else:
- ti.name = "."
+ ti.name = b"."
if self.isdir():
- ti.name += "/" # tar dir naming convention
+ ti.name += b"/" # tar dir naming convention
ti.size = 0
if self.type:
@@ -283,18 +283,18 @@
ti.type = tarfile.SYMTYPE
ti.linkname = self.symtext
elif self.isdev():
- if self.type == "chr":
+ if self.type == u"chr":
ti.type = tarfile.CHRTYPE
else:
ti.type = tarfile.BLKTYPE
ti.devmajor, ti.devminor = self.devnums
else:
- raise PathException("Unrecognized type " + str(self.type))
+ raise PathException(u"Unrecognized type " + str(self.type))
ti.mode = self.mode
ti.uid, ti.gid = self.stat.st_uid, self.stat.st_gid
if self.stat.st_mtime < 0:
- log.Warn(_("Warning: %s has negative mtime, treating as 0.")
+ log.Warn(_(u"Warning: %s has negative mtime, treating as 0.")
% (util.fsdecode(self.get_relative_path())))
ti.mtime = 0
else:
@@ -303,14 +303,14 @@
try:
ti.uname = cached_ops.getpwuid(ti.uid)[0]
except KeyError:
- ti.uname = ''
+ ti.uname = u''
try:
ti.gname = cached_ops.getgrgid(ti.gid)[0]
except KeyError:
- ti.gname = ''
+            ti.gname = u''
if ti.type in (tarfile.CHRTYPE, tarfile.BLKTYPE):
- if hasattr(os, "major") and hasattr(os, "minor"):
+ if hasattr(os, u"major") and hasattr(os, u"minor"):
ti.devmajor, ti.devminor = self.devnums
else:
# Currently we depend on an uninitiliazed tarinfo file to
@@ -320,7 +320,7 @@
return ti
def __eq__(self, other):
- """Used to compare two ROPaths. Doesn't look at fileobjs"""
+ u"""Used to compare two ROPaths. Doesn't look at fileobjs"""
if not self.type and not other.type:
return 1 # neither exists
if not self.stat and other.stat or not other.stat and self.stat:
@@ -348,7 +348,7 @@
return not self.__eq__(other)
def compare_verbose(self, other, include_data=0):
- """Compare ROPaths like __eq__, but log reason if different
+ u"""Compare ROPaths like __eq__, but log reason if different
This is placed in a separate function from __eq__ because
__eq__ should be very time sensitive, and logging statements
@@ -358,7 +358,7 @@
"""
def log_diff(log_string):
- log_str = _("Difference found:") + u" " + log_string
+ log_str = _(u"Difference found:") + u" " + log_string
log.Notice(log_str % (util.fsdecode(self.get_relative_path())))
if include_data is False:
@@ -367,24 +367,24 @@
if not self.type and not other.type:
return 1
if not self.stat and other.stat:
- log_diff(_("New file %s"))
+ log_diff(_(u"New file %s"))
return 0
if not other.stat and self.stat:
- log_diff(_("File %s is missing"))
+ log_diff(_(u"File %s is missing"))
return 0
if self.type != other.type:
- log_diff(_("File %%s has type %s, expected %s") %
+ log_diff(_(u"File %%s has type %s, expected %s") %
(other.type, self.type))
return 0
if self.isreg() or self.isdir() or self.isfifo():
if not self.perms_equal(other):
- log_diff(_("File %%s has permissions %s, expected %s") %
+ log_diff(_(u"File %%s has permissions %s, expected %s") %
(other.getperms(), self.getperms()))
return 0
if ((int(self.stat.st_mtime) != int(other.stat.st_mtime)) and
(self.stat.st_mtime > 0 or other.stat.st_mtime > 0)):
- log_diff(_("File %%s has mtime %s, expected %s") %
+ log_diff(_(u"File %%s has mtime %s, expected %s") %
(dup_time.timetopretty(int(other.stat.st_mtime)),
dup_time.timetopretty(int(self.stat.st_mtime))))
return 0
@@ -392,33 +392,33 @@
if self.compare_data(other):
return 1
else:
- log_diff(_("Data for file %s is different"))
+ log_diff(_(u"Data for file %s is different"))
return 0
else:
return 1
elif self.issym():
- if self.symtext == other.symtext or self.symtext + "/" == other.symtext:
+ if self.symtext == other.symtext or self.symtext + u"/" == other.symtext:
return 1
else:
- log_diff(_("Symlink %%s points to %s, expected %s") %
+ log_diff(_(u"Symlink %%s points to %s, expected %s") %
(other.symtext, self.symtext))
return 0
elif self.isdev():
if not self.perms_equal(other):
- log_diff(_("File %%s has permissions %s, expected %s") %
+ log_diff(_(u"File %%s has permissions %s, expected %s") %
(other.getperms(), self.getperms()))
return 0
if self.devnums != other.devnums:
- log_diff(_("Device file %%s has numbers %s, expected %s")
+ log_diff(_(u"Device file %%s has numbers %s, expected %s")
% (other.devnums, self.devnums))
return 0
return 1
assert 0
def compare_data(self, other):
- """Compare data from two regular files, return true if same"""
- f1 = self.open("rb")
- f2 = other.open("rb")
+ u"""Compare data from two regular files, return true if same"""
+ f1 = self.open(u"rb")
+ f2 = other.open(u"rb")
def close():
assert not f1.close()
@@ -435,15 +435,15 @@
return 1
def perms_equal(self, other):
- """True if self and other have same permissions and ownership"""
+ u"""True if self and other have same permissions and ownership"""
s1, s2 = self.stat, other.stat
return (self.mode == other.mode and
s1.st_gid == s2.st_gid and s1.st_uid == s2.st_uid)
def copy(self, other):
- """Copy self to other. Also copies data. Other must be Path"""
+ u"""Copy self to other. Also copies data. Other must be Path"""
if self.isreg():
- other.writefileobj(self.open("rb"))
+ other.writefileobj(self.open(u"rb"))
elif self.isdir():
os.mkdir(other.name)
elif self.issym():
@@ -456,15 +456,15 @@
elif self.issock():
socket.socket(socket.AF_UNIX).bind(other.name)
elif self.isdev():
- if self.type == "chr":
- devtype = "c"
+ if self.type == u"chr":
+ devtype = u"c"
else:
- devtype = "b"
+ devtype = u"b"
other.makedev(devtype, *self.devnums)
self.copy_attribs(other)
def copy_attribs(self, other):
- """Only copy attributes from self to other"""
+ u"""Only copy attributes from self to other"""
if isinstance(other, Path):
if self.stat is not None:
util.maybe_ignore_errors(lambda: os.chown(other.name, self.stat.st_uid, self.stat.st_gid))
@@ -481,18 +481,18 @@
other.mode = self.mode
def __unicode__(self):
- """Return string representation"""
+ u"""Return string representation"""
return u"(%s %s)" % (util.uindex(self.index), self.type)
class Path(ROPath):
- """
+ u"""
Path class - wrapper around ordinary local files
Besides caching stat() results, this class organizes various file
code.
"""
- regex_chars_to_quote = re.compile("[\\\\\\\"\\$`]")
+ regex_chars_to_quote = re.compile(u"[\\\\\\\"\\$`]")
def rename_index(self, index):
if not globals.rename or not index:
@@ -508,7 +508,7 @@
return index # no rename found
def __init__(self, base, index=()):
- """Path initializer"""
+ u"""Path initializer"""
# self.opened should be true if the file has been opened, and
# self.fileobj can override returned fileobj
self.opened, self.fileobj = None, None
@@ -530,7 +530,7 @@
self.setdata()
def setdata(self):
- """Refresh stat cache"""
+ u"""Refresh stat cache"""
try:
# We may be asked to look at the target of symlinks rather than
# the link itself.
@@ -540,7 +540,7 @@
self.stat = os.lstat(self.name)
except OSError as e:
err_string = errno.errorcode[e[0]]
- if err_string in ["ENOENT", "ENOTDIR", "ELOOP", "ENOTCONN"]:
+ if err_string in [u"ENOENT", u"ENOTDIR", u"ELOOP", u"ENOTCONN"]:
self.stat, self.type = None, None # file doesn't exist
self.mode = None
else:
@@ -551,23 +551,23 @@
self.symtext = os.readlink(self.name)
def append(self, ext):
- """Return new Path with ext added to index"""
+ u"""Return new Path with ext added to index"""
return self.__class__(self.base, self.index + (ext,))
def new_index(self, index):
- """Return new Path with index index"""
+ u"""Return new Path with index index"""
return self.__class__(self.base, index)
def listdir(self):
- """Return list generated by os.listdir"""
+ u"""Return list generated by os.listdir"""
return os.listdir(self.name)
def isemptydir(self):
- """Return true if path is a directory and is empty"""
+ u"""Return true if path is a directory and is empty"""
return self.isdir() and not self.listdir()
- def open(self, mode="rb"):
- """
+ def open(self, mode=u"rb"):
+ u"""
Return fileobj associated with self
Usually this is just the file data on disk, but can be
@@ -581,25 +581,25 @@
return result
def makedev(self, type, major, minor):
- """Make a device file with specified type, major/minor nums"""
- cmdlist = ['mknod', self.name, type, str(major), str(minor)]
- if os.spawnvp(os.P_WAIT, 'mknod', cmdlist) != 0:
- raise PathException("Error running %s" % cmdlist)
+ u"""Make a device file with specified type, major/minor nums"""
+ cmdlist = [u'mknod', self.name, type, str(major), str(minor)]
+ if os.spawnvp(os.P_WAIT, u'mknod', cmdlist) != 0:
+ raise PathException(u"Error running %s" % cmdlist)
self.setdata()
def mkdir(self):
- """Make directory(s) at specified path"""
- log.Info(_("Making directory %s") % self.uc_name)
+ u"""Make directory(s) at specified path"""
+ log.Info(_(u"Making directory %s") % self.uc_name)
try:
os.makedirs(self.name)
except OSError:
if (not globals.force):
- raise PathException("Error creating directory %s" % self.uc_name, 7)
+ raise PathException(u"Error creating directory %s" % self.uc_name, 7)
self.setdata()
def delete(self):
- """Remove this file"""
- log.Info(_("Deleting %s") % self.uc_name)
+ u"""Remove this file"""
+ log.Info(_(u"Deleting %s") % self.uc_name)
if self.isdir():
util.ignore_missing(os.rmdir, self.name)
else:
@@ -607,15 +607,15 @@
self.setdata()
def touch(self):
- """Open the file, write 0 bytes, close"""
- log.Info(_("Touching %s") % self.uc_name)
- fp = self.open("wb")
+ u"""Open the file, write 0 bytes, close"""
+ log.Info(_(u"Touching %s") % self.uc_name)
+ fp = self.open(u"wb")
fp.close()
def deltree(self):
- """Remove self by recursively deleting files under it"""
+ u"""Remove self by recursively deleting files under it"""
from duplicity import selection # todo: avoid circ. dep. issue
- log.Info(_("Deleting tree %s") % self.uc_name)
+ log.Info(_(u"Deleting tree %s") % self.uc_name)
itr = IterTreeReducer(PathDeleter, [])
for path in selection.Select(self).set_iter():
itr(path.index, path)
@@ -623,50 +623,50 @@
self.setdata()
def get_parent_dir(self):
- """Return directory that self is in"""
+ u"""Return directory that self is in"""
if self.index:
return Path(self.base, self.index[:-1])
else:
- components = self.base.split("/")
+ components = self.base.split(u"/")
if len(components) == 2 and not components[0]:
- return Path("/") # already in root directory
+ return Path(u"/") # already in root directory
else:
- return Path("/".join(components[:-1]))
+ return Path(u"/".join(components[:-1]))
def writefileobj(self, fin):
- """Copy file object fin to self. Close both when done."""
- fout = self.open("wb")
+ u"""Copy file object fin to self. Close both when done."""
+ fout = self.open(u"wb")
while 1:
buf = fin.read(_copy_blocksize)
if not buf:
break
fout.write(buf)
if fin.close() or fout.close():
- raise PathException("Error closing file object")
+ raise PathException(u"Error closing file object")
self.setdata()
def rename(self, new_path):
- """Rename file at current path to new_path."""
+ u"""Rename file at current path to new_path."""
shutil.move(self.name, new_path.name)
self.setdata()
new_path.setdata()
def move(self, new_path):
- """Like rename but destination may be on different file system"""
+ u"""Like rename but destination may be on different file system"""
self.copy(new_path)
self.delete()
def chmod(self, mode):
- """Change permissions of the path"""
+ u"""Change permissions of the path"""
os.chmod(self.name, mode)
self.setdata()
def patch_with_attribs(self, diff_ropath):
- """Patch self with diff and then copy attributes over"""
+ u"""Patch self with diff and then copy attributes over"""
assert self.isreg() and diff_ropath.isreg()
temp_path = self.get_temp_in_same_dir()
- fbase = self.open("rb")
- fdiff = diff_ropath.open("rb")
+ fbase = self.open(u"rb")
+ fdiff = diff_ropath.open(u"rb")
patch_fileobj = librsync.PatchedFile(fbase, fdiff)
temp_path.writefileobj(patch_fileobj)
assert not fbase.close()
@@ -675,11 +675,11 @@
temp_path.rename(self)
def get_temp_in_same_dir(self):
- """Return temp non existent path in same directory as self"""
+ u"""Return temp non existent path in same directory as self"""
global _tmp_path_counter
parent_dir = self.get_parent_dir()
while 1:
- temp_path = parent_dir.append("duplicity_temp." +
+ temp_path = parent_dir.append(u"duplicity_temp." +
str(_tmp_path_counter))
if not temp_path.type:
return temp_path
@@ -688,18 +688,18 @@
u"Warning too many temp files created for " + self.uc_name
def compare_recursive(self, other, verbose=None):
- """Compare self to other Path, descending down directories"""
+ u"""Compare self to other Path, descending down directories"""
from duplicity import selection # todo: avoid circ. dep. issue
selfsel = selection.Select(self).set_iter()
othersel = selection.Select(other).set_iter()
return Iter.equal(selfsel, othersel, verbose)
def __repr__(self):
- """Return string representation"""
- return "(%s %s %s)" % (self.index, self.name, self.type)
+ u"""Return string representation"""
+ return u"(%s %s %s)" % (self.index, self.name, self.type)
def quote(self, s=None):
- """
+ u"""
Return quoted version of s (defaults to self.name)
The output is meant to be interpreted with shells, so can be
@@ -707,15 +707,15 @@
"""
if not s:
s = self.name
- return '"%s"' % self.regex_chars_to_quote.sub(lambda m: "\\" + m.group(0), s)
+ return u'"%s"' % self.regex_chars_to_quote.sub(lambda m: u"\\" + m.group(0), s)
def unquote(self, s):
- """Return unquoted version of string s, as quoted by above quote()"""
- assert s[0] == s[-1] == "\"" # string must be quoted by above
- result = ""
+ u"""Return unquoted version of string s, as quoted by above quote()"""
+ assert s[0] == s[-1] == u"\"" # string must be quoted by above
+ result = u""
i = 1
while i < len(s) - 1:
- if s[i] == "\\":
+ if s[i] == u"\\":
result += s[i + 1]
i += 2
else:
@@ -724,38 +724,38 @@
return result
def get_filename(self):
- """Return filename of last component"""
- components = self.name.split("/")
+ u"""Return filename of last component"""
+ components = self.name.split(u"/")
assert components and components[-1]
return components[-1]
def get_canonical(self):
- """
+ u"""
Return string of canonical version of path
Remove ".", and trailing slashes where possible. Note that
it's harder to remove "..", as "foo/bar/.." is not necessarily
"foo", so we can't use path.normpath()
"""
- newpath = "/".join(filter(lambda x: x and x != ".",
- self.name.split("/")))
- if self.name[0] == "/":
- return "/" + newpath
+ newpath = u"/".join(filter(lambda x: x and x != u".",
+ self.name.split(u"/")))
+ if self.name[0] == u"/":
+ return u"/" + newpath
elif newpath:
return newpath
else:
- return "."
+ return u"."
class DupPath(Path):
- """
+ u"""
Represent duplicity data files
Based on the file name, files that are compressed or encrypted
will have different open() methods.
"""
def __init__(self, base, index=(), parseresults=None):
- """
+ u"""
DupPath initializer
The actual filename (no directory) must be the single element
@@ -767,12 +767,12 @@
else:
assert len(index) == 1
self.pr = file_naming.parse(index[0])
- assert self.pr, "must be a recognizable duplicity file"
+ assert self.pr, u"must be a recognizable duplicity file"
Path.__init__(self, base, index)
- def filtered_open(self, mode="rb", gpg_profile=None):
- """
+ def filtered_open(self, mode=u"rb", gpg_profile=None):
+ u"""
Return fileobj with appropriate encryption/compression
If encryption is specified but no gpg_profile, use
@@ -788,16 +788,16 @@
elif self.pr.encrypted:
if not gpg_profile:
gpg_profile = globals.gpg_profile
- if mode == "rb":
+ if mode == u"rb":
return gpg.GPGFile(False, self, gpg_profile)
- elif mode == "wb":
+ elif mode == u"wb":
return gpg.GPGFile(True, self, gpg_profile)
else:
return self.open(mode)
class PathDeleter(ITRBranch):
- """Delete a directory. Called by Path.deltree"""
+ u"""Delete a directory. Called by Path.deltree"""
def start_process(self, index, path):
self.path = path
=== modified file 'duplicity/statistics.py'
--- duplicity/statistics.py 2018-07-24 11:52:33 +0000
+++ duplicity/statistics.py 2018-09-24 21:19:45 +0000
@@ -19,7 +19,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-"""Generate and process backup statistics"""
+u"""Generate and process backup statistics"""
from future_builtins import map
@@ -35,66 +35,66 @@
class StatsObj:
- """Contains various statistics, provide string conversion functions"""
+ u"""Contains various statistics, provide string conversion functions"""
# used when quoting files in get_stats_line
- space_regex = re.compile(" ")
+ space_regex = re.compile(u" ")
- stat_file_attrs = ('SourceFiles',
- 'SourceFileSize',
- 'NewFiles',
- 'NewFileSize',
- 'DeletedFiles',
- 'ChangedFiles',
- 'ChangedFileSize',
- 'ChangedDeltaSize',
- 'DeltaEntries',
- 'RawDeltaSize')
- stat_misc_attrs = ('Errors',
- 'TotalDestinationSizeChange')
- stat_time_attrs = ('StartTime',
- 'EndTime',
- 'ElapsedTime')
- stat_attrs = (('Filename',) + stat_time_attrs +
+ stat_file_attrs = (u'SourceFiles',
+ u'SourceFileSize',
+ u'NewFiles',
+ u'NewFileSize',
+ u'DeletedFiles',
+ u'ChangedFiles',
+ u'ChangedFileSize',
+ u'ChangedDeltaSize',
+ u'DeltaEntries',
+ u'RawDeltaSize')
+ stat_misc_attrs = (u'Errors',
+ u'TotalDestinationSizeChange')
+ stat_time_attrs = (u'StartTime',
+ u'EndTime',
+ u'ElapsedTime')
+ stat_attrs = ((u'Filename',) + stat_time_attrs +
stat_misc_attrs + stat_file_attrs)
# Below, the second value in each pair is true iff the value
# indicates a number of bytes
- stat_file_pairs = (('SourceFiles', False),
- ('SourceFileSize', True),
- ('NewFiles', False),
- ('NewFileSize', True),
- ('DeletedFiles', False),
- ('ChangedFiles', False),
- ('ChangedFileSize', True),
- ('ChangedDeltaSize', True),
- ('DeltaEntries', False),
- ('RawDeltaSize', True))
+ stat_file_pairs = ((u'SourceFiles', False),
+ (u'SourceFileSize', True),
+ (u'NewFiles', False),
+ (u'NewFileSize', True),
+ (u'DeletedFiles', False),
+ (u'ChangedFiles', False),
+ (u'ChangedFileSize', True),
+ (u'ChangedDeltaSize', True),
+ (u'DeltaEntries', False),
+ (u'RawDeltaSize', True))
# This is used in get_byte_summary_string below
- byte_abbrev_list = ((1024 * 1024 * 1024 * 1024, "TB"),
- (1024 * 1024 * 1024, "GB"),
- (1024 * 1024, "MB"),
- (1024, "KB"))
+ byte_abbrev_list = ((1024 * 1024 * 1024 * 1024, u"TB"),
+ (1024 * 1024 * 1024, u"GB"),
+ (1024 * 1024, u"MB"),
+ (1024, u"KB"))
def __init__(self):
- """Set attributes to None"""
+ u"""Set attributes to None"""
for attr in self.stat_attrs:
self.__dict__[attr] = None
def get_stat(self, attribute):
- """Get a statistic"""
+ u"""Get a statistic"""
return self.__dict__[attribute]
def set_stat(self, attr, value):
- """Set attribute to given value"""
+ u"""Set attribute to given value"""
self.__dict__[attr] = value
def increment_stat(self, attr):
- """Add 1 to value of attribute"""
+ u"""Add 1 to value of attribute"""
self.__dict__[attr] += 1
def get_total_dest_size_change(self):
- """Return total destination size change
+ u"""Return total destination size change
This represents the total increase in the size of the
duplicity destination directory, or None if not available.
@@ -103,25 +103,25 @@
return 0 # this needs to be re-done for duplicity
def get_stats_line(self, index, use_repr=1):
- """Return one line abbreviated version of full stats string"""
+ u"""Return one line abbreviated version of full stats string"""
file_attrs = [str(self.get_stat(a)) for a in self.stat_file_attrs]
if not index:
- filename = "."
+ filename = u"."
else:
filename = os.path.join(*index)
if use_repr:
# use repr to quote newlines in relative filename, then
# take of leading and trailing quote and quote spaces.
- filename = self.space_regex.sub("\\x20", repr(filename)[1:-1])
- return " ".join([filename, ] + file_attrs)
+ filename = self.space_regex.sub(u"\\x20", repr(filename)[1:-1])
+ return u" ".join([filename, ] + file_attrs)
def set_stats_from_line(self, line):
- """Set statistics from given line"""
+ u"""Set statistics from given line"""
def error():
- raise StatsException("Bad line '%s'" % line)
- if line[-1] == "\n":
+ raise StatsException(u"Bad line '%s'" % line)
+ if line[-1] == u"\n":
line = line[:-1]
- lineparts = line.split(" ")
+ lineparts = line.split(u" ")
if len(lineparts) < len(self.stat_file_attrs):
error()
for attr, val_string in zip(self.stat_file_attrs,
@@ -137,62 +137,62 @@
return self
def get_stats_string(self):
- """Return extended string printing out statistics"""
- return "%s%s%s" % (self.get_timestats_string(),
- self.get_filestats_string(),
- self.get_miscstats_string())
+ u"""Return extended string printing out statistics"""
+ return u"%s%s%s" % (self.get_timestats_string(),
+ self.get_filestats_string(),
+ self.get_miscstats_string())
def get_timestats_string(self):
- """Return portion of statistics string dealing with time"""
+ u"""Return portion of statistics string dealing with time"""
timelist = []
if self.StartTime is not None:
- timelist.append("StartTime %.2f (%s)\n" %
+ timelist.append(u"StartTime %.2f (%s)\n" %
(self.StartTime, dup_time.timetopretty(self.StartTime)))
if self.EndTime is not None:
- timelist.append("EndTime %.2f (%s)\n" %
+ timelist.append(u"EndTime %.2f (%s)\n" %
(self.EndTime, dup_time.timetopretty(self.EndTime)))
if self.ElapsedTime or (self.StartTime is not None and
self.EndTime is not None):
if self.ElapsedTime is None:
self.ElapsedTime = self.EndTime - self.StartTime
- timelist.append("ElapsedTime %.2f (%s)\n" %
+ timelist.append(u"ElapsedTime %.2f (%s)\n" %
(self.ElapsedTime, dup_time.inttopretty(self.ElapsedTime)))
- return "".join(timelist)
+ return u"".join(timelist)
def get_filestats_string(self):
- """Return portion of statistics string about files and bytes"""
+ u"""Return portion of statistics string about files and bytes"""
def fileline(stat_file_pair):
- """Return zero or one line of the string"""
+ u"""Return zero or one line of the string"""
attr, in_bytes = stat_file_pair
val = self.get_stat(attr)
if val is None:
- return ""
+ return u""
if in_bytes:
- return "%s %s (%s)\n" % (attr, val,
- self.get_byte_summary_string(val))
+ return u"%s %s (%s)\n" % (attr, val,
+ self.get_byte_summary_string(val))
else:
- return "%s %s\n" % (attr, val)
+ return u"%s %s\n" % (attr, val)
- return "".join(map(fileline, self.stat_file_pairs))
+ return u"".join(map(fileline, self.stat_file_pairs))
def get_miscstats_string(self):
- """Return portion of extended stat string about misc attributes"""
- misc_string = ""
+ u"""Return portion of extended stat string about misc attributes"""
+ misc_string = u""
tdsc = self.TotalDestinationSizeChange
if tdsc is not None:
- misc_string += ("TotalDestinationSizeChange %s (%s)\n" %
+ misc_string += (u"TotalDestinationSizeChange %s (%s)\n" %
(tdsc, self.get_byte_summary_string(tdsc)))
if self.Errors is not None:
- misc_string += "Errors %d\n" % self.Errors
+ misc_string += u"Errors %d\n" % self.Errors
return misc_string
def get_byte_summary_string(self, byte_count):
- """Turn byte count into human readable string like "7.23GB" """
+ u"""Turn byte count into human readable string like "7.23GB" """
if byte_count < 0:
- sign = "-"
+ sign = u"-"
byte_count = -byte_count
else:
- sign = ""
+ sign = u""
for abbrev_bytes, abbrev_string in self.byte_abbrev_list:
if byte_count >= abbrev_bytes:
@@ -204,26 +204,26 @@
precision = 1
else:
precision = 2
- return "%s%%.%df %s" % (sign, precision, abbrev_string) \
+ return u"%s%%.%df %s" % (sign, precision, abbrev_string) \
% (abbrev_count,)
byte_count = round(byte_count)
if byte_count == 1:
- return sign + "1 byte"
+ return sign + u"1 byte"
else:
- return "%s%d bytes" % (sign, byte_count)
+ return u"%s%d bytes" % (sign, byte_count)
def get_stats_logstring(self, title):
- """Like get_stats_string, but add header and footer"""
- header = "--------------[ %s ]--------------" % title
- footer = "-" * len(header)
- return "%s\n%s%s\n" % (header, self.get_stats_string(), footer)
+ u"""Like get_stats_string, but add header and footer"""
+ header = u"--------------[ %s ]--------------" % title
+ footer = u"-" * len(header)
+ return u"%s\n%s%s\n" % (header, self.get_stats_string(), footer)
def set_stats_from_string(self, s):
- """Initialize attributes from string, return self for convenience"""
+ u"""Initialize attributes from string, return self for convenience"""
def error(line):
- raise StatsException("Bad line '%s'" % line)
+ raise StatsException(u"Bad line '%s'" % line)
- for line in s.split("\n"):
+ for line in s.split(u"\n"):
if not line:
continue
line_parts = line.split()
@@ -247,20 +247,20 @@
return self
def write_stats_to_path(self, path):
- """Write statistics string to given path"""
- fin = path.open("w")
+ u"""Write statistics string to given path"""
+ fin = path.open(u"w")
fin.write(self.get_stats_string())
assert not fin.close()
def read_stats_from_path(self, path):
- """Set statistics from path, return self for convenience"""
- fp = path.open("r")
+ u"""Set statistics from path, return self for convenience"""
+ fp = path.open(u"r")
self.set_stats_from_string(fp.read())
assert not fp.close()
return self
def stats_equal(self, s):
- """Return true if s has same statistics as self"""
+ u"""Return true if s has same statistics as self"""
assert isinstance(s, StatsObj)
for attr in self.stat_file_attrs:
if self.get_stat(attr) != s.get_stat(attr):
@@ -268,7 +268,7 @@
return 1
def set_to_average(self, statobj_list):
- """Set self's attributes to average of those in statobj_list"""
+ u"""Set self's attributes to average of those in statobj_list"""
for attr in self.stat_attrs:
self.set_stat(attr, 0)
for statobj in statobj_list:
@@ -290,7 +290,7 @@
return self
def get_statsobj_copy(self):
- """Return new StatsObj object with same stats as self"""
+ u"""Return new StatsObj object with same stats as self"""
s = StatsObj()
for attr in self.stat_attrs:
s.set_stat(attr, self.get_stat(attr))
@@ -298,9 +298,9 @@
class StatsDeltaProcess(StatsObj):
- """Keep track of statistics during DirDelta process"""
+ u"""Keep track of statistics during DirDelta process"""
def __init__(self):
- """StatsDeltaProcess initializer - zero file attributes"""
+ u"""StatsDeltaProcess initializer - zero file attributes"""
StatsObj.__init__(self)
for attr in StatsObj.stat_file_attrs:
self.__dict__[attr] = 0
@@ -309,39 +309,39 @@
self.files_changed = []
def add_new_file(self, path):
- """Add stats of new file path to statistics"""
+ u"""Add stats of new file path to statistics"""
filesize = path.getsize()
self.SourceFiles += 1
# SourceFileSize is added-to incrementally as read
self.NewFiles += 1
self.NewFileSize += filesize
self.DeltaEntries += 1
- self.add_delta_entries_file(path, 'new')
+ self.add_delta_entries_file(path, b'new')
def add_changed_file(self, path):
- """Add stats of file that has changed since last backup"""
+ u"""Add stats of file that has changed since last backup"""
filesize = path.getsize()
self.SourceFiles += 1
# SourceFileSize is added-to incrementally as read
self.ChangedFiles += 1
self.ChangedFileSize += filesize
self.DeltaEntries += 1
- self.add_delta_entries_file(path, 'changed')
+ self.add_delta_entries_file(path, b'changed')
def add_deleted_file(self, path):
- """Add stats of file no longer in source directory"""
+ u"""Add stats of file no longer in source directory"""
self.DeletedFiles += 1 # can't add size since not available
self.DeltaEntries += 1
- self.add_delta_entries_file(path, 'deleted')
+ self.add_delta_entries_file(path, b'deleted')
def add_unchanged_file(self, path):
- """Add stats of file that hasn't changed since last backup"""
+ u"""Add stats of file that hasn't changed since last backup"""
filesize = path.getsize()
self.SourceFiles += 1
self.SourceFileSize += filesize
def close(self):
- """End collection of data, set EndTime"""
+ u"""End collection of data, set EndTime"""
self.EndTime = time.time()
def add_delta_entries_file(self, path, action_type):
=== modified file 'duplicity/util.py'
--- duplicity/util.py 2018-07-24 11:52:33 +0000
+++ duplicity/util.py 2018-09-24 21:19:45 +0000
@@ -19,7 +19,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-"""
+u"""
Miscellaneous utilities.
"""
@@ -51,23 +51,23 @@
# ToDo: Revisit this once we drop Python 2 support/the backport is complete
def fsencode(unicode_filename):
- """Convert a unicode filename to a filename encoded in the system encoding"""
+ u"""Convert a unicode filename to a filename encoded in the system encoding"""
# For paths, just use path.name rather than converting with this
# If we are not doing any cleverness with non-unicode filename bytes,
# encoding to system encoding is good enough
- return unicode_filename.encode(sys.getfilesystemencoding(), "replace")
+ return unicode_filename.encode(sys.getfilesystemencoding(), u"replace")
def fsdecode(bytes_filename):
- """Convert a filename encoded in the system encoding to unicode"""
+ u"""Convert a filename encoded in the system encoding to unicode"""
# For paths, just use path.uc_name rather than converting with this
# If we are not doing any cleverness with non-unicode filename bytes,
# decoding using system encoding is good enough. Use "ignore" as
# Linux paths can contain non-Unicode characters
- return bytes_filename.decode(globals.fsencoding, "replace")
+ return bytes_filename.decode(globals.fsencoding, u"replace")
def exception_traceback(limit=50):
- """
+ u"""
@return A string representation in typical Python format of the
currently active/raised exception.
"""
@@ -76,20 +76,20 @@
lines = traceback.format_tb(tb, limit)
lines.extend(traceback.format_exception_only(type, value))
- msg = "Traceback (innermost last):\n"
- msg = msg + "%-20s %s" % (string.join(lines[:-1], ""), lines[-1])
+ msg = u"Traceback (innermost last):\n"
+ msg = msg + u"%-20s %s" % (string.join(lines[:-1], u""), lines[-1])
- return msg.decode('unicode-escape', 'replace')
+ return msg.decode(u'unicode-escape', u'replace')
def escape(string):
- "Convert a (bytes) filename to a format suitable for logging (quoted utf8)"
- string = fsdecode(string).encode('unicode-escape', 'replace')
- return u"'%s'" % string.decode('utf8', 'replace')
+ u"Convert a (bytes) filename to a format suitable for logging (quoted utf8)"
+ string = fsdecode(string).encode(u'unicode-escape', u'replace')
+ return u"'%s'" % string.decode(u'utf8', u'replace')
def uindex(index):
- "Convert an index (a tuple of path parts) to unicode for printing"
+ u"Convert an index (a tuple of path parts) to unicode for printing"
if index:
return os.path.join(*list(map(fsdecode, index)))
else:
@@ -101,11 +101,11 @@
# non-ascii will cause a UnicodeDecodeError when implicitly decoding to
# unicode. So we decode manually, using the filesystem encoding.
# 99.99% of the time, this will be a fine encoding to use.
- return fsdecode(unicode(e).encode('utf-8'))
+ return fsdecode(unicode(e).encode(u'utf-8'))
def maybe_ignore_errors(fn):
- """
+ u"""
Execute fn. If the global configuration setting ignore_errors is
set to True, catch errors and log them but do continue (and return
None).
@@ -117,7 +117,7 @@
return fn()
except Exception as e:
if globals.ignore_errors:
- log.Warn(_("IGNORED_ERROR: Warning: ignoring error as requested: %s: %s")
+ log.Warn(_(u"IGNORED_ERROR: Warning: ignoring error as requested: %s: %s")
% (e.__class__.__name__, uexc(e)))
return None
else:
@@ -145,7 +145,7 @@
# yet. So we want to ignore ReadError exceptions, which are used to signal
# this.
try:
- tf = tarfile.TarFile("arbitrary", mode, fp)
+ tf = tarfile.TarFile(u"arbitrary", mode, fp)
# Now we cause TarFile to not cache TarInfo objects. It would end up
# consuming a lot of memory over the lifetime of our long-lasting
# signature files otherwise.
@@ -159,14 +159,14 @@
# Python versions before 2.6 ensure that directories end with /, but 2.6
# and later ensure they they *don't* have /. ::shrug:: Internally, we
# continue to use pre-2.6 method.
- if ti.isdir() and not ti.name.endswith("/"):
- return ti.name + "/"
+ if ti.isdir() and not ti.name.endswith(b"/"):
+ return ti.name + b"/"
else:
return ti.name
def ignore_missing(fn, filename):
- """
+ u"""
Execute fn on filename. Ignore ENOENT errors, otherwise raise exception.
@param fn: callable
@@ -184,7 +184,7 @@
@atexit.register
def release_lockfile():
if globals.lockfile:
- log.Debug(_("Releasing lockfile %s") % globals.lockpath)
+ log.Debug(_(u"Releasing lockfile %s") % globals.lockpath)
try:
globals.lockfile.release()
except Exception:
@@ -192,7 +192,7 @@
def copyfileobj(infp, outfp, byte_count=-1):
- """Copy byte_count bytes from infp to outfp, or all if byte_count < 0
+ u"""Copy byte_count bytes from infp to outfp, or all if byte_count < 0
Returns the number of bytes actually written (may be less than
byte_count if find eof. Does not close either fileobj.
@@ -221,7 +221,7 @@
def which(program):
- """
+ u"""
Return absolute path for program name.
Returns None if program not found.
"""
@@ -234,8 +234,8 @@
if is_exe(program):
return program
else:
- for path in os.getenv("PATH").split(os.pathsep):
- path = path.strip('"')
+ for path in os.getenv(u"PATH").split(os.pathsep):
+ path = path.strip(u'"')
exe_file = os.path.abspath(os.path.join(path, program))
if is_exe(exe_file):
return exe_file
=== modified file 'po/duplicity.pot'
--- po/duplicity.pot 2018-09-17 21:03:06 +0000
+++ po/duplicity.pot 2018-09-24 21:19:45 +0000
@@ -8,7 +8,7 @@
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: Kenneth Loafman <kenneth@xxxxxxxxxxx>\n"
-"POT-Creation-Date: 2018-09-17 15:38-0500\n"
+"POT-Creation-Date: 2018-09-24 11:46-0500\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@xxxxxx>\n"
=== modified file 'testing/test_code.py'
--- testing/test_code.py 2018-09-17 21:03:06 +0000
+++ testing/test_code.py 2018-09-24 21:19:45 +0000
@@ -113,12 +113,6 @@
os.path.join(_top_dir, u'setup.py'),
os.path.join(_top_dir, u'duplicity', u'__init__.py'),
os.path.join(_top_dir, u'duplicity', u'compilec.py'),
- os.path.join(_top_dir, u'duplicity', u'diffdir.py'),
- os.path.join(_top_dir, u'duplicity', u'manifest.py'),
- os.path.join(_top_dir, u'duplicity', u'patchdir.py'),
- os.path.join(_top_dir, u'duplicity', u'path.py'),
- os.path.join(_top_dir, u'duplicity', u'statistics.py'),
- os.path.join(_top_dir, u'duplicity', u'util.py'),
os.path.join(_top_dir, u'testing', u'overrides', u'gettext.py'),
os.path.join(_top_dir, u'testing', u'test_unadorned.py'),
os.path.join(_top_dir, u'testing', u'unit', u'test_statistics.py'),
Follow ups