duplicity-team team mailing list archive
-
duplicity-team team
-
Mailing list archive
-
Message #04662
[Merge] lp:~aaron-whitehouse/duplicity/08-ufn-to-fsdecode into lp:duplicity
Aaron Whitehouse has proposed merging lp:~aaron-whitehouse/duplicity/08-ufn-to-fsdecode into lp:duplicity.
Requested reviews:
duplicity-team (duplicity-team)
For more details, see:
https://code.launchpad.net/~aaron-whitehouse/duplicity/08-ufn-to-fsdecode/+merge/335537
* Change util.fsdecode to use "replace" instead of "ignore" (matching behaviour of util.ufn)
* Replace all uses of ufn with fsdecode
* Make backend.tobytes use util.fsencode rather than reimplementing
--
Your team duplicity-team is requested to review the proposed merge of lp:~aaron-whitehouse/duplicity/08-ufn-to-fsdecode into lp:duplicity.
=== modified file 'bin/duplicity'
--- bin/duplicity 2017-12-20 13:03:42 +0000
+++ bin/duplicity 2017-12-21 22:26:40 +0000
@@ -317,7 +317,7 @@
return # error querying file
if size != orig_size:
code_extra = "%s %d %d" % (util.escape(dest_filename), orig_size, size)
- log.FatalError(_("File %s was corrupted during upload.") % util.ufn(dest_filename),
+ log.FatalError(_("File %s was corrupted during upload.") % util.fsdecode(dest_filename),
log.ErrorCode.volume_wrong_size, code_extra)
def put(tdp, dest_filename, vol_num):
@@ -694,7 +694,7 @@
for path in path_iter:
if path.difftype != "deleted":
user_info = u"%s %s" % (dup_time.timetopretty(path.getmtime()),
- util.ufn(path.get_relative_path()))
+ util.fsdecode(path.get_relative_path()))
log_info = "%s %s %s" % (dup_time.timetostring(path.getmtime()),
util.escape(path.get_relative_path()),
path.type)
@@ -718,7 +718,7 @@
restore_get_patched_rop_iter(col_stats)):
if globals.restore_dir:
log.FatalError(_("%s not found in archive - no files restored.")
- % (util.ufn(globals.restore_dir)),
+ % (util.fsdecode(globals.restore_dir)),
log.ErrorCode.restore_dir_not_found)
else:
log.FatalError(_("No files found in archive - nothing restored."),
@@ -790,7 +790,7 @@
log.FatalError("%s\n %s\n %s\n %s\n" %
(_("Invalid data - %s hash mismatch for file:") %
hash_pair[0],
- util.ufn(filename),
+ util.fsdecode(filename),
_("Calculated hash: %s") % calculated_hash,
_("Manifest hash: %s") % hash_pair[1]),
log.ErrorCode.mismatched_hash)
@@ -892,7 +892,7 @@
log.Warn(_("No extraneous files found, nothing deleted in cleanup."))
return
- filestr = u"\n".join(map(util.ufn, extraneous))
+ filestr = u"\n".join(map(util.fsdecode, extraneous))
if globals.force:
log.Notice(ngettext("Deleting this file from backend:",
"Deleting these files from backend:",
@@ -1196,11 +1196,11 @@
del_name = globals.archive_dir_path.append(fn).name
log.Notice(_("Deleting local %s (not authoritative at backend).") %
- util.ufn(del_name))
+ util.fsdecode(del_name))
try:
util.ignore_missing(os.unlink, del_name)
except Exception as e:
- log.Warn(_("Unable to delete %s: %s") % (util.ufn(del_name),
+ log.Warn(_("Unable to delete %s: %s") % (util.fsdecode(del_name),
util.uexc(e)))
def copy_to_local(fn):
@@ -1232,7 +1232,7 @@
else:
name = None
log.FatalError(_("Failed to read %s: %s") %
- (util.ufn(name), sys.exc_info()),
+ (util.fsdecode(name), sys.exc_info()),
log.ErrorCode.generic)
if not res.data:
self.fileobj.close()
@@ -1245,7 +1245,7 @@
def get_footer(self):
return ""
- log.Notice(_("Copying %s to local cache.") % util.ufn(fn))
+ log.Notice(_("Copying %s to local cache.") % util.fsdecode(fn))
pr, loc_name, rem_name = resolve_basename(fn)
@@ -1311,10 +1311,10 @@
else:
if local_missing:
log.Notice(_("Sync would copy the following from remote to local:") +
- u"\n" + u"\n".join(map(util.ufn, local_missing)))
+ u"\n" + u"\n".join(map(util.fsdecode, local_missing)))
if local_spurious:
log.Notice(_("Sync would remove the following spurious local files:") +
- u"\n" + u"\n".join(map(util.ufn, local_spurious)))
+ u"\n" + u"\n".join(map(util.fsdecode, local_spurious)))
def check_last_manifest(col_stats):
@@ -1390,7 +1390,7 @@
"""
log.Log(u'=' * 80, verbosity)
log.Log(u"duplicity $version ($reldate)", verbosity)
- log.Log(u"Args: %s" % util.ufn(' '.join(sys.argv)), verbosity)
+ log.Log(u"Args: %s" % util.fsdecode(' '.join(sys.argv)), verbosity)
log.Log(u' '.join(platform.uname()), verbosity)
log.Log(u"%s %s" % (sys.executable or sys.platform, sys.version), verbosity)
log.Log(u'=' * 80, verbosity)
=== modified file 'bin/rdiffdir'
--- bin/rdiffdir 2015-05-01 13:56:13 +0000
+++ bin/rdiffdir 2017-12-21 22:26:40 +0000
@@ -58,7 +58,7 @@
try:
return open(filename, "r")
except IOError:
- log.FatalError(_("Error opening file %s") % util.ufn(filename))
+ log.FatalError(_("Error opening file %s") % util.fsdecode(filename))
try:
optlist, args = getopt.getopt(arglist, "v:Vz",
@@ -126,7 +126,7 @@
pass
else:
log.FatalError(_("File %s already exists, will not "
- "overwrite.") % util.ufn(filename))
+ "overwrite.") % util.fsdecode(filename))
def get_action(args):
=== modified file 'duplicity/backend.py'
--- duplicity/backend.py 2017-12-13 22:43:36 +0000
+++ duplicity/backend.py 2017-12-21 22:26:40 +0000
@@ -511,7 +511,7 @@
def __do_put(self, source_path, remote_filename):
if hasattr(self.backend, '_put'):
- log.Info(_("Writing %s") % util.ufn(remote_filename))
+ log.Info(_("Writing %s") % util.fsdecode(remote_filename))
self.backend._put(source_path, remote_filename)
else:
raise NotImplementedError()
@@ -568,7 +568,7 @@
# There shouldn't be any encoding errors for files we care
# about, since duplicity filenames are ascii. But user files
# may be in the same directory. So just replace characters.
- return filename.encode(globals.fsencoding, 'replace')
+ return util.fsencode(filename)
else:
return filename
=== modified file 'duplicity/collections.py'
--- duplicity/collections.py 2017-12-13 22:43:36 +0000
+++ duplicity/collections.py 2017-12-21 22:26:40 +0000
@@ -168,7 +168,7 @@
try:
self.backend.delete(rfn)
except Exception:
- log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn])
+ log.Debug(_("BackupSet.delete: missing %s") % [util.fsdecode(f) for f in rfn])
pass
if self.action not in ["collection-status", "replicate"]:
local_filename_list = globals.archive_dir_path.listdir()
@@ -182,7 +182,7 @@
try:
globals.archive_dir_path.append(lfn).delete()
except Exception:
- log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in lfn])
+ log.Debug(_("BackupSet.delete: missing %s") % [util.fsdecode(f) for f in lfn])
pass
util.release_lockfile()
@@ -194,7 +194,7 @@
if self.remote_manifest_name:
filelist.append(self.remote_manifest_name)
filelist.extend(self.volume_name_dict.values())
- return u"[%s]" % u", ".join(map(util.ufn, filelist))
+ return u"[%s]" % u", ".join(map(util.fsdecode, filelist))
def get_timestr(self):
"""
@@ -248,10 +248,10 @@
manifest_buffer = self.backend.get_data(self.remote_manifest_name)
except GPGError as message:
log.Error(_("Error processing remote manifest (%s): %s") %
- (util.ufn(self.remote_manifest_name), util.uexc(message)))
+ (util.fsdecode(self.remote_manifest_name), util.uexc(message)))
return None
log.Info(_("Processing remote manifest %s (%s)") % (
- util.ufn(self.remote_manifest_name), len(manifest_buffer)))
+ util.fsdecode(self.remote_manifest_name), len(manifest_buffer)))
return manifest.Manifest().from_string(manifest_buffer)
def get_manifest(self):
@@ -789,7 +789,7 @@
"Warning, found the following local orphaned "
"signature files:",
len(self.local_orphaned_sig_names)) + u"\n" +
- u"\n".join(map(util.ufn, self.local_orphaned_sig_names)),
+ u"\n".join(map(util.fsdecode, self.local_orphaned_sig_names)),
log.WarningCode.orphaned_sig)
if self.remote_orphaned_sig_names:
@@ -798,7 +798,7 @@
"Warning, found the following remote orphaned "
"signature files:",
len(self.remote_orphaned_sig_names)) + u"\n" +
- u"\n".join(map(util.ufn, self.remote_orphaned_sig_names)),
+ u"\n".join(map(util.fsdecode, self.remote_orphaned_sig_names)),
log.WarningCode.orphaned_sig)
if self.all_sig_chains and sig_chain_warning and not self.matched_chain_pair:
@@ -828,7 +828,7 @@
missing files.
"""
log.Debug(_("Extracting backup chains from list of files: %s")
- % [util.ufn(f) for f in filename_list])
+ % [util.fsdecode(f) for f in filename_list])
# First put filenames in set form
sets = []
@@ -838,15 +838,15 @@
"""
for set in sets:
if set.add_filename(filename):
- log.Debug(_("File %s is part of known set") % (util.ufn(filename),))
+ log.Debug(_("File %s is part of known set") % (util.fsdecode(filename),))
break
else:
- log.Debug(_("File %s is not part of a known set; creating new set") % (util.ufn(filename),))
+ log.Debug(_("File %s is not part of a known set; creating new set") % (util.fsdecode(filename),))
new_set = BackupSet(self.backend, self.action)
if new_set.add_filename(filename):
sets.append(new_set)
else:
- log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.ufn(filename))
+ log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.fsdecode(filename))
for f in filename_list:
add_to_sets(f)
=== modified file 'duplicity/diffdir.py'
--- duplicity/diffdir.py 2017-12-13 22:43:36 +0000
+++ duplicity/diffdir.py 2017-12-21 22:26:40 +0000
@@ -105,7 +105,7 @@
index_string = sig_path.get_relative_path()
else:
assert 0, "Both new and sig are None for some reason"
- log.Warn(_("Error %s getting delta for %s") % (str(exc), util.ufn(index_string)))
+ log.Warn(_("Error %s getting delta for %s") % (str(exc), util.fsdecode(index_string)))
return None
@@ -165,14 +165,14 @@
if new_path and stats:
stats.add_new_file(new_path)
log.Info(_("A %s") %
- (util.ufn(delta_path.get_relative_path())),
+ (util.fsdecode(delta_path.get_relative_path())),
log.InfoCode.diff_file_new,
util.escape(delta_path.get_relative_path()))
else:
if new_path and stats:
stats.add_changed_file(new_path)
log.Info(_("M %s") %
- (util.ufn(delta_path.get_relative_path())),
+ (util.fsdecode(delta_path.get_relative_path())),
log.InfoCode.diff_file_changed,
util.escape(delta_path.get_relative_path()))
@@ -202,7 +202,7 @@
if sig_path and sig_path.exists() and sig_path.index != ():
# but signature says it did
log.Info(_("D %s") %
- (util.ufn(sig_path.get_relative_path())),
+ (util.fsdecode(sig_path.get_relative_path())),
log.InfoCode.diff_file_deleted,
util.escape(sig_path.get_relative_path()))
if sigTarFile:
=== modified file 'duplicity/lazy.py'
--- duplicity/lazy.py 2017-03-02 22:38:47 +0000
+++ duplicity/lazy.py 2017-12-21 22:26:40 +0000
@@ -410,7 +410,7 @@
filename = os.path.join(*self.index) # pylint: disable=not-an-iterable
else:
filename = "."
- log.Warn(_("Error '%s' processing %s") % (exc, util.ufn(filename)),
+ log.Warn(_("Error '%s' processing %s") % (exc, util.fsdecode(filename)),
log.WarningCode.cannot_process,
util.escape(filename))
@@ -420,6 +420,6 @@
index_str = "."
else:
index_str = os.path.join(*index)
- log.Warn(_("Skipping %s because of previous error") % util.ufn(index_str),
+ log.Warn(_("Skipping %s because of previous error") % util.fsdecode(index_str),
log.WarningCode.process_skipped,
util.escape(index_str))
=== modified file 'duplicity/patchdir.py'
--- duplicity/patchdir.py 2017-07-11 14:55:38 +0000
+++ duplicity/patchdir.py 2017-12-21 22:26:40 +0000
@@ -77,12 +77,12 @@
ITR = IterTreeReducer(PathPatcher, [base_path])
for basis_path, diff_ropath in collated:
if basis_path:
- log.Info(_("Patching %s") % (util.ufn(basis_path.get_relative_path())),
+ log.Info(_("Patching %s") % (util.fsdecode(basis_path.get_relative_path())),
log.InfoCode.patch_file_patching,
util.escape(basis_path.get_relative_path()))
ITR(basis_path.index, basis_path, diff_ropath)
else:
- log.Info(_("Patching %s") % (util.ufn(diff_ropath.get_relative_path())),
+ log.Info(_("Patching %s") % (util.fsdecode(diff_ropath.get_relative_path())),
log.InfoCode.patch_file_patching,
util.escape(diff_ropath.get_relative_path()))
ITR(diff_ropath.index, basis_path, diff_ropath)
@@ -165,7 +165,7 @@
"\\2", tiname)
if num_subs != 1:
raise PatchDirException(u"Unrecognized diff entry %s" %
- util.ufn(tiname))
+ util.fsdecode(tiname))
else:
difftype = prefix[:-1] # strip trailing /
name = tiname[len(prefix):]
@@ -175,14 +175,14 @@
break
else:
raise PatchDirException(u"Unrecognized diff entry %s" %
- util.ufn(tiname))
+ util.fsdecode(tiname))
if name == "." or name == "":
index = ()
else:
index = tuple(name.split("/"))
if '..' in index:
raise PatchDirException(u"Tar entry %s contains '..'. Security "
- "violation" % util.ufn(tiname))
+ "violation" % util.fsdecode(tiname))
return (index, difftype, multivol)
@@ -528,7 +528,7 @@
except Exception as e:
filename = normalized[-1].get_ropath().get_relative_path()
log.Warn(_("Error '%s' patching %s") %
- (util.uexc(e), util.ufn(filename)),
+ (util.uexc(e), util.fsdecode(filename)),
log.WarningCode.cannot_process,
util.escape(filename))
@@ -603,7 +603,7 @@
def can_fast_process(self, index, ropath):
"""Can fast process (no recursion) if ropath isn't a directory"""
log.Info(_("Writing %s of type %s") %
- (util.ufn(ropath.get_relative_path()), ropath.type),
+ (util.fsdecode(ropath.get_relative_path()), ropath.type),
log.InfoCode.patch_file_writing,
"%s %s" % (util.escape(ropath.get_relative_path()), ropath.type))
return not ropath.isdir()
=== modified file 'duplicity/path.py'
--- duplicity/path.py 2017-12-13 22:43:36 +0000
+++ duplicity/path.py 2017-12-21 22:26:40 +0000
@@ -92,7 +92,7 @@
elif stat.S_ISFIFO(st_mode):
self.type = "fifo"
elif stat.S_ISSOCK(st_mode):
- raise PathException(util.ufn(self.get_relative_path()) +
+ raise PathException(util.fsdecode(self.get_relative_path()) +
u"is a socket, unsupported by tar")
self.type = "sock"
elif stat.S_ISCHR(st_mode):
@@ -109,7 +109,7 @@
os.minor(self.stat.st_rdev))
except:
log.Warn(_("Warning: %s invalid devnums (0x%X), treating as (0, 0).")
- % (util.ufn(self.get_relative_path()), self.stat.st_rdev))
+ % (util.fsdecode(self.get_relative_path()), self.stat.st_rdev))
self.devnums = (0, 0)
def blank(self):
@@ -295,7 +295,7 @@
ti.uid, ti.gid = self.stat.st_uid, self.stat.st_gid
if self.stat.st_mtime < 0:
log.Warn(_("Warning: %s has negative mtime, treating as 0.")
- % (util.ufn(self.get_relative_path())))
+ % (util.fsdecode(self.get_relative_path())))
ti.mtime = 0
else:
ti.mtime = int(self.stat.st_mtime)
@@ -359,7 +359,7 @@
"""
def log_diff(log_string):
log_str = _("Difference found:") + u" " + log_string
- log.Notice(log_str % (util.ufn(self.get_relative_path())))
+ log.Notice(log_str % (util.fsdecode(self.get_relative_path())))
if include_data is False:
return True
=== modified file 'duplicity/selection.py'
--- duplicity/selection.py 2017-12-13 22:43:36 +0000
+++ duplicity/selection.py 2017-12-21 22:26:40 +0000
@@ -116,15 +116,15 @@
try:
mode = os.stat(fullpath)[stat.ST_MODE]
if stat.S_ISSOCK(mode):
- log.Info(_("Skipping socket %s") % util.ufn(fullpath),
+ log.Info(_("Skipping socket %s") % util.fsdecode(fullpath),
log.InfoCode.skipping_socket,
util.escape(fullpath))
else:
- log.Warn(_("Error initializing file %s") % util.ufn(fullpath),
+ log.Warn(_("Error initializing file %s") % util.fsdecode(fullpath),
log.WarningCode.cannot_iterate,
util.escape(fullpath))
except OSError:
- log.Warn(_("Error accessing possibly locked file %s") % util.ufn(fullpath),
+ log.Warn(_("Error accessing possibly locked file %s") % util.fsdecode(fullpath),
log.WarningCode.cannot_stat, util.escape(fullpath))
return None
=== modified file 'duplicity/tempdir.py'
--- duplicity/tempdir.py 2016-06-28 21:03:46 +0000
+++ duplicity/tempdir.py 2017-12-21 22:26:40 +0000
@@ -129,7 +129,7 @@
temproot = _initialSystemTempRoot
self.__dir = tempfile.mkdtemp("-tempdir", "duplicity-", temproot)
- log.Info(_("Using temporary directory %s") % util.ufn(self.__dir))
+ log.Info(_("Using temporary directory %s") % util.fsdecode(self.__dir))
# number of mktemp()/mkstemp() calls served so far
self.__tempcount = 0
@@ -173,7 +173,7 @@
suffix = "-%d" % (self.__tempcount,)
filename = tempfile.mktemp(suffix, "mktemp-", self.__dir)
- log.Debug(_("Registering (mktemp) temporary file %s") % util.ufn(filename))
+ log.Debug(_("Registering (mktemp) temporary file %s") % util.fsdecode(filename))
self.__pending[filename] = None
finally:
self.__lock.release()
@@ -195,7 +195,7 @@
suffix = "-%d" % (self.__tempcount,)
fd, filename = tempfile.mkstemp(suffix, "mkstemp-", self.__dir)
- log.Debug(_("Registering (mkstemp) temporary file %s") % util.ufn(filename))
+ log.Debug(_("Registering (mkstemp) temporary file %s") % util.fsdecode(filename))
self.__pending[filename] = None
finally:
self.__lock.release()
@@ -227,10 +227,10 @@
self.__lock.acquire()
try:
if fname in self.__pending:
- log.Debug(_("Forgetting temporary file %s") % util.ufn(fname))
+ log.Debug(_("Forgetting temporary file %s") % util.fsdecode(fname))
del(self.__pending[fname])
else:
- log.Warn(_("Attempt to forget unknown tempfile %s - this is probably a bug.") % util.ufn(fname))
+ log.Warn(_("Attempt to forget unknown tempfile %s - this is probably a bug.") % util.fsdecode(fname))
pass
finally:
self.__lock.release()
@@ -249,16 +249,16 @@
if self.__dir is not None:
for file in self.__pending.keys():
try:
- log.Debug(_("Removing still remembered temporary file %s") % util.ufn(file))
+ log.Debug(_("Removing still remembered temporary file %s") % util.fsdecode(file))
util.ignore_missing(os.unlink, file)
except Exception:
- log.Info(_("Cleanup of temporary file %s failed") % util.ufn(file))
+ log.Info(_("Cleanup of temporary file %s failed") % util.fsdecode(file))
pass
try:
os.rmdir(self.__dir)
except Exception:
log.Warn(_("Cleanup of temporary directory %s failed - "
- "this is probably a bug.") % util.ufn(self.__dir))
+ "this is probably a bug.") % util.fsdecode(self.__dir))
pass
self.__pending = None
self.__dir = None
=== modified file 'duplicity/util.py'
--- duplicity/util.py 2017-12-01 22:39:33 +0000
+++ duplicity/util.py 2017-12-21 22:26:40 +0000
@@ -59,11 +59,11 @@
def fsdecode(bytes_filename):
"""Convert a filename encoded in the system encoding to unicode"""
- # For paths, just use path.uname rather than converting with this
+ # For paths, just use path.uc_name rather than converting with this
# If we are not doing any cleverness with non-unicode filename bytes,
# decoding using system encoding is good enough. Use "replace" as
# Linux paths can contain non-Unicode characters
- return bytes_filename.decode(globals.fsencoding, "ignore")
+ return bytes_filename.decode(globals.fsencoding, "replace")
def exception_traceback(limit=50):
@@ -84,20 +84,14 @@
def escape(string):
"Convert a (bytes) filename to a format suitable for logging (quoted utf8)"
- string = ufn(string).encode('unicode-escape', 'replace')
+ string = fsdecode(string).encode('unicode-escape', 'replace')
return u"'%s'" % string.decode('utf8', 'replace')
-def ufn(filename):
- """Convert a (bytes) filename to unicode for printing"""
- # Note: path.uc_name is preferable for paths
- return filename.decode(globals.fsencoding, "replace")
-
-
def uindex(index):
"Convert an index (a tuple of path parts) to unicode for printing"
if index:
- return os.path.join(*list(map(ufn, index)))
+ return os.path.join(*list(map(fsdecode, index)))
else:
return u'.'
@@ -107,7 +101,7 @@
# non-ascii will cause a UnicodeDecodeError when implicitly decoding to
# unicode. So we decode manually, using the filesystem encoding.
# 99.99% of the time, this will be a fine encoding to use.
- return ufn(unicode(e).encode('utf-8'))
+ return fsdecode(unicode(e).encode('utf-8'))
def maybe_ignore_errors(fn):
Follow ups