duplicity-team team mailing list archive
-
duplicity-team team
-
Mailing list archive
-
Message #01879
[Merge] lp:~mterry/duplicity/encoding into lp:duplicity
Michael Terry has proposed merging lp:~mterry/duplicity/encoding into lp:duplicity.
Requested reviews:
duplicity-team (duplicity-team)
For more details, see:
https://code.launchpad.net/~mterry/duplicity/encoding/+merge/200168
So this branch hopefully fixes two filename encoding issues:
1) Users in bug 989496 were noticing a UnicodeEncodeError exception which happens (as far as I can tell) because some backends (like webdav) are returning unicode filenames from list(). When these filenames are combined with the utf8 translations of log messages, either (A) the default ascii encoding can't handle promoting the utf8 bytes or -- if there aren't any utf8 bytes in the translation -- (B) the resulting unicode string raises an error later when log.py tries to upgrade the string again to unicode for printing.
This fix is largely implemented by adding a wrapper for backend list() implementations. This wrapper ensures that duplicity internals always see a byte string. (I'd like to eventually use this same wrapping strategy to implement generic retry support without backends having to add any logic, but that's just a thought for the future.)
That is, the fix for issue #1 is completely inside backend.py and the changes to backends/*.py.
2) The rest of the invasive changes deal with filenames that may not be valid utf8. This is much rarer, but possible. For proper handling of this, we need to print using unicode, and convert filenames from the system filename encoding to unicode, gracefully handling conversion errors. Some of the filenames we print are remote names. Who knows what encoding they are in; it could be different than the system filename encoding. 99% of the time, everything will be utf8 and we're fine. If we do get conversion errors, the only effect should be some question mark characters in duplicity logging output.
I tried to convert as much of the actual codebase as I could to use unicode for printing. But I stopped short of adding an assert in log.py to enforce unicode, because I didn't want to go through all the backend code and manually adjust those bits without being able to test each one.
--
https://code.launchpad.net/~mterry/duplicity/encoding/+merge/200168
Your team duplicity-team is requested to review the proposed merge of lp:~mterry/duplicity/encoding into lp:duplicity.
=== modified file 'bin/duplicity'
--- bin/duplicity 2013-11-17 17:23:07 +0000
+++ bin/duplicity 2013-12-30 03:37:33 +0000
@@ -237,7 +237,7 @@
if tarblock_iter.previous_index > last_index:
log.Warn(_("File %s complete in backup set.\n"
"Continuing restart on file %s.") %
- ("/".join(last_index), "/".join(tarblock_iter.previous_index)),
+ (util.uindex(last_index), util.uindex(tarblock_iter.previous_index)),
log.ErrorCode.restart_file_not_found)
# We went too far! Stuff the data back into place before restarting
tarblock_iter.queue_index_data(iter_result)
@@ -246,7 +246,7 @@
except StopIteration:
log.Warn(_("File %s missing in backup set.\n"
"Continuing restart on file %s.") %
- ("/".join(last_index), "/".join(tarblock_iter.previous_index)),
+ (util.uindex(last_index), util.uindex(tarblock_iter.previous_index)),
log.ErrorCode.restart_file_not_found)
@@ -295,7 +295,7 @@
return # error querying file
if size != orig_size:
code_extra = "%s %d %d" % (util.escape(dest_filename), orig_size, size)
- log.FatalError(_("File %s was corrupted during upload.") % dest_filename,
+ log.FatalError(_("File %s was corrupted during upload.") % util.ufn(dest_filename),
log.ErrorCode.volume_wrong_size, code_extra)
def put(tdp, dest_filename, vol_num):
@@ -352,9 +352,9 @@
validate_encryption_settings(globals.restart.last_backup, mf)
mf.fh = man_outfp
last_block = globals.restart.last_block
- log.Notice("Restarting after volume %s, file %s, block %s" %
+ log.Notice(_("Restarting after volume %s, file %s, block %s") %
(globals.restart.start_vol,
- "/".join(globals.restart.last_index),
+ util.uindex(globals.restart.last_index),
globals.restart.last_block))
vol_num = globals.restart.start_vol
restart_position_iterator(tarblock_iter)
@@ -419,7 +419,7 @@
(tdp, dest_filename, vol_num)))
# Log human-readable version as well as raw numbers for machine consumers
- log.Progress('Processed volume %d' % vol_num, diffdir.stats.SourceFileSize)
+ log.Progress(_('Processed volume %d') % vol_num, diffdir.stats.SourceFileSize)
# Snapshot (serialize) progress now as a Volume has been completed. This is always the last restore point
# when it comes to restart a failed backup
if globals.progress:
@@ -666,8 +666,8 @@
path_iter = diffdir.get_combined_path_iter(sig_chain.get_fileobjs(time))
for path in path_iter:
if path.difftype != "deleted":
- user_info = "%s %s" % (dup_time.timetopretty(path.getmtime()),
- path.get_relative_path())
+ user_info = u"%s %s" % (dup_time.timetopretty(path.getmtime()),
+ util.ufn(path.get_relative_path()))
log_info = "%s %s %s" % (dup_time.timetostring(path.getmtime()),
util.escape(path.get_relative_path()),
path.type)
@@ -752,11 +752,11 @@
verified, hash_pair, calculated_hash = restore_check_hash(volume_info, tdp)
if not verified:
log.FatalError("%s\n %s\n %s\n %s\n" %
- (_("Invalid data - %s hash mismatch for file:") % hash_pair[0],
- filename,
- _("Calculated hash: %s") % calculated_hash,
- _("Manifest hash: %s") % hash_pair[1]),
- log.ErrorCode.mismatched_hash)
+ (_("Invalid data - %s hash mismatch for file:") % hash_pair[0],
+ util.ufn(filename),
+ _("Calculated hash: %s") % calculated_hash,
+ _("Manifest hash: %s") % hash_pair[1]),
+ log.ErrorCode.mismatched_hash)
fileobj = tdp.filtered_open_with_delete("rb")
if parseresults.encrypted and globals.gpg_profile.sign_key:
@@ -824,10 +824,10 @@
# Unfortunately, ngettext doesn't handle multiple number variables, so we
# split up the string.
log.Notice(_("Verify complete: %s, %s.") %
- (gettext.ngettext("%d file compared",
- "%d files compared", total_count) % total_count,
- gettext.ngettext("%d difference found",
- "%d differences found", diff_count) % diff_count))
+ (ngettext("%d file compared",
+ "%d files compared", total_count) % total_count,
+ ngettext("%d difference found",
+ "%d differences found", diff_count) % diff_count))
if diff_count >= 1:
exit_val = 1
@@ -848,12 +848,12 @@
log.Warn(_("No extraneous files found, nothing deleted in cleanup."))
return
- filestr = "\n".join(extraneous)
+ filestr = u"\n".join(map(util.ufn, extraneous))
if globals.force:
- log.Notice(gettext.ngettext("Deleting this file from backend:",
- "Deleting these files from backend:",
- len(extraneous))
- + "\n" + filestr)
+ log.Notice(ngettext("Deleting this file from backend:",
+ "Deleting these files from backend:",
+ len(extraneous))
+ + u"\n" + filestr)
if not globals.dry_run:
col_stats.backend.delete(ext_remote)
for fn in ext_local:
@@ -862,10 +862,10 @@
except Exception:
pass
else:
- log.Notice(gettext.ngettext("Found the following file to delete:",
- "Found the following files to delete:",
- len(extraneous))
- + "\n" + filestr + "\n"
+ log.Notice(ngettext("Found the following file to delete:",
+ "Found the following files to delete:",
+ len(extraneous))
+ + u"\n" + filestr + u"\n"
+ _("Run duplicity again with the --force option to actually delete."))
@@ -924,9 +924,9 @@
log.Notice(_("No old backup sets found, nothing deleted."))
return
if globals.force:
- log.Notice(gettext.ngettext("Deleting backup chain at time:",
- "Deleting backup chains at times:",
- len(chainlist)) +
+ log.Notice(ngettext("Deleting backup chain at time:",
+ "Deleting backup chains at times:",
+ len(chainlist)) +
"\n" + chain_times_str(chainlist))
# Add signature files too, since they won't be needed anymore
chainlist += col_stats.get_signature_chains_older_than(globals.remove_time)
@@ -948,9 +948,9 @@
chain.delete(keep_full=globals.remove_all_inc_of_but_n_full_mode)
col_stats.set_values(sig_chain_warning=None)
else:
- log.Notice(gettext.ngettext("Found old backup chain at the following time:",
- "Found old backup chains at the following times:",
- len(chainlist)) +
+ log.Notice(ngettext("Found old backup chain at the following time:",
+ "Found old backup chains at the following times:",
+ len(chainlist)) +
"\n" + chain_times_str(chainlist) + "\n" +
_("Rerun command with --force option to actually delete."))
@@ -1029,11 +1029,11 @@
def remove_local(fn):
del_name = globals.archive_dir.append(fn).name
- log.Notice(_("Deleting local %s (not authoritative at backend).") % del_name)
+ log.Notice(_("Deleting local %s (not authoritative at backend).") % util.ufn(del_name))
try:
util.ignore_missing(os.unlink, del_name)
except Exception, e:
- log.Warn(_("Unable to delete %s: %s") % (del_name, str(e)))
+ log.Warn(_("Unable to delete %s: %s") % (util.ufn(del_name), str(e)))
def copy_to_local(fn):
"""
@@ -1062,7 +1062,7 @@
else:
name = None
log.FatalError(_("Failed to read %s: %s") %
- (name, sys.exc_info()),
+ (util.ufn(name), sys.exc_info()),
log.ErrorCode.generic)
if not res.data:
self.fileobj.close()
@@ -1075,7 +1075,7 @@
def get_footer(self):
return ""
- log.Notice(_("Copying %s to local cache.") % fn)
+ log.Notice(_("Copying %s to local cache.") % util.ufn(fn))
pr, loc_name, rem_name = resolve_basename(fn)
@@ -1142,10 +1142,10 @@
else:
if local_missing:
log.Notice(_("Sync would copy the following from remote to local:")
- + "\n" + "\n".join(local_missing))
+ + u"\n" + u"\n".join(map(util.ufn, local_missing)))
if local_spurious:
log.Notice(_("Sync would remove the following spurious local files:")
- + "\n" + "\n".join(local_spurious))
+ + u"\n" + u"\n".join(map(util.ufn, local_spurious)))
def check_last_manifest(col_stats):
@@ -1218,12 +1218,12 @@
"""
log Python, duplicity, and system versions
"""
- log.Log('=' * 80, verbosity)
- log.Log("duplicity $version ($reldate)", verbosity)
- log.Log("Args: %s" % (' '.join(sys.argv),), verbosity)
- log.Log(' '.join(platform.uname()), verbosity)
- log.Log("%s %s" % (sys.executable or sys.platform, sys.version), verbosity)
- log.Log('=' * 80, verbosity)
+ log.Log(u'=' * 80, verbosity)
+ log.Log(u"duplicity $version ($reldate)", verbosity)
+ log.Log(u"Args: %s" % (' '.join(sys.argv),), verbosity)
+ log.Log(u' '.join(platform.uname()), verbosity)
+ log.Log(u"%s %s" % (sys.executable or sys.platform, sys.version), verbosity)
+ log.Log(u'=' * 80, verbosity)
class Restart:
@@ -1293,14 +1293,14 @@
# duplicity crashes when PYTHONOPTIMIZE is set, so check
# and refuse to run if it is set.
if 'PYTHONOPTIMIZE' in os.environ:
- log.FatalError(
+ log.FatalError(_(
"""
PYTHONOPTIMIZE in the environment causes duplicity to fail to
recognize its own backups. Please remove PYTHONOPTIMIZE from
the environment and rerun the backup.
See https://bugs.launchpad.net/duplicity/+bug/931175
-""", log.ErrorCode.pythonoptimize_set)
+"""), log.ErrorCode.pythonoptimize_set)
# if python is run setuid, it's only partway set,
# so make sure to run with euid/egid of root
@@ -1427,7 +1427,7 @@
# symmetric key
if (globals.gpg_profile.signing_passphrase and
globals.gpg_profile.passphrase != globals.gpg_profile.signing_passphrase):
- log.FatalError("When using symmetric encryption, the signing passphrase must equal the encryption passphrase.", log.ErrorCode.user_error)
+ log.FatalError(_("When using symmetric encryption, the signing passphrase must equal the encryption passphrase."), log.ErrorCode.user_error)
if action == "full":
full_backup(col_stats)
@@ -1487,8 +1487,8 @@
# For gpg errors, don't show an ugly stack trace by
# default. But do with sufficient verbosity.
log.Info(_("GPG error detail: %s")
- % (''.join(traceback.format_exception(*sys.exc_info()))))
- log.FatalError("%s: %s" % (e.__class__.__name__, e.args[0]),
+ % (u''.join(traceback.format_exception(*sys.exc_info()))))
+ log.FatalError(u"%s: %s" % (e.__class__.__name__, e.args[0]),
log.ErrorCode.gpg_failed,
e.__class__.__name__)
@@ -1496,8 +1496,8 @@
# For user errors, don't show an ugly stack trace by
# default. But do with sufficient verbosity.
log.Info(_("User error detail: %s")
- % (''.join(traceback.format_exception(*sys.exc_info()))))
- log.FatalError("%s: %s" % (e.__class__.__name__, str(e)),
+ % (u''.join(traceback.format_exception(*sys.exc_info()))))
+ log.FatalError(u"%s: %s" % (e.__class__.__name__, unicode(e)),
log.ErrorCode.user_error,
e.__class__.__name__)
@@ -1505,18 +1505,18 @@
# For backend errors, don't show an ugly stack trace by
# default. But do with sufficient verbosity.
log.Info(_("Backend error detail: %s")
- % (''.join(traceback.format_exception(*sys.exc_info()))))
- log.FatalError("%s: %s" % (e.__class__.__name__, str(e)),
+ % (u''.join(traceback.format_exception(*sys.exc_info()))))
+ log.FatalError(u"%s: %s" % (e.__class__.__name__, unicode(e)),
log.ErrorCode.user_error,
e.__class__.__name__)
except Exception, e:
if "Forced assertion for testing" in str(e):
- log.FatalError("%s: %s" % (e.__class__.__name__, str(e)),
+ log.FatalError(u"%s: %s" % (e.__class__.__name__, unicode(e)),
log.ErrorCode.exception,
e.__class__.__name__)
else:
# Traceback and that mess
- log.FatalError("%s" % (''.join(traceback.format_exception(*sys.exc_info()))),
+ log.FatalError(u''.join(traceback.format_exception(*sys.exc_info())),
log.ErrorCode.exception,
e.__class__.__name__)
=== modified file 'bin/rdiffdir'
--- bin/rdiffdir 2013-04-27 14:48:39 +0000
+++ bin/rdiffdir 2013-12-30 03:37:33 +0000
@@ -36,6 +36,7 @@
from duplicity import globals
from duplicity import selection
from duplicity import path
+from duplicity import util
# If set, compress diff and delta files using gzip
gzip_compress = None
@@ -55,7 +56,7 @@
try:
return open(filename, "r")
except IOError:
- log.FatalError("Error opening file %s" % filename)
+ log.FatalError(_("Error opening file %s") % util.ufn(filename))
try:
optlist, args = getopt.getopt(arglist, "v:Vz",
@@ -118,8 +119,8 @@
except OSError:
pass
else:
- log.FatalError("File %s already exists, will not "
- "overwrite." % filename)
+ log.FatalError(_("File %s already exists, will not "
+ "overwrite.") % util.ufn(filename))
def get_action(args):
"""Figure out the main action from the arguments"""
=== modified file 'duplicity/__init__.py'
--- duplicity/__init__.py 2012-09-13 14:08:52 +0000
+++ duplicity/__init__.py 2013-12-30 03:37:33 +0000
@@ -19,5 +19,12 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+import __builtin__
import gettext
-gettext.install('duplicity', codeset='utf8')
+
+t = gettext.translation('duplicity', fallback=True)
+t.install(unicode=True)
+
+# Once we can depend on python >=2.5, we can just use names='ngettext' above.
+# But for now, do the install manually.
+__builtin__.__dict__['ngettext'] = t.ungettext
=== modified file 'duplicity/backend.py'
--- duplicity/backend.py 2013-05-07 22:03:13 +0000
+++ duplicity/backend.py 2013-12-30 03:37:33 +0000
@@ -84,7 +84,7 @@
except Exception:
res = "Failed: " + str(sys.exc_info()[1])
level = log.WARNING
- log.Log("Import of %s %s" % (imp, res), level)
+ log.Log(_("Import of %s %s") % (imp, res), level)
else:
continue
@@ -317,9 +317,9 @@
kwargs = {"raise_errors" : True}
return fn(*args, **kwargs)
except Exception, e:
- log.Warn("Attempt %s failed: %s: %s"
+ log.Warn(_("Attempt %s failed: %s: %s")
% (n, e.__class__.__name__, str(e)))
- log.Debug("Backtrace of previous error: %s"
+ log.Debug(_("Backtrace of previous error: %s")
% exception_traceback())
if isinstance(e, TemporaryLoadException):
time.sleep(30) # wait longer before trying again
@@ -347,18 +347,18 @@
raise e
except Exception, e:
# retry on anything else
- log.Warn("Attempt %s failed. %s: %s"
+ log.Warn(_("Attempt %s failed. %s: %s")
% (n, e.__class__.__name__, str(e)))
- log.Debug("Backtrace of previous error: %s"
+ log.Debug(_("Backtrace of previous error: %s")
% exception_traceback())
time.sleep(10) # wait a bit before trying again
# final trial, die on exception
self.retry_count = n+1
return fn(self, *args)
except Exception, e:
- log.Debug("Backtrace of previous error: %s"
+ log.Debug(_("Backtrace of previous error: %s")
% exception_traceback())
- log.FatalError("Giving up after %s attempts. %s: %s"
+ log.FatalError(_("Giving up after %s attempts. %s: %s")
% (self.retry_count, e.__class__.__name__, str(e)),
log.ErrorCode.backend_error)
self.retry_count = 0
@@ -411,9 +411,24 @@
def list(self):
"""
- Return list of filenames (strings) present in backend
+ Return list of filenames (byte strings) present in backend
"""
- raise NotImplementedError()
+ def tobytes(filename):
+ "Convert a (maybe unicode) filename to bytes"
+ if isinstance(filename, unicode):
+ # There shouldn't be any encoding errors for files we care
+ # about, since duplicity filenames are ascii. But user files
+ # may be in the same directory. So just replace characters.
+ return filename.encode(sys.getfilesystemencoding(), 'replace')
+ else:
+ return filename
+
+ if hasattr(self, '_list'):
+ # Make sure that duplicity internals only ever see byte strings
+ # for filenames, no matter what the backend thinks it is talking.
+ return map(tobytes, self._list())
+ else:
+ raise NotImplementedError()
def delete(self, filename_list):
"""
@@ -567,15 +582,15 @@
except (KeyError, ValueError):
pass
- log.Warn(gettext.ngettext("Running '%s' failed with code %d (attempt #%d)",
- "Running '%s' failed with code %d (attempt #%d)", n) %
- (private, result, n))
+ log.Warn(ngettext("Running '%s' failed with code %d (attempt #%d)",
+ "Running '%s' failed with code %d (attempt #%d)", n) %
+ (private, result, n))
if stdout or stderr:
log.Warn(_("Error is:\n%s") % stderr + (stderr and stdout and "\n") + stdout)
- log.Warn(gettext.ngettext("Giving up trying to execute '%s' after %d attempt",
- "Giving up trying to execute '%s' after %d attempts",
- globals.num_retries) % (private, globals.num_retries))
+ log.Warn(ngettext("Giving up trying to execute '%s' after %d attempt",
+ "Giving up trying to execute '%s' after %d attempts",
+ globals.num_retries) % (private, globals.num_retries))
raise BackendException("Error running '%s'" % private)
def get_fileobj_read(self, filename, parseresults = None):
@@ -609,7 +624,7 @@
"""
if not parseresults:
parseresults = file_naming.parse(filename)
- assert parseresults, "Filename %s not correctly parsed" % filename
+ assert parseresults, u"Filename %s not correctly parsed" % util.ufn(filename)
tdp = dup_temp.new_tempduppath(parseresults)
def close_file_hook():
=== modified file 'duplicity/backends/_boto_multi.py'
--- duplicity/backends/_boto_multi.py 2013-08-25 19:10:21 +0000
+++ duplicity/backends/_boto_multi.py 2013-12-30 03:37:33 +0000
@@ -294,7 +294,7 @@
(self.straight_url, remote_filename, globals.num_retries))
raise BackendException("Error downloading %s/%s" % (self.straight_url, remote_filename))
- def list(self):
+ def _list(self):
if not self.bucket:
return []
=== modified file 'duplicity/backends/_boto_single.py'
--- duplicity/backends/_boto_single.py 2013-12-28 17:25:50 +0000
+++ duplicity/backends/_boto_single.py 2013-12-30 03:37:33 +0000
@@ -265,7 +265,7 @@
(self.straight_url, remote_filename, globals.num_retries))
raise BackendException("Error downloading %s/%s" % (self.straight_url, remote_filename))
- def list(self):
+ def _list(self):
if not self.bucket:
return []
=== modified file 'duplicity/backends/_cf_cloudfiles.py'
--- duplicity/backends/_cf_cloudfiles.py 2013-11-24 16:49:57 +0000
+++ duplicity/backends/_cf_cloudfiles.py 2013-12-30 03:37:33 +0000
@@ -116,7 +116,7 @@
raise BackendException("Error downloading '%s/%s'"
% (self.container, remote_filename))
- def list(self):
+ def _list(self):
for n in range(1, globals.num_retries+1):
log.Info("Listing '%s'" % (self.container))
try:
=== modified file 'duplicity/backends/_cf_pyrax.py'
--- duplicity/backends/_cf_pyrax.py 2013-11-24 16:49:57 +0000
+++ duplicity/backends/_cf_pyrax.py 2013-12-30 03:37:33 +0000
@@ -119,7 +119,7 @@
raise BackendException("Error downloading '%s/%s'"
% (self.container, remote_filename))
- def list(self):
+ def _list(self):
for n in range(1, globals.num_retries + 1):
log.Info("Listing '%s'" % (self.container))
try:
=== modified file 'duplicity/backends/_ssh_paramiko.py'
--- duplicity/backends/_ssh_paramiko.py 2013-12-28 14:55:24 +0000
+++ duplicity/backends/_ssh_paramiko.py 2013-12-30 03:37:33 +0000
@@ -364,7 +364,7 @@
log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay))
raise BackendException("Giving up trying to download '%s' after %d attempts" % (remote_filename,n))
- def list(self):
+ def _list(self):
"""lists the contents of the one-and-only duplicity dir on the remote side.
In scp mode unavoidable quoting issues will make this fail if the directory name
contains single quotes."""
=== modified file 'duplicity/backends/_ssh_pexpect.py'
--- duplicity/backends/_ssh_pexpect.py 2013-06-13 19:33:48 +0000
+++ duplicity/backends/_ssh_pexpect.py 2013-12-30 03:37:33 +0000
@@ -277,7 +277,7 @@
raise BackendException("File %s not found locally after get "
"from backend" % local_path.name)
- def list(self):
+ def _list(self):
"""
List files available for scp
=== modified file 'duplicity/backends/dpbxbackend.py'
--- duplicity/backends/dpbxbackend.py 2013-12-26 20:03:25 +0000
+++ duplicity/backends/dpbxbackend.py 2013-12-30 03:37:33 +0000
@@ -196,7 +196,7 @@
@retry_fatal
@command()
- def list(self,none=None):
+ def _list(self,none=None):
"""List files in directory"""
# Do a long listing to avoid connection reset
remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()
=== modified file 'duplicity/backends/ftpbackend.py'
--- duplicity/backends/ftpbackend.py 2011-06-17 06:21:42 +0000
+++ duplicity/backends/ftpbackend.py 2013-12-30 03:37:33 +0000
@@ -105,7 +105,7 @@
self.run_command_persist(commandline)
local_path.setdata()
- def list(self):
+ def _list(self):
"""List files in directory"""
# Do a long listing to avoid connection reset
commandline = "ncftpls %s -l '%s'" % (self.flags, self.url_string)
=== modified file 'duplicity/backends/ftpsbackend.py'
--- duplicity/backends/ftpsbackend.py 2013-04-27 15:01:27 +0000
+++ duplicity/backends/ftpsbackend.py 2013-12-30 03:37:33 +0000
@@ -104,7 +104,7 @@
self.run_command_persist(commandline)
local_path.setdata()
- def list(self):
+ def _list(self):
"""List files in directory"""
# Do a long listing to avoid connection reset
remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()
=== modified file 'duplicity/backends/gdocsbackend.py'
--- duplicity/backends/gdocsbackend.py 2013-11-19 15:39:47 +0000
+++ duplicity/backends/gdocsbackend.py 2013-12-30 03:37:33 +0000
@@ -137,7 +137,7 @@
% (remote_filename, self.folder.title.text, str(e)), raise_errors)
@retry
- def list(self, raise_errors=False):
+ def _list(self, raise_errors=False):
"""List files in folder"""
try:
entries = self.__fetch_entries(self.folder.resource_id.text,
=== modified file 'duplicity/backends/giobackend.py'
--- duplicity/backends/giobackend.py 2013-01-15 01:28:01 +0000
+++ duplicity/backends/giobackend.py 2013-12-30 03:37:33 +0000
@@ -152,7 +152,7 @@
local_path.setdata()
@retry
- def list(self, raise_errors=False):
+ def _list(self, raise_errors=False):
"""List files in that directory"""
files = []
try:
=== modified file 'duplicity/backends/imapbackend.py'
--- duplicity/backends/imapbackend.py 2011-06-17 06:21:42 +0000
+++ duplicity/backends/imapbackend.py 2013-12-30 03:37:33 +0000
@@ -197,7 +197,7 @@
local_path.setdata()
log.Info("IMAP mail with '%s' subject fetched" % remote_filename)
- def list(self):
+ def _list(self):
ret = []
(result,list) = self._conn.select(globals.imap_mailbox)
if result != "OK":
=== modified file 'duplicity/backends/localbackend.py'
--- duplicity/backends/localbackend.py 2011-11-17 15:59:54 +0000
+++ duplicity/backends/localbackend.py 2013-12-30 03:37:33 +0000
@@ -96,7 +96,7 @@
except Exception, e:
self.handle_error(e, 'get', source_path.name, local_path.name)
- def list(self):
+ def _list(self):
"""List files in that directory"""
try:
os.makedirs(self.remote_pathdir.base)
=== modified file 'duplicity/backends/megabackend.py'
--- duplicity/backends/megabackend.py 2013-07-10 18:17:51 +0000
+++ duplicity/backends/megabackend.py 2013-12-30 03:37:33 +0000
@@ -105,7 +105,7 @@
% (remote_filename, self.__get_node_name(self.folder), str(e)), raise_errors)
@retry
- def list(self, raise_errors=False):
+ def _list(self, raise_errors=False):
"""List files in folder"""
try:
entries = self.client.get_files_in_node(self.folder)
=== modified file 'duplicity/backends/swiftbackend.py'
--- duplicity/backends/swiftbackend.py 2013-06-14 17:51:18 +0000
+++ duplicity/backends/swiftbackend.py 2013-12-30 03:37:33 +0000
@@ -131,7 +131,7 @@
raise BackendException("Error downloading '%s/%s'"
% (self.container, remote_filename))
- def list(self):
+ def _list(self):
for n in range(1, globals.num_retries+1):
log.Info("Listing '%s'" % (self.container))
try:
=== modified file 'duplicity/backends/tahoebackend.py'
--- duplicity/backends/tahoebackend.py 2010-07-23 22:38:33 +0000
+++ duplicity/backends/tahoebackend.py 2013-12-30 03:37:33 +0000
@@ -74,7 +74,7 @@
self.run("tahoe", "cp", self.get_remote_path(remote_filename), local_path.name)
local_path.setdata()
- def list(self):
+ def _list(self):
log.Debug("tahoe: List")
return self.run("tahoe", "ls", self.get_remote_path()).split('\n')
=== modified file 'duplicity/backends/u1backend.py'
--- duplicity/backends/u1backend.py 2013-04-25 16:07:21 +0000
+++ duplicity/backends/u1backend.py 2013-12-30 03:37:33 +0000
@@ -244,7 +244,7 @@
f.close()
local_path.setdata()
- def list(self):
+ def _list(self):
"""List files in that directory"""
remote_full = self.meta_base + "?include_children=true"
resp, content = self.client.request(remote_full)
=== modified file 'duplicity/backends/webdavbackend.py'
--- duplicity/backends/webdavbackend.py 2013-12-28 16:32:27 +0000
+++ duplicity/backends/webdavbackend.py 2013-12-30 03:37:33 +0000
@@ -262,7 +262,7 @@
return 'Digest %s' % auth_string
@retry_fatal
- def list(self):
+ def _list(self):
"""List files in directory"""
log.Info("Listing directory %s on WebDAV server" % (self.directory,))
response = None
=== modified file 'duplicity/collections.py'
--- duplicity/collections.py 2012-11-03 21:42:00 +0000
+++ duplicity/collections.py 2013-12-30 03:37:33 +0000
@@ -30,6 +30,7 @@
from duplicity import dup_time
from duplicity import globals
from duplicity import manifest
+from duplicity import util
from duplicity.gpg import GPGError
class CollectionsError(Exception):
@@ -144,7 +145,7 @@
try:
self.backend.delete(rfn)
except Exception:
- log.Debug("BackupSet.delete: missing %s" % rfn)
+ log.Debug(_("BackupSet.delete: missing %s") % map(util.ufn, rfn))
pass
for lfn in globals.archive_dir.listdir():
pr = file_naming.parse(lfn)
@@ -155,10 +156,10 @@
try:
globals.archive_dir.append(lfn).delete()
except Exception:
- log.Debug("BackupSet.delete: missing %s" % lfn)
+ log.Debug(_("BackupSet.delete: missing %s") % map(util.ufn, lfn))
pass
- def __str__(self):
+ def __unicode__(self):
"""
For now just list files in set
"""
@@ -166,7 +167,7 @@
if self.remote_manifest_name:
filelist.append(self.remote_manifest_name)
filelist.extend(self.volume_name_dict.values())
- return "[%s]" % ", ".join(filelist)
+ return u"[%s]" % u", ".join(map(util.ufn, filelist))
def get_timestr(self):
"""
@@ -611,42 +612,42 @@
return l
- def __str__(self):
+ def __unicode__(self):
"""
Return string summary of the collection
"""
l = [_("Collection Status"),
- "-----------------",
+ u"-----------------",
_("Connecting with backend: %s") %
(self.backend.__class__.__name__,),
- _("Archive dir: %s") % (self.archive_dir.name,)]
+ _("Archive dir: %s") % (util.ufn(self.archive_dir.name),)]
l.append("\n" +
- gettext.ngettext("Found %d secondary backup chain.",
- "Found %d secondary backup chains.",
- len(self.other_backup_chains))
+ ngettext("Found %d secondary backup chain.",
+ "Found %d secondary backup chains.",
+ len(self.other_backup_chains))
% len(self.other_backup_chains))
for i in range(len(self.other_backup_chains)):
l.append(_("Secondary chain %d of %d:") %
(i+1, len(self.other_backup_chains)))
- l.append(str(self.other_backup_chains[i]))
+ l.append(unicode(self.other_backup_chains[i]))
l.append("")
if self.matched_chain_pair:
l.append("\n" + _("Found primary backup chain with matching "
"signature chain:"))
- l.append(str(self.matched_chain_pair[1]))
+ l.append(unicode(self.matched_chain_pair[1]))
else:
l.append(_("No backup chains with active signatures found"))
if self.orphaned_backup_sets or self.incomplete_backup_sets:
- l.append(gettext.ngettext("Also found %d backup set not part of any chain,",
- "Also found %d backup sets not part of any chain,",
- len(self.orphaned_backup_sets))
+ l.append(ngettext("Also found %d backup set not part of any chain,",
+ "Also found %d backup sets not part of any chain,",
+ len(self.orphaned_backup_sets))
% (len(self.orphaned_backup_sets),))
- l.append(gettext.ngettext("and %d incomplete backup set.",
- "and %d incomplete backup sets.",
- len(self.incomplete_backup_sets))
+ l.append(ngettext("and %d incomplete backup set.",
+ "and %d incomplete backup sets.",
+ len(self.incomplete_backup_sets))
% (len(self.incomplete_backup_sets),))
# TRANSL: "cleanup" is a hard-coded command, so do not translate it
l.append(_('These may be deleted by running duplicity with the '
@@ -654,7 +655,7 @@
else:
l.append(_("No orphaned or incomplete backup sets found."))
- return "\n".join(l)
+ return u"\n".join(l)
def set_values(self, sig_chain_warning = 1):
"""
@@ -668,16 +669,16 @@
# get remote filename list
backend_filename_list = self.backend.list()
- log.Debug(gettext.ngettext("%d file exists on backend",
- "%d files exist on backend",
- len(backend_filename_list)) %
+ log.Debug(ngettext("%d file exists on backend",
+ "%d files exist on backend",
+ len(backend_filename_list)) %
len(backend_filename_list))
# get local filename list
local_filename_list = self.archive_dir.listdir()
- log.Debug(gettext.ngettext("%d file exists in cache",
- "%d files exist in cache",
- len(local_filename_list)) %
+ log.Debug(ngettext("%d file exists in cache",
+ "%d files exist in cache",
+ len(local_filename_list)) %
len(local_filename_list))
# check for partial backups
@@ -750,21 +751,21 @@
assert self.values_set
if self.local_orphaned_sig_names:
- log.Warn(gettext.ngettext("Warning, found the following local orphaned "
- "signature file:",
- "Warning, found the following local orphaned "
- "signature files:",
- len(self.local_orphaned_sig_names))
- + "\n" + "\n".join(self.local_orphaned_sig_names),
+ log.Warn(ngettext("Warning, found the following local orphaned "
+ "signature file:",
+ "Warning, found the following local orphaned "
+ "signature files:",
+ len(self.local_orphaned_sig_names))
+ + u"\n" + u"\n".join(map(util.ufn, self.local_orphaned_sig_names)),
log.WarningCode.orphaned_sig)
if self.remote_orphaned_sig_names:
- log.Warn(gettext.ngettext("Warning, found the following remote orphaned "
- "signature file:",
- "Warning, found the following remote orphaned "
- "signature files:",
- len(self.remote_orphaned_sig_names))
- + "\n" + "\n".join(self.remote_orphaned_sig_names),
+ log.Warn(ngettext("Warning, found the following remote orphaned "
+ "signature file:",
+ "Warning, found the following remote orphaned "
+ "signature files:",
+ len(self.remote_orphaned_sig_names))
+ + u"\n" + u"\n".join(map(util.ufn, self.remote_orphaned_sig_names)),
log.WarningCode.orphaned_sig)
if self.all_sig_chains and sig_chain_warning and not self.matched_chain_pair:
@@ -776,13 +777,12 @@
"from aborted session"), log.WarningCode.incomplete_backup)
if self.orphaned_backup_sets:
- log.Warn(gettext.ngettext("Warning, found the following orphaned "
- "backup file:",
- "Warning, found the following orphaned "
- "backup files:",
- len(self.orphaned_backup_sets))
- + "\n" + "\n".join(map(lambda x: str(x),
- self.orphaned_backup_sets)),
+ log.Warn(ngettext("Warning, found the following orphaned "
+ "backup file:",
+ "Warning, found the following orphaned "
+ "backup files:",
+ len(self.orphaned_backup_sets))
+ + u"\n" + u"\n".join(map(unicode, self.orphaned_backup_sets)),
log.WarningCode.orphaned_backup)
def get_backup_chains(self, filename_list):
@@ -795,7 +795,7 @@
missing files.
"""
log.Debug(_("Extracting backup chains from list of files: %s")
- % filename_list)
+ % map(util.ufn, filename_list))
# First put filenames in set form
sets = []
def add_to_sets(filename):
@@ -804,15 +804,15 @@
"""
for set in sets:
if set.add_filename(filename):
- log.Debug(_("File %s is part of known set") % (filename,))
+ log.Debug(_("File %s is part of known set") % (util.ufn(filename),))
break
else:
- log.Debug(_("File %s is not part of a known set; creating new set") % (filename,))
+ log.Debug(_("File %s is not part of a known set; creating new set") % (util.ufn(filename),))
new_set = BackupSet(self.backend)
if new_set.add_filename(filename):
sets.append(new_set)
else:
- log.Debug(_("Ignoring file (rejected by backup set) '%s'") % filename)
+ log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.ufn(filename))
map(add_to_sets, filename_list)
sets, incomplete_sets = self.get_sorted_sets(sets)
=== modified file 'duplicity/commandline.py'
--- duplicity/commandline.py 2013-11-24 16:49:57 +0000
+++ duplicity/commandline.py 2013-12-30 03:37:33 +0000
@@ -40,6 +40,7 @@
from duplicity import log
from duplicity import path
from duplicity import selection
+from duplicity import util
select_opts = [] # Will hold all the selection options
@@ -194,7 +195,11 @@
if file is None:
file = sys.stdout
encoding = self._get_encoding(file)
- file.write(self.format_help().decode('utf-8').encode(encoding, "replace"))
+ help = self.format_help()
+ # The help is in unicode or bytes depending on the user's locale
+ if not isinstance(help, unicode):
+            help = help.decode('utf-8')
+ file.write(help.encode(encoding, "replace"))
def parse_cmdline_options(arglist):
@@ -655,7 +660,7 @@
set_archive_dir(expand_archive_dir(globals.archive_dir,
globals.backup_name))
- log.Info(_("Using archive dir: %s") % (globals.archive_dir.name,))
+ log.Info(_("Using archive dir: %s") % (util.ufn(globals.archive_dir.name),))
log.Info(_("Using backup name: %s") % (globals.backup_name,))
return args
@@ -865,7 +870,7 @@
archive_dir = path.Path(dirstring)
if not archive_dir.isdir():
log.FatalError(_("Specified archive directory '%s' does not exist, "
- "or is not a directory") % (archive_dir.name,),
+ "or is not a directory") % (util.ufn(archive_dir.name),),
log.ErrorCode.bad_archive_dir)
globals.archive_dir = archive_dir
@@ -934,18 +939,18 @@
if action == "restore":
if (local_path.exists() and not local_path.isemptydir()) and not globals.force:
log.FatalError(_("Restore destination directory %s already "
- "exists.\nWill not overwrite.") % (local_pathname,),
+ "exists.\nWill not overwrite.") % (util.ufn(local_path.name),),
log.ErrorCode.restore_dir_exists)
elif action == "verify":
if not local_path.exists():
log.FatalError(_("Verify directory %s does not exist") %
- (local_path.name,),
+ (util.ufn(local_path.name),),
log.ErrorCode.verify_dir_doesnt_exist)
else:
assert action == "full" or action == "inc"
if not local_path.exists():
log.FatalError(_("Backup source directory %s does not exist.")
- % (local_path.name,),
+ % (util.ufn(local_path.name),),
log.ErrorCode.backup_dir_doesnt_exist)
globals.local_path = local_path
=== modified file 'duplicity/diffdir.py'
--- duplicity/diffdir.py 2013-11-16 02:07:49 +0000
+++ duplicity/diffdir.py 2013-12-30 03:37:33 +0000
@@ -100,7 +100,7 @@
index_string = sig_path.get_relative_path()
else:
assert 0, "Both new and sig are None for some reason"
- log.Warn(_("Error %s getting delta for %s") % (str(exc), index_string))
+ log.Warn(_("Error %s getting delta for %s") % (str(exc), util.ufn(index_string)))
return None
@@ -160,14 +160,14 @@
if new_path and stats:
stats.add_new_file(new_path)
log.Info(_("A %s") %
- (delta_path.get_relative_path(),),
+ (util.ufn(delta_path.get_relative_path())),
log.InfoCode.diff_file_new,
util.escape(delta_path.get_relative_path()))
else:
if new_path and stats:
stats.add_changed_file(new_path)
log.Info(_("M %s") %
- (delta_path.get_relative_path(),),
+ (util.ufn(delta_path.get_relative_path())),
log.InfoCode.diff_file_changed,
util.escape(delta_path.get_relative_path()))
@@ -188,8 +188,8 @@
else:
sigTarFile = None
for new_path, sig_path in collated:
- log.Debug(_("Comparing %s and %s") % (new_path and new_path.index,
- sig_path and sig_path.index))
+ log.Debug(_("Comparing %s and %s") % (new_path and util.uindex(new_path.index),
+ sig_path and util.uindex(sig_path.index)))
if not new_path or not new_path.type:
# File doesn't exist (but ignore attempts to delete base dir;
# old versions of duplicity could have written out the sigtar in
@@ -197,7 +197,7 @@
if sig_path and sig_path.exists() and sig_path.index != ():
# but signature says it did
log.Info(_("D %s") %
- (sig_path.get_relative_path(),),
+ (util.ufn(sig_path.get_relative_path())),
log.InfoCode.diff_file_deleted,
util.escape(sig_path.get_relative_path()))
if sigTarFile:
@@ -391,7 +391,7 @@
buf = self.infile.read(length)
except IOError, ex:
buf = ""
- log.Warn(_("Error %s getting delta for %s") % (str(ex), self.infile.name))
+ log.Warn(_("Error %s getting delta for %s") % (str(ex), util.ufn(self.infile.name)))
if stats:
stats.SourceFileSize += len(buf)
return buf
@@ -476,7 +476,7 @@
Make tarblock out of tarinfo and file data
"""
tarinfo.size = len(file_data)
- headers = tarinfo.tobuf()
+ headers = tarinfo.tobuf(errors='replace')
blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE) #@UnusedVariable
if remainder > 0:
filler_data = "\0" * (tarfile.BLOCKSIZE - remainder)
=== modified file 'duplicity/dup_temp.py'
--- duplicity/dup_temp.py 2013-01-06 18:12:52 +0000
+++ duplicity/dup_temp.py 2013-12-30 03:37:33 +0000
@@ -261,7 +261,7 @@
res = Block(self.fp.read(self.get_read_size()))
except Exception:
log.FatalError(_("Failed to read %s: %s") %
- (self.src.name, sys.exc_info()),
+ (util.ufn(self.src.name), sys.exc_info()),
log.ErrorCode.generic)
if not res.data:
self.fp.close()
=== modified file 'duplicity/gpg.py'
--- duplicity/gpg.py 2013-01-06 18:12:52 +0000
+++ duplicity/gpg.py 2013-12-30 03:37:33 +0000
@@ -25,7 +25,7 @@
see duplicity's README for details
"""
-import os, types, tempfile, re, gzip
+import os, types, tempfile, re, gzip, locale
from duplicity import misc
from duplicity import globals
@@ -207,14 +207,14 @@
self.read(offset - self.byte_count)
def gpg_failed(self):
- msg = "GPG Failed, see log below:\n"
- msg += "===== Begin GnuPG log =====\n"
+ msg = u"GPG Failed, see log below:\n"
+ msg += u"===== Begin GnuPG log =====\n"
for fp in (self.logger_fp, self.stderr_fp):
fp.seek(0)
for line in fp:
- msg += line.strip() + "\n"
- msg += "===== End GnuPG log =====\n"
- if not (msg.find("invalid packet (ctb=14)") > -1):
+ msg += unicode(line.strip(), locale.getpreferredencoding(), 'replace') + u"\n"
+ msg += u"===== End GnuPG log =====\n"
+ if not (msg.find(u"invalid packet (ctb=14)") > -1):
raise GPGError, msg
else:
return ""
=== modified file 'duplicity/gpginterface.py'
--- duplicity/gpginterface.py 2012-11-21 01:27:35 +0000
+++ duplicity/gpginterface.py 2013-12-30 03:37:33 +0000
@@ -234,7 +234,7 @@
import threading
except ImportError:
import dummy_threading #@UnusedImport
- log.Warn("Threading not available -- zombie processes may appear")
+ log.Warn(_("Threading not available -- zombie processes may appear"))
__author__ = "Frank J. Tobin, ftobin@xxxxxxxxxxxxxxx"
__version__ = "0.3.2"
@@ -674,7 +674,7 @@
try:
process.returned = os.waitpid(process.pid, 0)[1]
except:
- log.Debug("GPG process %d terminated before wait()" % process.pid)
+ log.Debug(_("GPG process %d terminated before wait()") % process.pid)
process.returned = 0
=== modified file 'duplicity/lazy.py'
--- duplicity/lazy.py 2012-03-22 02:45:58 +0000
+++ duplicity/lazy.py 2013-12-30 03:37:33 +0000
@@ -323,7 +323,7 @@
if index <= self.index:
log.Warn(_("Warning: oldindex %s >= newindex %s") %
- (self.index, index))
+ (util.uindex(self.index), util.uindex(index)))
return 1
if self.finish_branches(index) is None:
@@ -397,7 +397,7 @@
filename = os.path.join(*self.index)
else:
filename = "."
- log.Warn(_("Error '%s' processing %s") % (exc, filename),
+ log.Warn(_("Error '%s' processing %s") % (exc, util.ufn(filename)),
log.WarningCode.cannot_process,
util.escape(filename))
@@ -407,7 +407,7 @@
index_str = "."
else:
index_str = os.path.join(*index)
- log.Warn(_("Skipping %s because of previous error") % index_str,
+ log.Warn(_("Skipping %s because of previous error") % util.ufn(index_str),
log.WarningCode.process_skipped,
util.escape(index_str))
=== modified file 'duplicity/log.py'
--- duplicity/log.py 2013-04-27 14:10:11 +0000
+++ duplicity/log.py 2013-12-30 03:37:33 +0000
@@ -72,7 +72,14 @@
initial_level = _logger.getEffectiveLevel()
_logger.setLevel(DupToLoggerLevel(MAX))
- _logger.log(DupToLoggerLevel(verb_level), s.decode("utf8", "ignore"))
+ # If all the backends kindly gave us unicode, we could enable this next
+ # assert line. As it is, we'll attempt to convert s to unicode if we
+ # are handed bytes. One day we should update the backends.
+ #assert isinstance(s, unicode)
+ if not isinstance(s, unicode):
+ s = s.decode("utf8", "replace")
+
+ _logger.log(DupToLoggerLevel(verb_level), s)
_logger.controlLine = None
if force_print:
@@ -194,7 +201,7 @@
def PrintCollectionStatus(col_stats, force_print=False):
"""Prints a collection status to the log"""
- Log(str(col_stats), 8, InfoCode.collection_status,
+ Log(unicode(col_stats), 8, InfoCode.collection_status,
'\n' + '\n'.join(col_stats.to_log_info()), force_print)
def Notice(s):
=== modified file 'duplicity/manifest.py'
--- duplicity/manifest.py 2013-11-16 02:06:05 +0000
+++ duplicity/manifest.py 2013-12-30 03:37:33 +0000
@@ -232,7 +232,7 @@
Write string version of manifest to given path
"""
assert not path.exists()
- fout = path.open("w")
+ fout = path.open("wb")
fout.write(self.to_string())
assert not fout.close()
path.setdata()
=== modified file 'duplicity/misc.py'
--- duplicity/misc.py 2009-04-02 14:47:12 +0000
+++ duplicity/misc.py 2013-12-30 03:37:33 +0000
@@ -24,6 +24,7 @@
import os
from duplicity import log
+from duplicity import util
class MiscError(Exception):
@@ -96,7 +97,7 @@
raise StopIteration
filename = "%s.%d" % (self.prefix, self.current_index)
- log.Info(_("Starting to write %s") % filename)
+ log.Info(_("Starting to write %s") % util.ufn(filename))
outfp = open(filename, "wb")
if not self.write_volume(outfp):
@@ -105,7 +106,7 @@
if self.current_index == 1:
# special case first index
log.Notice(_("One only volume required.\n"
- "Renaming %s to %s") % (filename, self.prefix))
+ "Renaming %s to %s") % (util.ufn(filename), util.ufn(self.prefix)))
os.rename(filename, self.prefix)
return self.prefix
else:
=== modified file 'duplicity/patchdir.py'
--- duplicity/patchdir.py 2013-11-16 03:41:58 +0000
+++ duplicity/patchdir.py 2013-12-30 03:37:33 +0000
@@ -72,12 +72,12 @@
ITR = IterTreeReducer( PathPatcher, [base_path] )
for basis_path, diff_ropath in collated:
if basis_path:
- log.Info( _( "Patching %s" ) % ( basis_path.get_relative_path(), ),
+ log.Info(_("Patching %s") % (util.ufn(basis_path.get_relative_path())),
log.InfoCode.patch_file_patching,
util.escape( basis_path.get_relative_path() ) )
ITR( basis_path.index, basis_path, diff_ropath )
else:
- log.Info( _( "Patching %s" ) % ( diff_ropath.get_relative_path(), ),
+ log.Info(_("Patching %s") % (util.ufn(diff_ropath.get_relative_path())),
log.InfoCode.patch_file_patching,
util.escape( diff_ropath.get_relative_path() ) )
ITR( diff_ropath.index, basis_path, diff_ropath )
@@ -155,8 +155,8 @@
re.subn( "(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$",
"\\2", tiname )
if num_subs != 1:
- raise PatchDirException( "Unrecognized diff entry %s" %
- ( tiname, ) )
+ raise PatchDirException(u"Unrecognized diff entry %s" %
+ util.ufn(tiname))
else:
difftype = prefix[:-1] # strip trailing /
name = tiname[len( prefix ):]
@@ -165,15 +165,15 @@
multivol = 0
break
else:
- raise PatchDirException( "Unrecognized diff entry %s" %
- ( tiname, ) )
+ raise PatchDirException(u"Unrecognized diff entry %s" %
+ util.ufn(tiname))
if name == "." or name == "":
index = ()
else:
index = tuple( name.split( "/" ) )
if '..' in index:
- raise PatchDirException( "Tar entry %s contains '..'. Security "
- "violation" % ( tiname, ) )
+ raise PatchDirException(u"Tar entry %s contains '..'. Security "
+ "violation" % util.ufn(tiname))
return ( index, difftype, multivol )
@@ -252,7 +252,7 @@
def start_process( self, index, basis_path, diff_ropath ):
"""Start processing when diff_ropath is a directory"""
if not ( diff_ropath and diff_ropath.isdir() ):
- assert index == (), str( index ) # should only happen for first elem
+ assert index == (), util.uindex(index) # should only happen for first elem
self.fast_process( index, basis_path, diff_ropath )
return
@@ -580,7 +580,7 @@
def can_fast_process( self, index, ropath ):
"""Can fast process (no recursion) if ropath isn't a directory"""
log.Info( _( "Writing %s of type %s" ) %
- ( ropath.get_relative_path(), ropath.type ),
+ (util.ufn(ropath.get_relative_path()), ropath.type),
log.InfoCode.patch_file_writing,
"%s %s" % ( util.escape( ropath.get_relative_path() ), ropath.type ) )
return not ropath.isdir()
=== modified file 'duplicity/path.py'
--- duplicity/path.py 2012-11-09 03:21:40 +0000
+++ duplicity/path.py 2013-12-30 03:37:33 +0000
@@ -81,8 +81,8 @@
elif stat.S_ISFIFO(st_mode):
self.type = "fifo"
elif stat.S_ISSOCK(st_mode):
- raise PathException(self.get_relative_path() +
- "is a socket, unsupported by tar")
+ raise PathException(util.ufn(self.get_relative_path()) +
+                                u" is a socket, unsupported by tar")
self.type = "sock"
elif stat.S_ISCHR(st_mode):
self.type = "chr"
@@ -220,7 +220,7 @@
self.stat.st_mtime = int(tarinfo.mtime)
if self.stat.st_mtime < 0:
log.Warn(_("Warning: %s has negative mtime, treating as 0.")
- % (tarinfo.name,))
+ % (util.ufn(tarinfo.name)))
self.stat.st_mtime = 0
self.stat.st_size = tarinfo.size
@@ -279,7 +279,7 @@
ti.uid, ti.gid = self.stat.st_uid, self.stat.st_gid
if self.stat.st_mtime < 0:
log.Warn(_("Warning: %s has negative mtime, treating as 0.")
- % (self.get_relative_path(),))
+ % (util.ufn(self.get_relative_path())))
ti.mtime = 0
else:
ti.mtime = int(self.stat.st_mtime)
@@ -343,8 +343,8 @@
"""
def log_diff(log_string):
- log_str = _("Difference found:") + " " + log_string
- log.Notice(log_str % (self.get_relative_path(),))
+ log_str = _("Difference found:") + u" " + log_string
+ log.Notice(log_str % (util.ufn(self.get_relative_path())))
if not self.type and not other.type:
return 1
@@ -459,9 +459,9 @@
other.stat = stat
other.mode = self.mode
- def __repr__(self):
+ def __unicode__(self):
"""Return string representation"""
- return "(%s %s)" % (self.index, self.type)
+ return u"(%s %s)" % (util.uindex(self.index), self.type)
class Path(ROPath):
@@ -551,17 +551,17 @@
def mkdir(self):
"""Make directory(s) at specified path"""
- log.Info(_("Making directory %s") % (self.name,))
+ log.Info(_("Making directory %s") % util.ufn(self.name))
try:
os.makedirs(self.name)
except OSError:
if (not globals.force):
- raise PathException("Error creating directory %s" % (self.name,), 7)
+ raise PathException("Error creating directory %s" % util.ufn(self.name), 7)
self.setdata()
def delete(self):
"""Remove this file"""
- log.Info(_("Deleting %s") % (self.name,))
+ log.Info(_("Deleting %s") % util.ufn(self.name))
if self.isdir():
util.ignore_missing(os.rmdir, self.name)
else:
@@ -570,14 +570,14 @@
def touch(self):
"""Open the file, write 0 bytes, close"""
- log.Info(_("Touching %s") % (self.name,))
+ log.Info(_("Touching %s") % util.ufn(self.name))
fp = self.open("wb")
fp.close()
def deltree(self):
"""Remove self by recursively deleting files under it"""
from duplicity import selection # todo: avoid circ. dep. issue
- log.Info(_("Deleting tree %s") % (self.name,))
+ log.Info(_("Deleting tree %s") % util.ufn(self.name))
itr = IterTreeReducer(PathDeleter, [])
for path in selection.Select(self).set_iter():
itr(path.index, path)
@@ -644,7 +644,7 @@
return temp_path
_tmp_path_counter += 1
assert _tmp_path_counter < 10000, \
- "Warning too many temp files created for " + self.name
+ u"Warning too many temp files created for " + util.ufn(self.name)
def compare_recursive(self, other, verbose = None):
"""Compare self to other Path, descending down directories"""
=== modified file 'duplicity/robust.py'
--- duplicity/robust.py 2010-07-22 19:15:11 +0000
+++ duplicity/robust.py 2013-12-30 03:37:33 +0000
@@ -56,7 +56,7 @@
def listpath(path):
"""Like path.listdir() but return [] if error, and sort results"""
def error_handler(exc):
- log.Warn(_("Error listing directory %s") % path.name)
+ log.Warn(_("Error listing directory %s") % util.ufn(path.name))
return []
dir_listing = check_common_error(error_handler, path.listdir)
dir_listing.sort()
@@ -64,4 +64,4 @@
from duplicity import librsync
from duplicity import log
-
+from duplicity import util
=== modified file 'duplicity/selection.py'
--- duplicity/selection.py 2012-10-17 14:12:33 +0000
+++ duplicity/selection.py 2013-12-30 03:37:33 +0000
@@ -116,15 +116,15 @@
try:
mode = os.stat(fullpath)[stat.ST_MODE]
if stat.S_ISSOCK(mode):
- log.Info(_("Skipping socket %s") % fullpath,
+ log.Info(_("Skipping socket %s") % util.ufn(fullpath),
log.InfoCode.skipping_socket,
util.escape(fullpath))
else:
- log.Warn(_("Error initializing file %s") % fullpath,
+ log.Warn(_("Error initializing file %s") % util.ufn(fullpath),
log.WarningCode.cannot_iterate,
util.escape(fullpath))
except OSError:
- log.Warn(_("Error accessing possibly locked file %s") % fullpath,
+ log.Warn(_("Error accessing possibly locked file %s") % util.ufn(fullpath),
log.WarningCode.cannot_stat,
util.escape(fullpath))
return None
@@ -145,7 +145,7 @@
# make sure file is read accessible
if (new_path and new_path.type in ["reg", "dir"]
and not os.access(new_path.name, os.R_OK)):
- log.Warn(_("Error accessing possibly locked file %s") % new_path.name,
+ log.Warn(_("Error accessing possibly locked file %s") % util.ufn(new_path.name),
log.WarningCode.cannot_read,
util.escape(new_path.name))
if diffdir.stats:
@@ -161,9 +161,9 @@
if not path.type:
# base doesn't exist
log.Warn(_("Warning: base %s doesn't exist, continuing") %
- path.name)
+ util.ufn(path.name))
return
- log.Debug(_("Selecting %s") % path.name)
+ log.Debug(_("Selecting %s") % util.ufn(path.name))
yield path
if not path.isdir():
return
@@ -181,10 +181,10 @@
if val == 0:
if delayed_path_stack:
for delayed_path in delayed_path_stack:
- log.Log(_("Selecting %s") % delayed_path.name, 6)
+ log.Log(_("Selecting %s") % util.ufn(delayed_path.name), 6)
yield delayed_path
del delayed_path_stack[:]
- log.Debug(_("Selecting %s") % subpath.name)
+ log.Debug(_("Selecting %s") % util.ufn(subpath.name))
yield subpath
if subpath.isdir():
diryield_stack.append(diryield(subpath))
@@ -271,7 +271,7 @@
%s
Useful file specifications begin with the base directory or some
pattern (such as '**') which matches the base directory.""") %
- (exc, self.prefix), log.ErrorCode.file_prefix_error)
+ (exc, util.ufn(self.prefix)), log.ErrorCode.file_prefix_error)
elif isinstance(exc, GlobbingError):
log.FatalError(_("Fatal Error while processing expression\n"
"%s") % exc, log.ErrorCode.globbing_error)
@@ -340,7 +340,7 @@
if prefix_warnings[0] < 6:
log.Warn(_("Warning: file specification '%s' in filelist %s\n"
"doesn't start with correct prefix %s. Ignoring.") %
- (exc, filelist_name, self.prefix))
+ (exc, filelist_name, util.ufn(self.prefix)))
if prefix_warnings[0] == 5:
log.Warn(_("Future prefix errors will not be logged."))
@@ -519,7 +519,7 @@
if include == 0:
sel_func = exclude_sel_func
else:
- log.FatalError("--include-if-present not implemented (would it make sense?).",
+ log.FatalError(u"--include-if-present not implemented (would it make sense?).",
log.ErrorCode.not_implemented)
sel_func.exclude = not include
=== modified file 'duplicity/tempdir.py'
--- duplicity/tempdir.py 2012-11-22 12:45:48 +0000
+++ duplicity/tempdir.py 2013-12-30 03:37:33 +0000
@@ -116,7 +116,7 @@
"""
self.__dir = tempfile.mkdtemp("-tempdir", "duplicity-", temproot)
- log.Info(_("Using temporary directory %s") % (self.__dir,))
+ log.Info(_("Using temporary directory %s") % util.ufn(self.__dir))
# number of mktemp()/mkstemp() calls served so far
self.__tempcount = 0
@@ -160,7 +160,7 @@
suffix = "-%d" % (self.__tempcount,)
filename = tempfile.mktemp(suffix, "mktemp-", self.__dir)
- log.Debug(_("Registering (mktemp) temporary file %s") % (filename,))
+ log.Debug(_("Registering (mktemp) temporary file %s") % util.ufn(filename))
self.__pending[filename] = None
finally:
self.__lock.release()
@@ -182,7 +182,7 @@
suffix = "-%d" % (self.__tempcount,)
fd, filename = tempfile.mkstemp(suffix, "mkstemp-", self.__dir)
- log.Debug(_("Registering (mkstemp) temporary file %s") % (filename,))
+ log.Debug(_("Registering (mkstemp) temporary file %s") % util.ufn(filename))
self.__pending[filename] = None
finally:
self.__lock.release()
@@ -214,10 +214,10 @@
self.__lock.acquire()
try:
if self.__pending.has_key(fname):
- log.Debug(_("Forgetting temporary file %s") % (fname, ))
+ log.Debug(_("Forgetting temporary file %s") % util.ufn(fname))
del(self.__pending[fname])
else:
- log.Warn(_("Attempt to forget unknown tempfile %s - this is probably a bug.") % (fname,))
+ log.Warn(_("Attempt to forget unknown tempfile %s - this is probably a bug.") % util.ufn(fname))
pass
finally:
self.__lock.release()
@@ -236,15 +236,15 @@
if not self.__dir is None:
for file in self.__pending.keys():
try:
- log.Debug(_("Removing still remembered temporary file %s") % (file,))
+ log.Debug(_("Removing still remembered temporary file %s") % util.ufn(file))
util.ignore_missing(os.unlink, file)
except Exception:
- log.Info(_("Cleanup of temporary file %s failed") % (file,))
+ log.Info(_("Cleanup of temporary file %s failed") % util.ufn(file))
pass
try:
os.rmdir(self.__dir)
except Exception:
- log.Warn(_("Cleanup of temporary directory %s failed - this is probably a bug.") % (self.__dir,))
+ log.Warn(_("Cleanup of temporary directory %s failed - this is probably a bug.") % util.ufn(self.__dir))
pass
self.__pending = None
self.__dir = None
=== modified file 'duplicity/util.py'
--- duplicity/util.py 2013-09-14 13:21:26 +0000
+++ duplicity/util.py 2013-12-30 03:37:33 +0000
@@ -24,6 +24,7 @@
"""
import errno
+import os
import sys
import string
import traceback
@@ -50,7 +51,21 @@
return str
def escape(string):
- return "'%s'" % string.encode("string-escape")
+ "Convert a (bytes) filename to a format suitable for logging (quoted utf8)"
+ string = ufn(string).encode('unicode-escape', 'replace')
+ return u"'%s'" % string.decode('utf8', 'replace')
+
+def ufn(filename):
+ "Convert a (bytes) filename to unicode for printing"
+ assert not isinstance(filename, unicode)
+ return filename.decode(sys.getfilesystemencoding(), 'replace')
+
+def uindex(index):
+ "Convert an index (a tuple of path parts) to unicode for printing"
+ if index:
+ return os.path.join(*map(ufn, index))
+ else:
+ return u'.'
def maybe_ignore_errors(fn):
"""
=== modified file 'testing/tests/file_namingtest.py'
--- testing/tests/file_namingtest.py 2011-11-04 04:33:06 +0000
+++ testing/tests/file_namingtest.py 2013-12-30 03:37:33 +0000
@@ -46,7 +46,7 @@
dup_time.setcurtime(20)
filename = file_naming.get("inc", volume_number = 23)
- log.Info("Inc filename: " + filename)
+ log.Info(u"Inc filename: " + filename)
pr = file_naming.parse(filename)
assert pr and pr.type == "inc", pr
assert pr.start_time == 10
@@ -55,7 +55,7 @@
assert not pr.partial
filename = file_naming.get("full-sig")
- log.Info("Full sig filename: " + filename)
+ log.Info(u"Full sig filename: " + filename)
pr = file_naming.parse(filename)
assert pr.type == "full-sig"
assert pr.time == 20
=== modified file 'testing/tests/unicode.py'
--- testing/tests/unicode.py 2012-09-13 14:08:52 +0000
+++ testing/tests/unicode.py 2013-12-30 03:37:33 +0000
@@ -29,11 +29,13 @@
if 'duplicity' in sys.modules:
del(sys.modules["duplicity"])
- @patch('gettext.install')
- def test_module_install(self, inst_mock):
- """Make sure we convert po files to utf8"""
+ @patch('gettext.translation')
+ def test_module_install(self, gettext_mock):
+ """Make sure we convert translations to unicode"""
import duplicity
- inst_mock.assert_called_once_with('duplicity', codeset='utf8')
+ gettext_mock.assert_called_once_with('duplicity', fallback=True)
+ gettext_mock.return_value.install.assert_called_once_with(unicode=True)
+ assert ngettext is gettext_mock.return_value.ungettext
if __name__ == "__main__":
unittest.main()
Follow ups