
duplicity-team team mailing list archive

[Merge] lp:~mgorse/duplicity/0.8-series into lp:duplicity

 

Mgorse has proposed merging lp:~mgorse/duplicity/0.8-series into lp:duplicity.

Commit message:
Adorn some strings in duplicity/*.py

Requested reviews:
  duplicity-team (duplicity-team)

For more details, see:
https://code.launchpad.net/~mgorse/duplicity/0.8-series/+merge/354890

Adorn some duplicity/*.py strings. I've avoided submitting anything that I think might require significant discussion; reviewing should be easier this way. Mostly this annotates strings as unicode, except in librsync.py.
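
For reviewers who want a concrete picture of the adornment: under Python 2 an
unadorned literal is a byte string, so the port marks text literals with a u
prefix to keep their type the same on Python 2 and 3. Below is a minimal
sketch of the two patterns in this patch; the helper name u_join_args is
illustrative and not part of the diff:

    import sys

    greeting = "hello"     # Python 2: bytes (str); Python 3: text
    u_greeting = u"hello"  # text on both; the type the patch standardizes
                           # on for logging and gettext calls

    def u_join_args(argv):
        # Mirrors the bin/duplicity hunk: decode each argument before joining.
        # Joining raw byte args with the unicode separator u' ' would make
        # Python 2 decode them implicitly as ASCII and fail on non-ASCII bytes.
        enc = sys.getfilesystemencoding() or 'utf-8'
        return u' '.join(arg.decode(enc, 'replace') if isinstance(arg, bytes)
                         else arg for arg in argv)

    print(repr(u_join_args(['duplicity', 'full', '/home/m\xc3\xb6rse'])))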
-- 
Your team duplicity-team is requested to review the proposed merge of lp:~mgorse/duplicity/0.8-series into lp:duplicity.
=== modified file 'bin/duplicity'
--- bin/duplicity	2018-07-22 14:36:15 +0000
+++ bin/duplicity	2018-09-13 20:09:49 +0000
@@ -1391,7 +1391,8 @@
     """
     log.Log(u'=' * 80, verbosity)
     log.Log(u"duplicity $version ($reldate)", verbosity)
-    log.Log(u"Args: %s" % util.fsdecode(u' '.join(sys.argv)), verbosity)
+    u_args = (util.fsdecode(arg) for arg in sys.argv)
+    log.Log(u"Args: %s" % u' '.join(u_args), verbosity)
     log.Log(u' '.join(platform.uname()), verbosity)
     log.Log(u"%s %s" % (sys.executable or sys.platform, sys.version), verbosity)
     log.Log(u'=' * 80, verbosity)

=== modified file 'duplicity/asyncscheduler.py'
--- duplicity/asyncscheduler.py	2018-07-24 11:52:33 +0000
+++ duplicity/asyncscheduler.py	2018-09-13 20:09:49 +0000
@@ -20,7 +20,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""
+u"""
 Asynchronous job scheduler, for concurrent execution with minimalistic
 dependency guarantees.
 """
@@ -37,7 +37,7 @@
 
 
 class AsyncScheduler:
-    """
+    u"""
     Easy-to-use scheduler of function calls to be executed
     concurrently. A very simple dependency mechanism exists in the
     form of barriers (see insert_barrier()).
@@ -59,14 +59,14 @@
     """
 
     def __init__(self, concurrency):
-        """
+        u"""
         Create an asynchronous scheduler that executes jobs with the
         given level of concurrency.
         """
-        log.Info("%s: %s" % (self.__class__.__name__,
-                             _("instantiating at concurrency %d") %
-                             (concurrency)))
-        assert concurrency >= 0, "%s concurrency level must be >= 0" % (self.__class__.__name__,)
+        log.Info(u"%s: %s" % (self.__class__.__name__,
+                              _(u"instantiating at concurrency %d") %
+                              (concurrency)))
+        assert concurrency >= 0, u"%s concurrency level must be >= 0" % (self.__class__.__name__,)
 
         self.__failed = False  # has at least one task failed so far?
         self.__failed_waiter = None  # when __failed, the waiter of the first task that failed
@@ -79,10 +79,10 @@
 #                                                    # are not technically efficient.
 
         if concurrency > 0:
-            require_threading("concurrency > 0 (%d)" % (concurrency,))
+            require_threading(u"concurrency > 0 (%d)" % (concurrency,))
 
     def insert_barrier(self):
-        """
+        u"""
         Proclaim that any tasks scheduled prior to the call to this
         method MUST be executed prior to any tasks scheduled after the
         call to this method.
@@ -91,7 +91,7 @@
         barrier must be inserted in between to guarantee that A
         happens before B.
         """
-        log.Debug("%s: %s" % (self.__class__.__name__, _("inserting barrier")))
+        log.Debug(u"%s: %s" % (self.__class__.__name__, _(u"inserting barrier")))
         # With concurrency 0 it's a NOOP, and due to the special case in
         # task scheduling we do not want to append to the queue (will never
         # be popped).
@@ -102,7 +102,7 @@
             with_lock(self.__cv, _insert_barrier)
 
     def schedule_task(self, fn, params):
-        """
+        u"""
         Schedule the given task (callable, typically function) for
         execution. Pass the given parameters to the function when
         calling it. Returns a callable which can optionally be used
@@ -139,20 +139,20 @@
         if self.__concurrency == 0:
             # special case this to not require any platform support for
             # threading at all
-            log.Info("%s: %s" % (self.__class__.__name__,
-                     _("running task synchronously (asynchronicity disabled)")),
+            log.Info(u"%s: %s" % (self.__class__.__name__,
+                     _(u"running task synchronously (asynchronicity disabled)")),
                      log.InfoCode.synchronous_upload_begin)
 
             return self.__run_synchronously(fn, params)
         else:
-            log.Info("%s: %s" % (self.__class__.__name__,
-                     _("scheduling task for asynchronous execution")),
+            log.Info(u"%s: %s" % (self.__class__.__name__,
+                     _(u"scheduling task for asynchronous execution")),
                      log.InfoCode.asynchronous_upload_begin)
 
             return self.__run_asynchronously(fn, params)
 
     def wait(self):
-        """
+        u"""
         Wait for the scheduler to become entirely empty (i.e., all
         tasks having run to completion).
 
@@ -174,8 +174,8 @@
         def _waiter():
             return ret
 
-        log.Info("%s: %s" % (self.__class__.__name__,
-                 _("task completed successfully")),
+        log.Info(u"%s: %s" % (self.__class__.__name__,
+                 _(u"task completed successfully")),
                  log.InfoCode.synchronous_upload_done)
 
         return _waiter
@@ -185,19 +185,19 @@
 
         def check_pending_failure():
             if self.__failed:
-                log.Info("%s: %s" % (self.__class__.__name__,
-                         _("a previously scheduled task has failed; "
-                           "propagating the result immediately")),
+                log.Info(u"%s: %s" % (self.__class__.__name__,
+                         _(u"a previously scheduled task has failed; "
+                           u"propagating the result immediately")),
                          log.InfoCode.asynchronous_upload_done)
                 self.__failed_waiter()
-                raise AssertionError("%s: waiter should have raised an exception; "
-                                     "this is a bug" % (self.__class__.__name__,))
+                raise AssertionError(u"%s: waiter should have raised an exception; "
+                                     u"this is a bug" % (self.__class__.__name__,))
 
         def wait_for_and_register_launch():
             check_pending_failure()  # raise on fail
             while self.__worker_count >= self.__concurrency or self.__barrier:
                 if self.__worker_count == 0:
-                    assert self.__barrier, "barrier should be in effect"
+                    assert self.__barrier, u"barrier should be in effect"
                     self.__barrier = False
                     self.__cv.notifyAll()
                 else:
@@ -208,8 +208,8 @@
                 check_pending_failure()  # raise on fail
 
             self.__worker_count += 1
-            log.Debug("%s: %s" % (self.__class__.__name__,
-                                  _("active workers = %d") % (self.__worker_count,)))
+            log.Debug(u"%s: %s" % (self.__class__.__name__,
+                                   _(u"active workers = %d") % (self.__worker_count,)))
 
         # simply wait for an OK condition to start, then launch our worker. the worker
         # never waits on us, we just wait for them.
@@ -220,7 +220,7 @@
         return waiter
 
     def __start_worker(self, caller):
-        """
+        u"""
         Start a new worker.
         """
         def trampoline():
@@ -229,8 +229,8 @@
             finally:
                 def complete_worker():
                     self.__worker_count -= 1
-                    log.Debug("%s: %s" % (self.__class__.__name__,
-                                          _("active workers = %d") % (self.__worker_count,)))
+                    log.Debug(u"%s: %s" % (self.__class__.__name__,
+                                           _(u"active workers = %d") % (self.__worker_count,)))
                     self.__cv.notifyAll()
                 with_lock(self.__cv, complete_worker)
 
@@ -249,6 +249,6 @@
                         self.__cv.notifyAll()
                 with_lock(self.__cv, _signal_failed)
 
-            log.Info("%s: %s" % (self.__class__.__name__,
-                     _("task execution done (success: %s)") % succeeded),
+            log.Info(u"%s: %s" % (self.__class__.__name__,
+                     _(u"task execution done (success: %s)") % succeeded),
                      log.InfoCode.asynchronous_upload_done)

=== modified file 'duplicity/backend.py'
--- duplicity/backend.py	2018-07-24 11:52:33 +0000
+++ duplicity/backend.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""
+u"""
 Provides a common interface to all backends and certain services
 intended to be used by the backends themselves.
 """
@@ -78,7 +78,7 @@
 
 
 def import_backends():
-    """
+    u"""
     Import files in the duplicity/backends directory where
     the filename ends in 'backend.py' and ignore the rest.
 
@@ -86,26 +86,26 @@
     @return: void
     """
     path = duplicity.backends.__path__[0]
-    assert path.endswith("duplicity/backends"), duplicity.backends.__path__
+    assert path.endswith(u"duplicity/backends"), duplicity.backends.__path__
 
     files = os.listdir(path)
     files.sort()
     for fn in files:
-        if fn.endswith("backend.py"):
+        if fn.endswith(u"backend.py"):
             fn = fn[:-3]
-            imp = "duplicity.backends.%s" % (fn,)
+            imp = u"duplicity.backends.%s" % (fn,)
             try:
                 __import__(imp)
-                res = "Succeeded"
+                res = u"Succeeded"
             except Exception:
-                res = "Failed: " + str(sys.exc_info()[1])
-            log.Log(_("Import of %s %s") % (imp, res), log.INFO)
+                res = u"Failed: " + str(sys.exc_info()[1])
+            log.Log(_(u"Import of %s %s") % (imp, res), log.INFO)
         else:
             continue
 
 
 def register_backend(scheme, backend_factory):
-    """
+    u"""
     Register a given backend factory responsible for URLs with the
     given scheme.
 
@@ -120,18 +120,18 @@
     """
     global _backends
 
-    assert callable(backend_factory), "backend factory must be callable"
+    assert callable(backend_factory), u"backend factory must be callable"
 
     if scheme in _backends:
-        raise ConflictingScheme("the scheme %s already has a backend "
-                                "associated with it"
-                                "" % (scheme,))
+        raise ConflictingScheme(u"the scheme %s already has a backend "
+                                u"associated with it"
+                                u"" % (scheme,))
 
     _backends[scheme] = backend_factory
 
 
 def register_backend_prefix(scheme, backend_factory):
-    """
+    u"""
     Register a given backend factory responsible for URLs with the
     given scheme prefix.
 
@@ -146,25 +146,25 @@
     """
     global _backend_prefixes
 
-    assert callable(backend_factory), "backend factory must be callable"
+    assert callable(backend_factory), u"backend factory must be callable"
 
     if scheme in _backend_prefixes:
-        raise ConflictingScheme("the prefix %s already has a backend "
-                                "associated with it"
-                                "" % (scheme,))
+        raise ConflictingScheme(u"the prefix %s already has a backend "
+                                u"associated with it"
+                                u"" % (scheme,))
 
     _backend_prefixes[scheme] = backend_factory
 
 
 def strip_prefix(url_string, prefix_scheme):
-    """
+    u"""
     strip the prefix from a string e.g. par2+ftp://... -> ftp://...
     """
     return re.sub(r'(?i)^' + re.escape(prefix_scheme) + r'\+', r'', url_string)
 
 
 def is_backend_url(url_string):
-    """
+    u"""
     @return Whether the given string looks like a backend URL.
     """
     pu = ParsedUrl(url_string)
@@ -177,7 +177,7 @@
 
 
 def get_backend_object(url_string):
-    """
+    u"""
     Find the right backend class instance for the given URL, or return None
     if the given string looks like a local path rather than a URL.
 
@@ -189,12 +189,12 @@
     global _backends, _backend_prefixes
 
     pu = ParsedUrl(url_string)
-    assert pu.scheme, "should be a backend url according to is_backend_url"
+    assert pu.scheme, u"should be a backend url according to is_backend_url"
 
     factory = None
 
     for prefix in _backend_prefixes:
-        if url_string.startswith(prefix + '+'):
+        if url_string.startswith(prefix + u'+'):
             factory = _backend_prefixes[prefix]
             pu = ParsedUrl(strip_prefix(url_string, prefix))
             break
@@ -208,18 +208,18 @@
     try:
         return factory(pu)
     except ImportError:
-        raise BackendException(_("Could not initialize backend: %s") % str(sys.exc_info()[1]))
+        raise BackendException(_(u"Could not initialize backend: %s") % str(sys.exc_info()[1]))
 
 
 def get_backend(url_string):
-    """
+    u"""
     Instantiate a backend suitable for the given URL, or return None
     if the given string looks like a local path rather than a URL.
 
     Raise InvalidBackendURL if the URL is not a valid URL.
     """
     if globals.use_gio:
-        url_string = 'gio+' + url_string
+        url_string = u'gio+' + url_string
     obj = get_backend_object(url_string)
     if obj:
         obj = BackendWrapper(obj)
@@ -227,7 +227,7 @@
 
 
 class ParsedUrl:
-    """
+    u"""
     Parse the given URL as a duplicity backend URL.
 
     Returns the data of a parsed URL with the same names as that of
@@ -252,29 +252,29 @@
         try:
             pu = urlparse.urlparse(url_string)
         except Exception:
-            raise InvalidBackendURL("Syntax error in: %s" % url_string)
+            raise InvalidBackendURL(u"Syntax error in: %s" % url_string)
 
         try:
             self.scheme = pu.scheme
         except Exception:
-            raise InvalidBackendURL("Syntax error (scheme) in: %s" % url_string)
+            raise InvalidBackendURL(u"Syntax error (scheme) in: %s" % url_string)
 
         try:
             self.netloc = pu.netloc
         except Exception:
-            raise InvalidBackendURL("Syntax error (netloc) in: %s" % url_string)
+            raise InvalidBackendURL(u"Syntax error (netloc) in: %s" % url_string)
 
         try:
             self.path = pu.path
             if self.path:
                 self.path = urllib.unquote(self.path)
         except Exception:
-            raise InvalidBackendURL("Syntax error (path) in: %s" % url_string)
+            raise InvalidBackendURL(u"Syntax error (path) in: %s" % url_string)
 
         try:
             self.username = pu.username
         except Exception:
-            raise InvalidBackendURL("Syntax error (username) in: %s" % url_string)
+            raise InvalidBackendURL(u"Syntax error (username) in: %s" % url_string)
         if self.username:
             self.username = urllib.unquote(pu.username)
         else:
@@ -283,7 +283,7 @@
         try:
             self.password = pu.password
         except Exception:
-            raise InvalidBackendURL("Syntax error (password) in: %s" % url_string)
+            raise InvalidBackendURL(u"Syntax error (password) in: %s" % url_string)
         if self.password:
             self.password = urllib.unquote(self.password)
         else:
@@ -292,7 +292,7 @@
         try:
             self.hostname = pu.hostname
         except Exception:
-            raise InvalidBackendURL("Syntax error (hostname) in: %s" % url_string)
+            raise InvalidBackendURL(u"Syntax error (hostname) in: %s" % url_string)
 
         # init to None, overwrite with actual value on success
         self.port = None
@@ -300,21 +300,21 @@
             self.port = pu.port
         except Exception:  # not raised in python2.7+, just returns None
             # old style rsync://host::[/]dest, are still valid, though they contain no port
-            if not (self.scheme in ['rsync'] and re.search('::[^:]*$', self.url_string)):
-                raise InvalidBackendURL("Syntax error (port) in: %s A%s B%s C%s" %
-                                        (url_string, (self.scheme in ['rsync']),
-                                         re.search('::[^:]+$', self.netloc), self.netloc))
+            if not (self.scheme in [u'rsync'] and re.search(u'::[^:]*$', self.url_string)):
+                raise InvalidBackendURL(u"Syntax error (port) in: %s A%s B%s C%s" %
+                                        (url_string, (self.scheme in [u'rsync']),
+                                         re.search(u'::[^:]+$', self.netloc), self.netloc))
 
         # Our URL system uses two slashes more than urlparse's does when using
         # non-netloc URLs.  And we want to make sure that if urlparse assuming
         # a netloc where we don't want one, that we correct it.
         if self.scheme not in uses_netloc:
             if self.netloc:
-                self.path = '//' + self.netloc + self.path
-                self.netloc = ''
+                self.path = u'//' + self.netloc + self.path
+                self.netloc = u''
                 self.hostname = None
-            elif not self.path.startswith('//') and self.path.startswith('/'):
-                self.path = '//' + self.path
+            elif not self.path.startswith(u'//') and self.path.startswith(u'/'):
+                self.path = u'//' + self.path
 
         # This happens for implicit local paths.
         if not self.scheme:
@@ -322,33 +322,33 @@
 
         # Our backends do not handle implicit hosts.
         if self.scheme in uses_netloc and not self.hostname:
-            raise InvalidBackendURL("Missing hostname in a backend URL which "
-                                    "requires an explicit hostname: %s"
-                                    "" % (url_string))
+            raise InvalidBackendURL(u"Missing hostname in a backend URL which "
+                                    u"requires an explicit hostname: %s"
+                                    u"" % (url_string))
 
         # Our backends do not handle implicit relative paths.
-        if self.scheme not in uses_netloc and not self.path.startswith('//'):
-            raise InvalidBackendURL("missing // - relative paths not supported "
-                                    "for scheme %s: %s"
-                                    "" % (self.scheme, url_string))
+        if self.scheme not in uses_netloc and not self.path.startswith(u'//'):
+            raise InvalidBackendURL(u"missing // - relative paths not supported "
+                                    u"for scheme %s: %s"
+                                    u"" % (self.scheme, url_string))
 
     def geturl(self):
         return self.url_string
 
 
 def strip_auth_from_url(parsed_url):
-    """Return a URL from a urlparse object without a username or password."""
+    u"""Return a URL from a urlparse object without a username or password."""
 
-    clean_url = re.sub('^([^:/]+://)(.*@)?(.*)', r'\1\3', parsed_url.geturl())
+    clean_url = re.sub(u'^([^:/]+://)(.*@)?(.*)', r'\1\3', parsed_url.geturl())
     return clean_url
 
 
 def _get_code_from_exception(backend, operation, e):
     if isinstance(e, BackendException) and e.code != log.ErrorCode.backend_error:
         return e.code
-    elif hasattr(backend, '_error_code'):
+    elif hasattr(backend, u'_error_code'):
         return backend._error_code(operation, e) or log.ErrorCode.backend_error
-    elif hasattr(e, 'errno'):
+    elif hasattr(e, u'errno'):
         # A few backends return such errors (local, paramiko, etc)
         if e.errno == errno.EACCES:
             return log.ErrorCode.backend_permission_denied
@@ -372,7 +372,7 @@
                     raise e
                 except Exception as e:
                     # retry on anything else
-                    log.Debug(_("Backtrace of previous error: %s")
+                    log.Debug(_(u"Backtrace of previous error: %s")
                               % exception_traceback())
                     at_end = n == globals.num_retries
                     code = _get_code_from_exception(self.backend, operation, e)
@@ -386,19 +386,19 @@
                                 return util.escape(f.uc_name)
                             else:
                                 return util.escape(f)
-                        extra = ' '.join([operation] + [make_filename(x) for x in args if x])
-                        log.FatalError(_("Giving up after %s attempts. %s: %s")
+                        extra = u' '.join([operation] + [make_filename(x) for x in args if x])
+                        log.FatalError(_(u"Giving up after %s attempts. %s: %s")
                                        % (n, e.__class__.__name__,
                                           util.uexc(e)), code=code, extra=extra)
                     else:
-                        log.Warn(_("Attempt %s failed. %s: %s")
+                        log.Warn(_(u"Attempt %s failed. %s: %s")
                                  % (n, e.__class__.__name__, util.uexc(e)))
                     if not at_end:
                         if isinstance(e, TemporaryLoadException):
                             time.sleep(3 * globals.backend_retry_delay)  # wait longer before trying again
                         else:
                             time.sleep(globals.backend_retry_delay)  # wait a bit before trying again
-                        if hasattr(self.backend, '_retry_cleanup'):
+                        if hasattr(self.backend, u'_retry_cleanup'):
                             self.backend._retry_cleanup()
 
         return inner_retry
@@ -406,17 +406,17 @@
 
 
 class Backend(object):
-    """
+    u"""
     See README in backends directory for information on how to write a backend.
     """
     def __init__(self, parsed_url):
         self.parsed_url = parsed_url
 
-    """ use getpass by default, inherited backends may overwrite this behaviour """
+    u""" use getpass by default, inherited backends may overwrite this behaviour """
     use_getpass = True
 
     def get_password(self):
-        """
+        u"""
         Return a password for authentication purposes. The password
         will be obtained from the backend URL, the environment, by
         asking the user, or by some other method. When applicable, the
@@ -426,18 +426,18 @@
             return self.parsed_url.password
 
         try:
-            password = os.environ['FTP_PASSWORD']
+            password = os.environ[u'FTP_PASSWORD']
         except KeyError:
             if self.use_getpass:
-                password = getpass.getpass("Password for '%s@%s': " %
+                password = getpass.getpass(u"Password for '%s@%s': " %
                                            (self.parsed_url.username, self.parsed_url.hostname))
-                os.environ['FTP_PASSWORD'] = password
+                os.environ[u'FTP_PASSWORD'] = password
             else:
                 password = None
         return password
 
     def munge_password(self, commandline):
-        """
+        u"""
         Remove password from commandline by substituting the password
         found in the URL, if any, with a generic place-holder.
 
@@ -451,7 +451,7 @@
             return commandline
 
     def __subprocess_popen(self, args):
-        """
+        u"""
         For internal use.
         Execute the given command line, interpreted as a shell command.
         Returns int Exitcode, string StdOut, string StdErr
@@ -464,12 +464,12 @@
 
         return p.returncode, stdout, stderr
 
-    """ a dictionary for breaking exceptions, syntax is
+    u""" a dictionary for breaking exceptions, syntax is
         { 'command' : [ code1, code2 ], ... } see ftpbackend for an example """
     popen_breaks = {}
 
     def subprocess_popen(self, commandline):
-        """
+        u"""
         Execute the given command line with error check.
         Returns int Exitcode, string StdOut, string StdErr
 
@@ -478,30 +478,30 @@
         import shlex
 
         if isinstance(commandline, (types.ListType, types.TupleType)):
-            logstr = ' '.join(commandline)
+            logstr = u' '.join(commandline)
             args = commandline
         else:
             logstr = commandline
             args = shlex.split(commandline)
 
         logstr = self.munge_password(logstr)
-        log.Info(_("Reading results of '%s'") % logstr)
+        log.Info(_(u"Reading results of '%s'") % logstr)
 
         result, stdout, stderr = self.__subprocess_popen(args)
         if result != 0:
             try:
                 ignores = self.popen_breaks[args[0]]
                 ignores.index(result)
-                """ ignore a predefined set of error codes """
-                return 0, '', ''
+                u""" ignore a predefined set of error codes """
+                return 0, u'', u''
             except (KeyError, ValueError):
-                raise BackendException("Error running '%s': returned %d, with output:\n%s" %
-                                       (logstr, result, stdout + '\n' + stderr))
+                raise BackendException(u"Error running '%s': returned %d, with output:\n%s" %
+                                       (logstr, result, stdout + u'\n' + stderr))
         return result, stdout, stderr
 
 
 class BackendWrapper(object):
-    """
+    u"""
     Represents a generic duplicity backend, capable of storing and
     retrieving files.
     """
@@ -510,15 +510,15 @@
         self.backend = backend
 
     def __do_put(self, source_path, remote_filename):
-        if hasattr(self.backend, '_put'):
-            log.Info(_("Writing %s") % util.fsdecode(remote_filename))
+        if hasattr(self.backend, u'_put'):
+            log.Info(_(u"Writing %s") % util.fsdecode(remote_filename))
             self.backend._put(source_path, remote_filename)
         else:
             raise NotImplementedError()
 
-    @retry('put', fatal=True)
+    @retry(u'put', fatal=True)
     def put(self, source_path, remote_filename=None):
-        """
+        u"""
         Transfer source_path (Path object) to remote_filename (string)
 
         If remote_filename is None, get the filename from the last
@@ -528,9 +528,9 @@
             remote_filename = source_path.get_filename()
         self.__do_put(source_path, remote_filename)
 
-    @retry('move', fatal=True)
+    @retry(u'move', fatal=True)
     def move(self, source_path, remote_filename=None):
-        """
+        u"""
         Move source_path (Path object) to remote_filename (string)
 
         Same as put(), but unlinks source_path in the process.  This allows the
@@ -538,32 +538,32 @@
         """
         if not remote_filename:
             remote_filename = source_path.get_filename()
-        if hasattr(self.backend, '_move'):
+        if hasattr(self.backend, u'_move'):
             if self.backend._move(source_path, remote_filename) is not False:
                 source_path.setdata()
                 return
         self.__do_put(source_path, remote_filename)
         source_path.delete()
 
-    @retry('get', fatal=True)
+    @retry(u'get', fatal=True)
     def get(self, remote_filename, local_path):
-        """Retrieve remote_filename and place in local_path"""
-        if hasattr(self.backend, '_get'):
+        u"""Retrieve remote_filename and place in local_path"""
+        if hasattr(self.backend, u'_get'):
             self.backend._get(remote_filename, local_path)
             local_path.setdata()
             if not local_path.exists():
-                raise BackendException(_("File %s not found locally after get "
-                                         "from backend") % local_path.uc_name)
+                raise BackendException(_(u"File %s not found locally after get "
+                                         u"from backend") % local_path.uc_name)
         else:
             raise NotImplementedError()
 
-    @retry('list', fatal=True)
+    @retry(u'list', fatal=True)
     def list(self):
-        """
+        u"""
         Return list of filenames (byte strings) present in backend
         """
         def tobytes(filename):
-            "Convert a (maybe unicode) filename to bytes"
+            u"Convert a (maybe unicode) filename to bytes"
             if isinstance(filename, unicode):
                 # There shouldn't be any encoding errors for files we care
                 # about, since duplicity filenames are ascii.  But user files
@@ -572,7 +572,7 @@
             else:
                 return filename
 
-        if hasattr(self.backend, '_list'):
+        if hasattr(self.backend, u'_list'):
             # Make sure that duplicity internals only ever see byte strings
             # for filenames, no matter what the backend thinks it is talking.
             return [tobytes(x) for x in self.backend._list()]
@@ -580,26 +580,26 @@
             raise NotImplementedError()
 
     def delete(self, filename_list):
-        """
+        u"""
         Delete each filename in filename_list, in order if possible.
         """
         assert not isinstance(filename_list, types.StringType)
-        if hasattr(self.backend, '_delete_list'):
+        if hasattr(self.backend, u'_delete_list'):
             self._do_delete_list(filename_list)
-        elif hasattr(self.backend, '_delete'):
+        elif hasattr(self.backend, u'_delete'):
             for filename in filename_list:
                 self._do_delete(filename)
         else:
             raise NotImplementedError()
 
-    @retry('delete', fatal=False)
+    @retry(u'delete', fatal=False)
     def _do_delete_list(self, filename_list):
         while filename_list:
             sublist = filename_list[:100]
             self.backend._delete_list(sublist)
             filename_list = filename_list[100:]
 
-    @retry('delete', fatal=False)
+    @retry(u'delete', fatal=False)
     def _do_delete(self, filename):
         self.backend._delete(filename)
 
@@ -614,15 +614,15 @@
     # Returned dictionary is guaranteed to contain a metadata dictionary for
     # each filename, and all metadata are guaranteed to be present.
     def query_info(self, filename_list):
-        """
+        u"""
         Return metadata about each filename in filename_list
         """
         info = {}
-        if hasattr(self.backend, '_query_list'):
+        if hasattr(self.backend, u'_query_list'):
             info = self._do_query_list(filename_list)
             if info is None:
                 info = {}
-        elif hasattr(self.backend, '_query'):
+        elif hasattr(self.backend, u'_query'):
             for filename in filename_list:
                 info[filename] = self._do_query(filename)
 
@@ -631,39 +631,39 @@
         for filename in filename_list:
             if filename not in info or info[filename] is None:
                 info[filename] = {}
-            for metadata in ['size']:
+            for metadata in [u'size']:
                 info[filename].setdefault(metadata, None)
 
         return info
 
-    @retry('query', fatal=False)
+    @retry(u'query', fatal=False)
     def _do_query_list(self, filename_list):
         info = self.backend._query_list(filename_list)
         if info is None:
             info = {}
         return info
 
-    @retry('query', fatal=False)
+    @retry(u'query', fatal=False)
     def _do_query(self, filename):
         try:
             return self.backend._query(filename)
         except Exception as e:
-            code = _get_code_from_exception(self.backend, 'query', e)
+            code = _get_code_from_exception(self.backend, u'query', e)
             if code == log.ErrorCode.backend_not_found:
-                return {'size': -1}
+                return {u'size': -1}
             else:
                 raise e
 
     def close(self):
-        """
+        u"""
         Close the backend, releasing any resources held and
         invalidating any file objects obtained from the backend.
         """
-        if hasattr(self.backend, '_close'):
+        if hasattr(self.backend, u'_close'):
             self.backend._close()
 
     def get_fileobj_read(self, filename, parseresults=None):
-        """
+        u"""
         Return fileobject opened for reading of filename on backend
 
         The file will be downloaded first into a temp file.  When the
@@ -671,14 +671,14 @@
         """
         if not parseresults:
             parseresults = file_naming.parse(filename)
-            assert parseresults, "Filename not correctly parsed"
+            assert parseresults, u"Filename not correctly parsed"
         tdp = dup_temp.new_tempduppath(parseresults)
         self.get(filename, tdp)
         tdp.setdata()
-        return tdp.filtered_open_with_delete("rb")
+        return tdp.filtered_open_with_delete(u"rb")
 
     def get_data(self, filename, parseresults=None):
-        """
+        u"""
         Retrieve a file from backend, process it, return contents.
         """
         fin = self.get_fileobj_read(filename, parseresults)

=== modified file 'duplicity/cached_ops.py'
--- duplicity/cached_ops.py	2018-07-24 11:52:33 +0000
+++ duplicity/cached_ops.py	2018-09-13 20:09:49 +0000
@@ -18,14 +18,14 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Cache-wrapped functions for grp and pwd lookups."""
+u"""Cache-wrapped functions for grp and pwd lookups."""
 
 import grp
 import pwd
 
 
 class CachedCall(object):
-    """Decorator for caching the results of function calls."""
+    u"""Decorator for caching the results of function calls."""
 
     def __init__(self, f):
         self.cache = {}

=== modified file 'duplicity/collections.py'
--- duplicity/collections.py	2018-07-24 11:52:33 +0000
+++ duplicity/collections.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Classes and functions on collections of backup volumes"""
+u"""Classes and functions on collections of backup volumes"""
 
 from future_builtins import filter, map
 
@@ -49,11 +49,11 @@
 
 
 class BackupSet:
-    """
+    u"""
     Backup set - the backup information produced by one session
     """
     def __init__(self, backend, action):
-        """
+        u"""
         Initialize new backup set, only backend is required at first
         """
         self.backend = backend
@@ -70,13 +70,13 @@
         self.action = action
 
     def is_complete(self):
-        """
+        u"""
         Assume complete if found manifest file
         """
         return self.remote_manifest_name
 
     def add_filename(self, filename):
-        """
+        u"""
         Add a filename to given set.  Return true if it fits.
 
         The filename will match the given set if it has the right
@@ -87,7 +87,7 @@
         @type filename: string
         """
         pr = file_naming.parse(filename)
-        if not pr or not (pr.type == "full" or pr.type == "inc"):
+        if not pr or not (pr.type == u"full" or pr.type == u"inc"):
             return False
 
         if not self.info_set:
@@ -115,7 +115,7 @@
         return True
 
     def set_info(self, pr):
-        """
+        u"""
         Set BackupSet information from ParseResults object
 
         @param pr: parse results
@@ -136,14 +136,14 @@
         self.files_changed = mf.get_files_changed()
 
     def set_manifest(self, remote_filename):
-        """
+        u"""
         Add local and remote manifest filenames to backup set
         """
         assert not self.remote_manifest_name, (self.remote_manifest_name,
                                                remote_filename)
         self.remote_manifest_name = remote_filename
 
-        if self.action not in ["collection-status", "replicate"]:
+        if self.action not in [u"collection-status", u"replicate"]:
             local_filename_list = globals.archive_dir_path.listdir()
         else:
             local_filename_list = []
@@ -160,7 +160,7 @@
                 break
 
     def delete(self):
-        """
+        u"""
         Remove all files in set, both local and remote
         """
         rfn = self.get_filenames()
@@ -168,9 +168,9 @@
         try:
             self.backend.delete(rfn)
         except Exception:
-            log.Debug(_("BackupSet.delete: missing %s") % [util.fsdecode(f) for f in rfn])
+            log.Debug(_(u"BackupSet.delete: missing %s") % [util.fsdecode(f) for f in rfn])
             pass
-        if self.action not in ["collection-status", "replicate"]:
+        if self.action not in [u"collection-status", u"replicate"]:
             local_filename_list = globals.archive_dir_path.listdir()
         else:
             local_filename_list = []
@@ -182,12 +182,12 @@
                 try:
                     globals.archive_dir_path.append(lfn).delete()
                 except Exception:
-                    log.Debug(_("BackupSet.delete: missing %s") % [util.fsdecode(f) for f in lfn])
+                    log.Debug(_(u"BackupSet.delete: missing %s") % [util.fsdecode(f) for f in lfn])
                     pass
         util.release_lockfile()
 
     def __unicode__(self):
-        """
+        u"""
         For now just list files in set
         """
         filelist = []
@@ -197,65 +197,65 @@
         return u"[%s]" % u", ".join(map(util.fsdecode, filelist))
 
     def get_timestr(self):
-        """
+        u"""
         Return time string suitable for log statements
         """
         return dup_time.timetopretty(self.time or self.end_time)
 
     def check_manifests(self, check_remote=True):
-        """
+        u"""
         Make sure remote manifest is equal to local one
         """
         if not self.remote_manifest_name and not self.local_manifest_path:
-            log.FatalError(_("Fatal Error: No manifests found for most recent backup"),
+            log.FatalError(_(u"Fatal Error: No manifests found for most recent backup"),
                            log.ErrorCode.no_manifests)
-        assert self.remote_manifest_name, "if only one, should be remote"
+        assert self.remote_manifest_name, u"if only one, should be remote"
 
         remote_manifest = self.get_remote_manifest() if check_remote else None
         if self.local_manifest_path:
             local_manifest = self.get_local_manifest()
         if remote_manifest and self.local_manifest_path and local_manifest:
             if remote_manifest != local_manifest:
-                log.FatalError(_("Fatal Error: Remote manifest does not match "
-                                 "local one.  Either the remote backup set or "
-                                 "the local archive directory has been corrupted."),
+                log.FatalError(_(u"Fatal Error: Remote manifest does not match "
+                                 u"local one.  Either the remote backup set or "
+                                 u"the local archive directory has been corrupted."),
                                log.ErrorCode.mismatched_manifests)
         if not remote_manifest:
             if self.local_manifest_path:
                 remote_manifest = local_manifest
             else:
-                log.FatalError(_("Fatal Error: Neither remote nor local "
-                                 "manifest is readable."),
+                log.FatalError(_(u"Fatal Error: Neither remote nor local "
+                                 u"manifest is readable."),
                                log.ErrorCode.unreadable_manifests)
         remote_manifest.check_dirinfo()
 
     def get_local_manifest(self):
-        """
+        u"""
         Return manifest object by reading local manifest file
         """
         assert self.local_manifest_path
         manifest_buffer = self.local_manifest_path.get_data()
-        log.Info(_("Processing local manifest %s (%s)") % (
+        log.Info(_(u"Processing local manifest %s (%s)") % (
             self.local_manifest_path.name, len(manifest_buffer)))
         return manifest.Manifest().from_string(manifest_buffer)
 
     def get_remote_manifest(self):
-        """
+        u"""
         Return manifest by reading remote manifest on backend
         """
         assert self.remote_manifest_name
         try:
             manifest_buffer = self.backend.get_data(self.remote_manifest_name)
         except GPGError as message:
-            log.Error(_("Error processing remote manifest (%s): %s") %
+            log.Error(_(u"Error processing remote manifest (%s): %s") %
                       (util.fsdecode(self.remote_manifest_name), util.uexc(message)))
             return None
-        log.Info(_("Processing remote manifest %s (%s)") % (
+        log.Info(_(u"Processing remote manifest %s (%s)") % (
             util.fsdecode(self.remote_manifest_name), len(manifest_buffer)))
         return manifest.Manifest().from_string(manifest_buffer)
 
     def get_manifest(self):
-        """
+        u"""
         Return manifest object, showing preference for local copy
         """
         if self.local_manifest_path:
@@ -264,7 +264,7 @@
             return self.get_remote_manifest()
 
     def get_filenames(self):
-        """
+        u"""
         Return sorted list of (remote) filenames of files in set
         """
         assert self.info_set
@@ -282,26 +282,26 @@
         return volume_filenames
 
     def get_time(self):
-        """
+        u"""
         Return time if full backup, or end_time if incremental
         """
         if self.time:
             return self.time
         if self.end_time:
             return self.end_time
-        assert 0, "Neither self.time nor self.end_time set"
+        assert 0, u"Neither self.time nor self.end_time set"
 
     def get_files_changed(self):
         return self.files_changed
 
     def __len__(self):
-        """
+        u"""
         Return the number of volumes in the set
         """
         return len(self.volume_name_dict.keys())
 
     def __eq__(self, other):
-        """
+        u"""
         Return whether this backup set is equal to other
         """
         return self.type == other.type and \
@@ -312,14 +312,14 @@
 
 
 class BackupChain:
-    """
+    u"""
     BackupChain - a number of linked BackupSets
 
     A BackupChain always starts with a full backup set and continues
     with incremental ones.
     """
     def __init__(self, backend):
-        """
+        u"""
         Initialize new chain, only backend is required at first
         """
         self.backend = backend
@@ -328,7 +328,7 @@
         self.start_time, self.end_time = None, None
 
     def set_full(self, fullset):
-        """
+        u"""
         Add full backup set
         """
         assert not self.fullset and isinstance(fullset, BackupSet)
@@ -337,7 +337,7 @@
         self.start_time, self.end_time = fullset.time, fullset.time
 
     def add_inc(self, incset):
-        """
+        u"""
         Add incset to self.  Return False if incset does not match
         """
         if self.end_time == incset.start_time:
@@ -346,22 +346,22 @@
             if (self.incset_list and
                     incset.start_time == self.incset_list[-1].start_time and
                     incset.end_time > self.incset_list[-1]):
-                log.Info(_("Preferring Backupset over previous one!"))
+                log.Info(_(u"Preferring Backupset over previous one!"))
                 self.incset_list[-1] = incset
             else:
-                log.Info(_("Ignoring incremental Backupset (start_time: %s; needed: %s)") %
+                log.Info(_(u"Ignoring incremental Backupset (start_time: %s; needed: %s)") %
                          (dup_time.timetopretty(incset.start_time),
                           dup_time.timetopretty(self.end_time)))
                 return False
         self.end_time = incset.end_time
-        log.Info(_("Added incremental Backupset (start_time: %s / end_time: %s)") %
+        log.Info(_(u"Added incremental Backupset (start_time: %s / end_time: %s)") %
                  (dup_time.timetopretty(incset.start_time),
                   dup_time.timetopretty(incset.end_time)))
         assert self.end_time
         return True
 
     def delete(self, keep_full=False):
-        """
+        u"""
         Delete all sets in chain, in reverse order
         """
         for i in range(len(self.incset_list) - 1, -1, -1):
@@ -370,14 +370,14 @@
             self.fullset.delete()
 
     def get_sets_at_time(self, time):
-        """
+        u"""
         Return a list of sets in chain earlier or equal to time
         """
         older_incsets = [s for s in self.incset_list if s.end_time <= time]
         return [self.fullset] + older_incsets
 
     def get_last(self):
-        """
+        u"""
         Return last BackupSet in chain
         """
         if self.incset_list:
@@ -386,66 +386,66 @@
             return self.fullset
 
     def get_first(self):
-        """
+        u"""
         Return first BackupSet in chain (ie the full backup)
         """
         return self.fullset
 
     def short_desc(self):
-        """
+        u"""
         Return a short one-line description of the chain,
         suitable for log messages.
         """
-        return "[%s]-[%s]" % (dup_time.timetopretty(self.start_time),
-                              dup_time.timetopretty(self.end_time))
+        return u"[%s]-[%s]" % (dup_time.timetopretty(self.start_time),
+                               dup_time.timetopretty(self.end_time))
 
-    def to_log_info(self, prefix=''):
-        """
+    def to_log_info(self, prefix=u''):
+        u"""
         Return summary, suitable for printing to log
         """
         l = []
         for s in self.get_all_sets():
             if s.time:
-                type = "full"
+                type = u"full"
                 time = s.time
             else:
-                type = "inc"
+                type = u"inc"
                 time = s.end_time
             if s.encrypted:
-                enc = "enc"
+                enc = u"enc"
             else:
-                enc = "noenc"
-            l.append("%s%s %s %d %s" % (prefix, type, dup_time.timetostring(time), (len(s)), enc))
+                enc = u"noenc"
+            l.append(u"%s%s %s %d %s" % (prefix, type, dup_time.timetostring(time), (len(s)), enc))
         return l
 
     def __str__(self):
-        """
+        u"""
         Return string representation, for testing purposes
         """
-        set_schema = "%20s   %30s   %15s"
-        l = ["-------------------------",
-             _("Chain start time: ") + dup_time.timetopretty(self.start_time),
-             _("Chain end time: ") + dup_time.timetopretty(self.end_time),
-             _("Number of contained backup sets: %d") %
+        set_schema = u"%20s   %30s   %15s"
+        l = [u"-------------------------",
+             _(u"Chain start time: ") + dup_time.timetopretty(self.start_time),
+             _(u"Chain end time: ") + dup_time.timetopretty(self.end_time),
+             _(u"Number of contained backup sets: %d") %
              (len(self.incset_list) + 1,),
-             _("Total number of contained volumes: %d") %
+             _(u"Total number of contained volumes: %d") %
              (self.get_num_volumes(),),
-             set_schema % (_("Type of backup set:"), _("Time:"), _("Num volumes:"))]
+             set_schema % (_(u"Type of backup set:"), _(u"Time:"), _(u"Num volumes:"))]
 
         for s in self.get_all_sets():
             if s.time:
-                type = _("Full")
+                type = _(u"Full")
                 time = s.time
             else:
-                type = _("Incremental")
+                type = _(u"Incremental")
                 time = s.end_time
             l.append(set_schema % (type, dup_time.timetopretty(time), len(s)))
 
-        l.append("-------------------------")
-        return "\n".join(l)
+        l.append(u"-------------------------")
+        return u"\n".join(l)
 
     def get_num_volumes(self):
-        """
+        u"""
         Return the total number of volumes in the chain
         """
         n = 0
@@ -454,7 +454,7 @@
         return n
 
     def get_all_sets(self):
-        """
+        u"""
         Return list of all backup sets in chain
         """
         if self.fullset:
@@ -464,14 +464,14 @@
 
 
 class SignatureChain:
-    """
+    u"""
     A number of linked SignatureSets
 
     Analog to BackupChain - start with a full-sig, and continue with
     new-sigs.
     """
     def __init__(self, local, location):
-        """
+        u"""
         Return new SignatureChain.
 
         local should be true iff the signature chain resides in
@@ -493,29 +493,29 @@
         self.start_time, self.end_time = None, None
 
     def __str__(self):
-        """
+        u"""
         Local or Remote and List of files in the set
         """
         if self.archive_dir_path:
-            place = _("local")
+            place = _(u"local")
         else:
-            place = _("remote")
+            place = _(u"remote")
         filelist = []
         if self.fullsig:
             filelist.append(self.fullsig)
         filelist.extend(self.inclist)
-        return "%s: [%s]" % (place, ", ".join(filelist))
+        return u"%s: [%s]" % (place, u", ".join(filelist))
 
     def check_times(self, time_list):
-        """
+        u"""
         Check to make sure times are in whole seconds
         """
         for time in time_list:
             if type(time) not in integer_types:
-                assert 0, "Time %s in %s wrong type" % (time, time_list)
+                assert 0, u"Time %s in %s wrong type" % (time, time_list)
 
     def islocal(self):
-        """
+        u"""
         Return true if represents a signature chain in archive_dir_path
         """
         if self.archive_dir_path:
@@ -524,7 +524,7 @@
             return False
 
     def add_filename(self, filename, pr=None):
-        """
+        u"""
         Add new sig filename to current chain.  Return true if fits
         """
         if not pr:
@@ -533,7 +533,7 @@
             return None
 
         if self.fullsig:
-            if pr.type != "new-sig":
+            if pr.type != u"new-sig":
                 return None
             if pr.start_time != self.end_time:
                 return None
@@ -542,7 +542,7 @@
             self.end_time = pr.end_time
             return 1
         else:
-            if pr.type != "full-sig":
+            if pr.type != u"full-sig":
                 return None
             self.fullsig = filename
             self.check_times([pr.time, pr.time])
@@ -550,22 +550,22 @@
             return 1
 
     def get_fileobjs(self, time=None):
-        """
+        u"""
         Return ordered list of signature fileobjs opened for reading,
         optionally at a certain time
         """
         assert self.fullsig
         if self.archive_dir_path:  # local
             def filename_to_fileobj(filename):
-                """Open filename in archive_dir_path, return filtered fileobj"""
+                u"""Open filename in archive_dir_path, return filtered fileobj"""
                 sig_dp = path.DupPath(self.archive_dir_path.name, (filename,))
-                return sig_dp.filtered_open("rb")
+                return sig_dp.filtered_open(u"rb")
         else:
             filename_to_fileobj = self.backend.get_fileobj_read
         return [filename_to_fileobj(f) for f in self.get_filenames(time)]
 
     def delete(self, keep_full=False):
-        """
+        u"""
         Remove all files in signature set
         """
         # Try to delete in opposite order, so something useful even if aborted
@@ -583,7 +583,7 @@
             self.backend.delete(inclist_copy)
 
     def get_filenames(self, time=None):
-        """
+        u"""
         Return ordered list of filenames in set, up to a provided time
         """
         if self.fullsig:
@@ -601,11 +601,11 @@
 
 
 class CollectionsStatus:
-    """
+    u"""
     Hold information about available chains and sets
     """
     def __init__(self, backend, archive_dir_path, action):
-        """
+        u"""
         Make new object.  Does not set values
         """
         self.backend = backend
@@ -631,73 +631,73 @@
         self.values_set = None
 
     def to_log_info(self):
-        """
+        u"""
         Return summary of the collection, suitable for printing to log
         """
-        l = ["backend %s" % (self.backend.__class__.__name__,),
-             "archive-dir %s" % (self.archive_dir_path,)]
+        l = [u"backend %s" % (self.backend.__class__.__name__,),
+             u"archive-dir %s" % (self.archive_dir_path,)]
 
         for i in range(len(self.other_backup_chains)):
             # A bit of a misnomer.  Chain might have a sig.
-            l.append("chain-no-sig %d" % (i,))
-            l += self.other_backup_chains[i].to_log_info(' ')
+            l.append(u"chain-no-sig %d" % (i,))
+            l += self.other_backup_chains[i].to_log_info(u' ')
 
         if self.matched_chain_pair:
-            l.append("chain-complete")
-            l += self.matched_chain_pair[1].to_log_info(' ')
+            l.append(u"chain-complete")
+            l += self.matched_chain_pair[1].to_log_info(u' ')
 
-        l.append("orphaned-sets-num %d" % (len(self.orphaned_backup_sets),))
-        l.append("incomplete-sets-num %d" % (len(self.incomplete_backup_sets),))
+        l.append(u"orphaned-sets-num %d" % (len(self.orphaned_backup_sets),))
+        l.append(u"incomplete-sets-num %d" % (len(self.incomplete_backup_sets),))
 
         return l
 
     def __unicode__(self):
-        """
+        u"""
         Return string summary of the collection
         """
-        l = [_("Collection Status"),
+        l = [_(u"Collection Status"),
              u"-----------------",
-             _("Connecting with backend: %s") %
+             _(u"Connecting with backend: %s") %
              (self.backend.__class__.__name__,),
-             _("Archive dir: %s") % (self.archive_dir_path.uc_name if self.archive_dir_path else 'None',)]
+             _(u"Archive dir: %s") % (self.archive_dir_path.uc_name if self.archive_dir_path else u'None',)]
 
-        l.append("\n" +
-                 ngettext("Found %d secondary backup chain.",
-                          "Found %d secondary backup chains.",
+        l.append(u"\n" +
+                 ngettext(u"Found %d secondary backup chain.",
+                          u"Found %d secondary backup chains.",
                           len(self.other_backup_chains))
                  % len(self.other_backup_chains))
         for i in range(len(self.other_backup_chains)):
-            l.append(_("Secondary chain %d of %d:") %
+            l.append(_(u"Secondary chain %d of %d:") %
                      (i + 1, len(self.other_backup_chains)))
             l.append(unicode(self.other_backup_chains[i]))
-            l.append("")
+            l.append(u"")
 
         if self.matched_chain_pair:
-            l.append("\n" + _("Found primary backup chain with matching "
-                     "signature chain:"))
+            l.append(u"\n" + _(u"Found primary backup chain with matching "
+                     u"signature chain:"))
             l.append(unicode(self.matched_chain_pair[1]))
         else:
-            l.append(_("No backup chains with active signatures found"))
+            l.append(_(u"No backup chains with active signatures found"))
 
         if self.orphaned_backup_sets or self.incomplete_backup_sets:
-            l.append(ngettext("Also found %d backup set not part of any chain,",
-                              "Also found %d backup sets not part of any chain,",
+            l.append(ngettext(u"Also found %d backup set not part of any chain,",
+                              u"Also found %d backup sets not part of any chain,",
                               len(self.orphaned_backup_sets))
                      % (len(self.orphaned_backup_sets),))
-            l.append(ngettext("and %d incomplete backup set.",
-                              "and %d incomplete backup sets.",
+            l.append(ngettext(u"and %d incomplete backup set.",
+                              u"and %d incomplete backup sets.",
                               len(self.incomplete_backup_sets))
                      % (len(self.incomplete_backup_sets),))
             # TRANSL: "cleanup" is a hard-coded command, so do not translate it
-            l.append(_('These may be deleted by running duplicity with the '
-                       '"cleanup" command.'))
+            l.append(_(u'These may be deleted by running duplicity with the '
+                       u'"cleanup" command.'))
         else:
-            l.append(_("No orphaned or incomplete backup sets found."))
+            l.append(_(u"No orphaned or incomplete backup sets found."))
 
         return u"\n".join(l)
 
     def set_values(self, sig_chain_warning=1):
-        """
+        u"""
         Set values from archive_dir_path and backend.
 
         Returns self for convenience.  If sig_chain_warning is set to None,
@@ -708,18 +708,18 @@
 
         # get remote filename list
         backend_filename_list = self.backend.list()
-        log.Debug(ngettext("%d file exists on backend",
-                           "%d files exist on backend",
+        log.Debug(ngettext(u"%d file exists on backend",
+                           u"%d files exist on backend",
                            len(backend_filename_list)) %
                   len(backend_filename_list))
 
         # get local filename list
-        if self.action not in ["collection-status", "replicate"]:
+        if self.action not in [u"collection-status", u"replicate"]:
             local_filename_list = self.archive_dir_path.listdir()
         else:
             local_filename_list = []
-        log.Debug(ngettext("%d file exists in cache",
-                           "%d files exist in cache",
+        log.Debug(ngettext(u"%d file exists in cache",
+                           u"%d files exist in cache",
                            len(local_filename_list)) %
                   len(local_filename_list))
 
@@ -738,7 +738,7 @@
         self.all_backup_chains = backup_chains
 
         assert len(backup_chains) == len(self.all_backup_chains), \
-            "get_sorted_chains() did something more than re-ordering"
+            u"get_sorted_chains() did something more than re-ordering"
 
         local_sig_chains, self.local_orphaned_sig_names = \
             self.get_signature_chains(True)
@@ -750,7 +750,7 @@
         return self
 
     def set_matched_chain_pair(self, sig_chains, backup_chains):
-        """
+        u"""
         Set self.matched_chain_pair and self.other_sig/backup_chains
 
         The latest matched_chain_pair will be set.  If there are both
@@ -771,8 +771,8 @@
                 elif (len(latest_backup_chain.get_all_sets()) >= 2 and
                       sig_chains[i].end_time == latest_backup_chain.get_all_sets()[-2].end_time):
                     # It matches, remove the last backup set:
-                    log.Warn(_("Warning, discarding last backup set, because "
-                               "of missing signature file."))
+                    log.Warn(_(u"Warning, discarding last backup set, because "
+                               u"of missing signature file."))
                     self.incomplete_backup_sets.append(latest_backup_chain.incset_list[-1])
                     latest_backup_chain.incset_list = latest_backup_chain.incset_list[:-1]
                 else:
@@ -788,48 +788,48 @@
             self.other_backup_chains.remove(self.matched_chain_pair[1])
 
     def warn(self, sig_chain_warning):
-        """
+        u"""
         Log various error messages if incomplete/orphaned files are found
         """
         assert self.values_set
 
         if self.local_orphaned_sig_names:
-            log.Warn(ngettext("Warning, found the following local orphaned "
-                              "signature file:",
-                              "Warning, found the following local orphaned "
-                              "signature files:",
+            log.Warn(ngettext(u"Warning, found the following local orphaned "
+                              u"signature file:",
+                              u"Warning, found the following local orphaned "
+                              u"signature files:",
                               len(self.local_orphaned_sig_names)) + u"\n" +
                      u"\n".join(map(util.fsdecode, self.local_orphaned_sig_names)),
                      log.WarningCode.orphaned_sig)
 
         if self.remote_orphaned_sig_names:
-            log.Warn(ngettext("Warning, found the following remote orphaned "
-                              "signature file:",
-                              "Warning, found the following remote orphaned "
-                              "signature files:",
+            log.Warn(ngettext(u"Warning, found the following remote orphaned "
+                              u"signature file:",
+                              u"Warning, found the following remote orphaned "
+                              u"signature files:",
                               len(self.remote_orphaned_sig_names)) + u"\n" +
                      u"\n".join(map(util.fsdecode, self.remote_orphaned_sig_names)),
                      log.WarningCode.orphaned_sig)
 
         if self.all_sig_chains and sig_chain_warning and not self.matched_chain_pair:
-            log.Warn(_("Warning, found signatures but no corresponding "
-                       "backup files"), log.WarningCode.unmatched_sig)
+            log.Warn(_(u"Warning, found signatures but no corresponding "
+                       u"backup files"), log.WarningCode.unmatched_sig)
 
         if self.incomplete_backup_sets:
-            log.Warn(_("Warning, found incomplete backup sets, probably left "
-                       "from aborted session"), log.WarningCode.incomplete_backup)
+            log.Warn(_(u"Warning, found incomplete backup sets, probably left "
+                       u"from aborted session"), log.WarningCode.incomplete_backup)
 
         if self.orphaned_backup_sets:
-            log.Warn(ngettext("Warning, found the following orphaned "
-                              "backup file:",
-                              "Warning, found the following orphaned "
-                              "backup files:",
+            log.Warn(ngettext(u"Warning, found the following orphaned "
+                              u"backup file:",
+                              u"Warning, found the following orphaned "
+                              u"backup files:",
                               len(self.orphaned_backup_sets)) + u"\n" +
                      u"\n".join(map(unicode, self.orphaned_backup_sets)),
                      log.WarningCode.orphaned_backup)
 
     def get_backup_chains(self, filename_list):
-        """
+        u"""
         Split given filename_list into chains
 
         Return value will be tuple (list of chains, list of sets, list
@@ -837,26 +837,26 @@
         not fitting into any chain, and the incomplete sets are sets
         missing files.
         """
-        log.Debug(_("Extracting backup chains from list of files: %s")
+        log.Debug(_(u"Extracting backup chains from list of files: %s")
                   % [util.fsdecode(f) for f in filename_list])
         # First put filenames in set form
         sets = []
 
         def add_to_sets(filename):
-            """
+            u"""
             Try adding filename to existing sets, or make new one
             """
             for set in sets:
                 if set.add_filename(filename):
-                    log.Debug(_("File %s is part of known set") % (util.fsdecode(filename),))
+                    log.Debug(_(u"File %s is part of known set") % (util.fsdecode(filename),))
                     break
             else:
-                log.Debug(_("File %s is not part of a known set; creating new set") % (util.fsdecode(filename),))
+                log.Debug(_(u"File %s is not part of a known set; creating new set") % (util.fsdecode(filename),))
                 new_set = BackupSet(self.backend, self.action)
                 if new_set.add_filename(filename):
                     sets.append(new_set)
                 else:
-                    log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.fsdecode(filename))
+                    log.Debug(_(u"Ignoring file (rejected by backup set) '%s'") % util.fsdecode(filename))
 
         for f in filename_list:
             add_to_sets(f)
@@ -865,37 +865,37 @@
         chains, orphaned_sets = [], []
 
         def add_to_chains(set):
-            """
+            u"""
             Try adding set to existing chains, or make new one
             """
-            if set.type == "full":
+            if set.type == u"full":
                 new_chain = BackupChain(self.backend)
                 new_chain.set_full(set)
                 chains.append(new_chain)
-                log.Debug(_("Found backup chain %s") % (new_chain.short_desc()))
+                log.Debug(_(u"Found backup chain %s") % (new_chain.short_desc()))
             else:
-                assert set.type == "inc"
+                assert set.type == u"inc"
                 for chain in chains:
                     if chain.add_inc(set):
-                        log.Debug(_("Added set %s to pre-existing chain %s") % (set.get_timestr(),
-                                                                                chain.short_desc()))
+                        log.Debug(_(u"Added set %s to pre-existing chain %s") % (set.get_timestr(),
+                                                                                 chain.short_desc()))
                         break
                 else:
-                    log.Debug(_("Found orphaned set %s") % (set.get_timestr(),))
+                    log.Debug(_(u"Found orphaned set %s") % (set.get_timestr(),))
                     orphaned_sets.append(set)
         for s in sets:
             add_to_chains(s)
         return (chains, orphaned_sets, incomplete_sets)
 
     def get_sorted_sets(self, set_list):
-        """
+        u"""
         Sort set list by end time, return (sorted list, incomplete)
         """
         time_set_pairs, incomplete_sets = [], []
         for set in set_list:
             if not set.is_complete():
                 incomplete_sets.append(set)
-            elif set.type == "full":
+            elif set.type == u"full":
                 time_set_pairs.append((set.time, set))
             else:
                 time_set_pairs.append((set.end_time, set))
@@ -903,7 +903,7 @@
         return ([p[1] for p in time_set_pairs], incomplete_sets)
 
     def get_signature_chains(self, local, filelist=None):
-        """
+        u"""
         Find chains in archive_dir_path (if local is true) or backend
 
         Use filelist if given, otherwise regenerate.  Return value is
@@ -914,7 +914,7 @@
             if filelist is not None:
                 return filelist
             elif local:
-                if self.action not in ["collection-status", "replicate"]:
+                if self.action not in [u"collection-status", u"replicate"]:
                     return self.archive_dir_path.listdir()
                 else:
                     return []
@@ -922,7 +922,7 @@
                 return self.backend.list()
 
         def get_new_sigchain():
-            """
+            u"""
             Return new empty signature chain
             """
             if local:
@@ -935,11 +935,11 @@
         for filename in get_filelist():
             pr = file_naming.parse(filename)
             if pr:
-                if pr.type == "full-sig":
+                if pr.type == u"full-sig":
                     new_chain = get_new_sigchain()
                     assert new_chain.add_filename(filename, pr)
                     chains.append(new_chain)
-                elif pr.type == "new-sig":
+                elif pr.type == u"new-sig":
                     new_sig_filenames.append(filename)
 
         # compare by file time
@@ -958,7 +958,7 @@
         return (chains, orphaned_filenames)
 
     def get_sorted_chains(self, chain_list):
-        """
+        u"""
         Return chains sorted by end_time.  If tie, local goes last
         """
         # Build dictionary from end_times to lists of corresponding chains
@@ -989,7 +989,7 @@
         return sorted_chain_list
 
     def get_backup_chain_at_time(self, time):
-        """
+        u"""
         Return backup chain covering specified time
 
         Tries to find the backup chain covering the given time.  If
@@ -997,12 +997,12 @@
         that, the earliest chain.
         """
         if not self.all_backup_chains:
-            raise CollectionsError("No backup chains found")
+            raise CollectionsError(u"No backup chains found")
 
         covering_chains = [c for c in self.all_backup_chains
                            if c.start_time <= time <= c.end_time]
         if len(covering_chains) > 1:
-            raise CollectionsError("Two chains cover the given time")
+            raise CollectionsError(u"Two chains cover the given time")
         elif len(covering_chains) == 1:
             return covering_chains[0]
 
@@ -1013,7 +1013,7 @@
             return self.all_backup_chains[0]  # no chains are old enough
 
     def get_signature_chain_at_time(self, time):
-        """
+        u"""
         Return signature chain covering specified time
 
         Tries to find the signature chain covering the given time.  If
@@ -1021,7 +1021,7 @@
         that, the earliest chain.
         """
         if not self.all_sig_chains:
-            raise CollectionsError("No signature chains found")
+            raise CollectionsError(u"No signature chains found")
 
         covering_chains = [c for c in self.all_sig_chains
                            if c.start_time <= time <= c.end_time]
@@ -1035,15 +1035,15 @@
             # no chains are old enough, give oldest and warn user
             oldest = self.all_sig_chains[0]
             if time < oldest.start_time:
-                log.Warn(_("No signature chain for the requested time. "
-                           "Using oldest available chain, starting at time %s.") %
+                log.Warn(_(u"No signature chain for the requested time. "
+                           u"Using oldest available chain, starting at time %s.") %
                          dup_time.timetopretty(oldest.start_time),
                          log.WarningCode.no_sig_for_time,
                          dup_time.timetostring(oldest.start_time))
             return oldest
 
     def get_extraneous(self, extra_clean):
-        """
+        u"""
         Return list of the names of extraneous duplicity files
 
         A duplicity file is considered extraneous if it is
@@ -1073,13 +1073,13 @@
         return local_filenames, remote_filenames
 
     def sort_sets(self, setlist):
-        """Return new list containing same elems of setlist, sorted by time"""
+        u"""Return new list containing same elems of setlist, sorted by time"""
         pairs = [(s.get_time(), s) for s in setlist]
         pairs.sort()
         return [p[1] for p in pairs]
 
     def get_chains_older_than(self, t):
-        """
+        u"""
         Returns a list of backup chains older than the given time t
 
         All of the times will be associated with an intact chain.
@@ -1099,7 +1099,7 @@
         return old_chains
 
     def get_signature_chains_older_than(self, t):
-        """
+        u"""
         Returns a list of signature chains older than the given time t
 
         All of the times will be associated with an intact chain.
@@ -1119,14 +1119,14 @@
         return old_chains
 
     def get_last_full_backup_time(self):
-        """
+        u"""
         Return the time of the last full backup,
         or 0 if there is none.
         """
         return self.get_nth_last_full_backup_time(1)
 
     def get_nth_last_full_backup_time(self, n):
-        """
+        u"""
         Return the time of the nth to last full backup,
         or 0 if there is none.
         """
@@ -1137,14 +1137,14 @@
             return chain.get_first().time
 
     def get_last_backup_chain(self):
-        """
+        u"""
         Return the last full backup of the collection,
         or None if there is no full backup chain.
         """
         return self.get_nth_last_backup_chain(1)
 
     def get_nth_last_backup_chain(self, n):
-        """
+        u"""
         Return the nth-to-last full backup of the collection,
         or None if there are fewer than n backup chains.
 
@@ -1168,7 +1168,7 @@
         return sorted[n - 1]
 
     def get_older_than(self, t):
-        """
+        u"""
         Returns a list of backup sets older than the given time t
 
         All of the times will be associated with an intact chain.
@@ -1183,7 +1183,7 @@
         return self.sort_sets(old_sets)
 
     def get_older_than_required(self, t):
-        """
+        u"""
         Returns list of old backup sets required by new sets
 
         This function is similar to the previous one, but it only
@@ -1199,16 +1199,16 @@
         return self.sort_sets(result_sets)
 
     def get_file_changed_record(self, filepath):
-        """
+        u"""
         Returns the timeline of changes for the specified file
         """
         # quick fix for spaces in filepath
         modified_filepath = filepath
-        if " " in filepath:
-            modified_filepath = '"' + filepath.replace(" ", r"\x20") + '"'
+        if u" " in filepath:
+            modified_filepath = u'"' + filepath.replace(u" ", r"\x20") + u'"'
 
         if not self.matched_chain_pair:
-            return ""
+            return u""
 
         all_backup_set = self.matched_chain_pair[1].get_all_sets()
         specified_file_backup_set = []
@@ -1230,20 +1230,20 @@
         self.fileinfo_list = fileinfo_list
 
     def __unicode__(self):
-        set_schema = "%20s   %30s  %20s"
-        l = ["-------------------------",
-             _("File: %s") % (self.filepath),
-             _("Total number of backup: %d") % len(self.fileinfo_list),
-             set_schema % (_("Type of backup set:"), _("Time:"), _("Type of file change:"))]
+        set_schema = u"%20s   %30s  %20s"
+        l = [u"-------------------------",
+             _(u"File: %s") % (self.filepath),
+             _(u"Total number of backup: %d") % len(self.fileinfo_list),
+             set_schema % (_(u"Type of backup set:"), _(u"Time:"), _(u"Type of file change:"))]
 
         for s in self.fileinfo_list:
             backup_type = s[0]
             backup_set = s[1]
             if backup_set.time:
-                type = _("Full")
+                type = _(u"Full")
             else:
-                type = _("Incremental")
+                type = _(u"Incremental")
             l.append(set_schema % (type, dup_time.timetopretty(backup_set.get_time()), backup_type.title()))
 
-        l.append("-------------------------")
-        return "\n".join(l)
+        l.append(u"-------------------------")
+        return u"\n".join(l)

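A note on why the adornment matters here: under Python 2, interpolating raw
filesystem bytes into a unicode template triggers an implicit ASCII decode,
which fails on non-ASCII filenames. A minimal sketch (hypothetical, not part
of the patch; Python 2 only) of the failure mode that decoding first, as
util.fsdecode() does, avoids:

    # Hypothetical sketch, Python 2: why the code decodes before formatting.
    fs_name = b'sk\xc3\xa5l.txt'  # raw bytes, as os.listdir() returns on POSIX

    try:
        u"File %s is part of known set" % fs_name  # implicit ASCII decode
    except UnicodeDecodeError:
        pass  # 0xc3 is not ASCII, so the interpolation itself fails

    # Decoding explicitly first (what util.fsdecode() does) is safe:
    msg = u"File %s is part of known set" % fs_name.decode('utf-8')
    assert isinstance(msg, unicode)

Adorning the literals with u'' makes the unicode intent explicit and keeps the
behaviour identical once the code runs under Python 3.
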
=== modified file 'duplicity/commandline.py'
--- duplicity/commandline.py	2018-07-24 11:52:33 +0000
+++ duplicity/commandline.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Parse command line, check for consistency, and set globals"""
+u"""Parse command line, check for consistency, and set globals"""
 from __future__ import print_function
 
 from future_builtins import filter
@@ -57,31 +57,31 @@
 verify = None  # Set to true if verify command given
 replicate = None  # Set to true if replicate command given
 
-commands = ["cleanup",
-            "collection-status",
-            "full",
-            "incremental",
-            "list-current-files",
-            "remove-older-than",
-            "remove-all-but-n-full",
-            "remove-all-inc-of-but-n-full",
-            "restore",
-            "verify",
-            "replicate"
+commands = [u"cleanup",
+            u"collection-status",
+            u"full",
+            u"incremental",
+            u"list-current-files",
+            u"remove-older-than",
+            u"remove-all-but-n-full",
+            u"remove-all-inc-of-but-n-full",
+            u"restore",
+            u"verify",
+            u"replicate"
             ]
 
 
 def old_fn_deprecation(opt):
-    log.Log(_("Warning: Option %s is pending deprecation "
-              "and will be removed in a future release.\n"
-              "Use of default filenames is strongly suggested.") % opt,
+    log.Log(_(u"Warning: Option %s is pending deprecation "
+              u"and will be removed in a future release.\n"
+              u"Use of default filenames is strongly suggested.") % opt,
             log.ERROR, force_print=True)
 
 
 def old_globbing_filelist_deprecation(opt):
-    log.Log(_("Warning: Option %s is pending deprecation and will be removed in a future release.\n"
-              "--include-filelist and --exclude-filelist now accept globbing characters and should "
-              "be used instead.") % opt,
+    log.Log(_(u"Warning: Option %s is pending deprecation and will be removed in a future release.\n"
+              u"--include-filelist and --exclude-filelist now accept globbing characters and should "
+              u"be used instead.") % opt,
             log.ERROR, force_print=True)
 
 
@@ -89,10 +89,10 @@
     # See https://bugs.launchpad.net/duplicity/+bug/1423367
     # In almost all Linux distros stdin is a file represented by /dev/stdin,
     # so --exclude-filelist=/dev/stdin will work as a substitute.
-    log.Log(_("Warning: Option %s is pending deprecation and will be removed in a future release.\n"
-              "On many GNU/Linux systems, stdin is represented by /dev/stdin and\n"
-              "--include-filelist=/dev/stdin or --exclude-filelist=/dev/stdin could\n"
-              "be used as a substitute.") % opt,
+    log.Log(_(u"Warning: Option %s is pending deprecation and will be removed in a future release.\n"
+              u"On many GNU/Linux systems, stdin is represented by /dev/stdin and\n"
+              u"--include-filelist=/dev/stdin or --exclude-filelist=/dev/stdin could\n"
+              u"be used as a substitute.") % opt,
             log.ERROR, force_print=True)
 
 
@@ -101,17 +101,17 @@
 
 
 def expand_archive_dir(archdir, backname):
-    """
+    u"""
     Return expanded version of archdir joined with backname.
     """
     assert globals.backup_name is not None, \
-        "expand_archive_dir() called prior to globals.backup_name being set"
+        u"expand_archive_dir() called prior to globals.backup_name being set"
 
     return expand_fn(os.path.join(archdir, backname))
 
 
 def generate_default_backup_name(backend_url):
-    """
+    u"""
     @param backend_url: URL to backend.
     @returns A default backup name (string).
     """
@@ -146,15 +146,15 @@
     fail = False
 
     value = value.lower()
-    if value in ['e', 'error']:
+    if value in [u'e', u'error']:
         verb = log.ERROR
-    elif value in ['w', 'warning']:
+    elif value in [u'w', u'warning']:
         verb = log.WARNING
-    elif value in ['n', 'notice']:
+    elif value in [u'n', u'notice']:
         verb = log.NOTICE
-    elif value in ['i', 'info']:
+    elif value in [u'i', u'info']:
         verb = log.INFO
-    elif value in ['d', 'debug']:
+    elif value in [u'd', u'debug']:
         verb = log.DEBUG
     else:
         try:
@@ -168,32 +168,32 @@
         # TRANSL: In this portion of the usage instructions, "[ewnid]" indicates which
         # characters are permitted (e, w, n, i, or d); the brackets imply their own
         # meaning in regex; i.e., only one of the characters is allowed in an instance.
-        raise optparse.OptionValueError("Verbosity must be one of: digit [0-9], character [ewnid], "
-                                        "or word ['error', 'warning', 'notice', 'info', 'debug']. "
-                                        "The default is 4 (Notice).  It is strongly recommended "
-                                        "that verbosity level is set at 2 (Warning) or higher.")
+        raise optparse.OptionValueError(u"Verbosity must be one of: digit [0-9], character [ewnid], "
+                                        u"or word ['error', 'warning', 'notice', 'info', 'debug']. "
+                                        u"The default is 4 (Notice).  It is strongly recommended "
+                                        u"that verbosity level is set at 2 (Warning) or higher.")
 
     return verb
 
 
 class DupOption(optparse.Option):
-    TYPES = optparse.Option.TYPES + ("file", "time", "verbosity",)
+    TYPES = optparse.Option.TYPES + (u"file", u"time", u"verbosity",)
     TYPE_CHECKER = copy(optparse.Option.TYPE_CHECKER)
-    TYPE_CHECKER["file"] = check_file
-    TYPE_CHECKER["time"] = check_time
-    TYPE_CHECKER["verbosity"] = check_verbosity
+    TYPE_CHECKER[u"file"] = check_file
+    TYPE_CHECKER[u"time"] = check_time
+    TYPE_CHECKER[u"verbosity"] = check_verbosity
 
-    ACTIONS = optparse.Option.ACTIONS + ("extend",)
-    STORE_ACTIONS = optparse.Option.STORE_ACTIONS + ("extend",)
-    TYPED_ACTIONS = optparse.Option.TYPED_ACTIONS + ("extend",)
-    ALWAYS_TYPED_ACTIONS = optparse.Option.ALWAYS_TYPED_ACTIONS + ("extend",)
+    ACTIONS = optparse.Option.ACTIONS + (u"extend",)
+    STORE_ACTIONS = optparse.Option.STORE_ACTIONS + (u"extend",)
+    TYPED_ACTIONS = optparse.Option.TYPED_ACTIONS + (u"extend",)
+    ALWAYS_TYPED_ACTIONS = optparse.Option.ALWAYS_TYPED_ACTIONS + (u"extend",)
 
     def take_action(self, action, dest, opt, value, values, parser):
-        if action == "extend":
+        if action == u"extend":
             if not value:
                 return
             if hasattr(values, dest) and getattr(values, dest):
-                setattr(values, dest, getattr(values, dest) + ' ' + value)
+                setattr(values, dest, getattr(values, dest) + u' ' + value)
             else:
                 setattr(values, dest, value)
         else:
@@ -201,7 +201,7 @@
                 self, action, dest, opt, value, values, parser)
 
 
-"""
+u"""
 Fix:
     File "/usr/lib/pythonX.X/optparse.py", line XXXX, in print_help
     file.write(self.format_help().encode(encoding, "replace"))
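
The traceback quoted in that docstring stems from a Python 2 quirk: calling
.encode() on a byte string first decodes it via ASCII, which fails as soon as
translated help text contains non-ASCII characters. A hypothetical,
self-contained reproduction (Python 2 only, not part of the patch):

    # Sketch of the optparse failure that OPHelpFix (below) works around.
    help_bytes = 'Verbosit\xc3\xa4t einstellen'  # UTF-8 encoded help text (str)

    try:
        help_bytes.encode('utf-8', 'replace')  # str.encode() decodes via ASCII first
    except UnicodeDecodeError:
        pass  # this is the print_help() traceback quoted above

    # Decoding to unicode before encoding, as the override below does, works:
    assert help_bytes.decode('utf-8').encode('utf-8', 'replace') == help_bytes
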
@@ -215,15 +215,15 @@
 class OPHelpFix(optparse.OptionParser):
 
     def _get_encoding(self, file):
-        """
+        u"""
         try to get the encoding or use UTF-8,
         which is the default encoding in Python 3 and most recent Unixes
         """
-        encoding = getattr(file, "encoding", None)
-        return encoding or 'utf-8'
+        encoding = getattr(file, u"encoding", None)
+        return encoding or u'utf-8'
 
     def print_help(self, file=None):
-        """
+        u"""
         override the method with proper utf-8 decoding
         """
         if file is None:
@@ -232,23 +232,23 @@
         help = self.format_help()
         # The help is in unicode or bytes depending on the user's locale
         if not isinstance(help, unicode):
-            help = self.format_help().decode('utf-8')
-        file.write(help.encode(encoding, "replace"))
+            help = self.format_help().decode(u'utf-8')
+        file.write(help.encode(encoding, u"replace"))
 
 
 def parse_cmdline_options(arglist):
-    """Parse argument list"""
+    u"""Parse argument list"""
     global select_opts, select_files, full_backup
     global list_current, collection_status, cleanup, remove_time, verify, replicate
 
     def set_log_fd(fd):
         if fd < 1:
-            raise optparse.OptionValueError("log-fd must be greater than zero.")
+            raise optparse.OptionValueError(u"log-fd must be greater than zero.")
         log.add_fd(fd)
 
     def set_time_sep(sep, opt):
-        if sep == '-':
-            raise optparse.OptionValueError("Dash ('-') not valid for time-separator.")
+        if sep == u'-':
+            raise optparse.OptionValueError(u"Dash ('-') not valid for time-separator.")
         globals.time_separator = sep
         old_fn_deprecation(opt)
 
@@ -258,13 +258,13 @@
     def add_filelist(o, s, filename, p):
         select_opts.append((util.fsdecode(s), util.fsdecode(filename)))
         try:
-            select_files.append(io.open(filename, "rt", encoding="UTF-8"))
+            select_files.append(io.open(filename, u"rt", encoding=u"UTF-8"))
         except IOError:
-            log.FatalError(_("Error opening file %s") % filename,
+            log.FatalError(_(u"Error opening file %s") % filename,
                            log.ErrorCode.cant_open_filelist)
 
     def print_ver(o, s, v, p):
-        print("duplicity %s" % (globals.version))
+        print(u"duplicity %s" % (globals.version))
         sys.exit(0)
 
     def add_rename(o, s, v, p):
@@ -274,244 +274,244 @@
 
     # If this is true, only warn and don't raise fatal error when backup
     # source directory doesn't match previous backup source directory.
-    parser.add_option("--allow-source-mismatch", action="store_true")
+    parser.add_option(u"--allow-source-mismatch", action=u"store_true")
 
     # Set to the path of the archive directory (the directory which
     # contains the signatures and manifests of the relevant backup
     # collection), and for checkpoint state between volumes.
     # TRANSL: Used in usage help to represent a Unix-style path name. Example:
     # --archive-dir <path>
-    parser.add_option("--archive-dir", type="file", metavar=_("path"))
+    parser.add_option(u"--archive-dir", type=u"file", metavar=_(u"path"))
 
     # Asynchronous put/get concurrency limit
     # (default of 0 disables asynchronicity).
-    parser.add_option("--asynchronous-upload", action="store_const", const=1,
-                      dest="async_concurrency")
+    parser.add_option(u"--asynchronous-upload", action=u"store_const", const=1,
+                      dest=u"async_concurrency")
 
-    parser.add_option("--compare-data", action="store_true")
+    parser.add_option(u"--compare-data", action=u"store_true")
 
     # config dir for future use
-    parser.add_option("--config-dir", type="file", metavar=_("path"),
+    parser.add_option(u"--config-dir", type=u"file", metavar=_(u"path"),
                       help=optparse.SUPPRESS_HELP)
 
     # When symlinks are encountered, the item they point to is copied rather than
     # the symlink.
-    parser.add_option("--copy-links", action="store_true")
+    parser.add_option(u"--copy-links", action=u"store_true")
 
     # for testing -- set current time
-    parser.add_option("--current-time", type="int",
-                      dest="current_time", help=optparse.SUPPRESS_HELP)
+    parser.add_option(u"--current-time", type=u"int",
+                      dest=u"current_time", help=optparse.SUPPRESS_HELP)
 
     # Don't actually do anything, but still report what would be done
-    parser.add_option("--dry-run", action="store_true")
+    parser.add_option(u"--dry-run", action=u"store_true")
 
     # TRANSL: Used in usage help to represent an ID for a GnuPG key. Example:
     # --encrypt-key <gpg_key_id>
-    parser.add_option("--encrypt-key", type="string", metavar=_("gpg-key-id"),
-                      dest="", action="callback",
+    parser.add_option(u"--encrypt-key", type=u"string", metavar=_(u"gpg-key-id"),
+                      dest=u"", action=u"callback",
                       callback=lambda o, s, v, p: globals.gpg_profile.recipients.append(v))  # @UndefinedVariable
 
     # secret keyring in which the private encrypt key can be found
-    parser.add_option("--encrypt-secret-keyring", type="string", metavar=_("path"))
+    parser.add_option(u"--encrypt-secret-keyring", type=u"string", metavar=_(u"path"))
 
-    parser.add_option("--encrypt-sign-key", type="string", metavar=_("gpg-key-id"),
-                      dest="", action="callback",
+    parser.add_option(u"--encrypt-sign-key", type=u"string", metavar=_(u"gpg-key-id"),
+                      dest=u"", action=u"callback",
                       callback=lambda o, s, v, p: (globals.gpg_profile.recipients.append(v), set_sign_key(v)))
 
     # TRANSL: Used in usage help to represent a "glob" style pattern for
     # matching one or more files, as described in the documentation.
     # Example:
     # --exclude <shell_pattern>
-    parser.add_option("--exclude", action="callback", metavar=_("shell_pattern"),
-                      dest="", type="string", callback=add_selection)
-
-    parser.add_option("--exclude-device-files", action="callback",
-                      dest="", callback=add_selection)
-
-    parser.add_option("--exclude-filelist", type="file", metavar=_("filename"),
-                      dest="", action="callback", callback=add_filelist)
-
-    parser.add_option("--exclude-filelist-stdin", action="callback", dest="",
-                      callback=lambda o, s, v, p: (select_opts.append(("--exclude-filelist", "standard input")),
+    parser.add_option(u"--exclude", action=u"callback", metavar=_(u"shell_pattern"),
+                      dest=u"", type=u"string", callback=add_selection)
+
+    parser.add_option(u"--exclude-device-files", action=u"callback",
+                      dest=u"", callback=add_selection)
+
+    parser.add_option(u"--exclude-filelist", type=u"file", metavar=_(u"filename"),
+                      dest=u"", action=u"callback", callback=add_filelist)
+
+    parser.add_option(u"--exclude-filelist-stdin", action=u"callback", dest=u"",
+                      callback=lambda o, s, v, p: (select_opts.append((u"--exclude-filelist", u"standard input")),
                                                    select_files.append(sys.stdin),
                                                    stdin_deprecation(o)),
                       help=optparse.SUPPRESS_HELP)
 
-    parser.add_option("--exclude-globbing-filelist", type="file", metavar=_("filename"),
-                      dest="", action="callback", callback=lambda o, s, v, p: (add_filelist(o, s, v, p),
-                                                                               old_globbing_filelist_deprecation(s)),
+    parser.add_option(u"--exclude-globbing-filelist", type=u"file", metavar=_(u"filename"),
+                      dest=u"", action=u"callback", callback=lambda o, s, v, p: (add_filelist(o, s, v, p),
+                                                                                 old_globbing_filelist_deprecation(s)),
                       help=optparse.SUPPRESS_HELP)
 
     # TRANSL: Used in usage help to represent the name of a file. Example:
     # --log-file <filename>
-    parser.add_option("--exclude-if-present", metavar=_("filename"), dest="",
-                      type="file", action="callback", callback=add_selection)
+    parser.add_option(u"--exclude-if-present", metavar=_(u"filename"), dest=u"",
+                      type=u"file", action=u"callback", callback=add_selection)
 
-    parser.add_option("--exclude-other-filesystems", action="callback",
-                      dest="", callback=add_selection)
+    parser.add_option(u"--exclude-other-filesystems", action=u"callback",
+                      dest=u"", callback=add_selection)
 
     # TRANSL: Used in usage help to represent a regular expression (regexp).
-    parser.add_option("--exclude-regexp", metavar=_("regular_expression"),
-                      dest="", type="string", action="callback", callback=add_selection)
+    parser.add_option(u"--exclude-regexp", metavar=_(u"regular_expression"),
+                      dest=u"", type=u"string", action=u"callback", callback=add_selection)
 
     # Exclude any files with modification dates older than this from the backup
-    parser.add_option("--exclude-older-than", type="time", metavar=_("time"),
-                      dest="", action="callback", callback=add_selection)
+    parser.add_option(u"--exclude-older-than", type=u"time", metavar=_(u"time"),
+                      dest=u"", action=u"callback", callback=add_selection)
 
     # Whether we should be particularly aggressive when cleaning up
-    parser.add_option("--extra-clean", action="store_true")
+    parser.add_option(u"--extra-clean", action=u"store_true")
 
     # used in testing only - raises exception after volume
-    parser.add_option("--fail-on-volume", type="int",
+    parser.add_option(u"--fail-on-volume", type=u"int",
                       help=optparse.SUPPRESS_HELP)
 
     # used to provide a prefix on top of the default tar file name
-    parser.add_option("--file-prefix", type="string", dest="file_prefix", action="store")
+    parser.add_option(u"--file-prefix", type=u"string", dest=u"file_prefix", action=u"store")
 
     # used to provide a prefix for manifest files only
-    parser.add_option("--file-prefix-manifest", type="string", dest="file_prefix_manifest", action="store")
+    parser.add_option(u"--file-prefix-manifest", type=u"string", dest=u"file_prefix_manifest", action=u"store")
 
     # used to provide a prefix for archive files only
-    parser.add_option("--file-prefix-archive", type="string", dest="file_prefix_archive", action="store")
+    parser.add_option(u"--file-prefix-archive", type=u"string", dest=u"file_prefix_archive", action=u"store")
 
     # used to provide a prefix for signature files only
-    parser.add_option("--file-prefix-signature", type="string", dest="file_prefix_signature", action="store")
+    parser.add_option(u"--file-prefix-signature", type=u"string", dest=u"file_prefix_signature", action=u"store")
 
     # used in testing only - skips upload for a given volume
-    parser.add_option("--skip-volume", type="int",
+    parser.add_option(u"--skip-volume", type=u"int",
                       help=optparse.SUPPRESS_HELP)
 
     # If set, restore only the subdirectory or file specified, not the
     # whole root.
     # TRANSL: Used in usage help to represent a Unix-style path name. Example:
     # --archive-dir <path>
-    parser.add_option("--file-to-restore", "-r", action="callback", type="file",
-                      metavar=_("path"), dest="restore_dir",
-                      callback=lambda o, s, v, p: setattr(p.values, "restore_dir", v.strip('/')))
+    parser.add_option(u"--file-to-restore", u"-r", action=u"callback", type=u"file",
+                      metavar=_(u"path"), dest=u"restore_dir",
+                      callback=lambda o, s, v, p: setattr(p.values, u"restore_dir", v.strip(u'/')))
 
     # Used to confirm certain destructive operations like deleting old files.
-    parser.add_option("--force", action="store_true")
+    parser.add_option(u"--force", action=u"store_true")
 
     # FTP data connection type
-    parser.add_option("--ftp-passive", action="store_const", const="passive", dest="ftp_connection")
-    parser.add_option("--ftp-regular", action="store_const", const="regular", dest="ftp_connection")
+    parser.add_option(u"--ftp-passive", action=u"store_const", const=u"passive", dest=u"ftp_connection")
+    parser.add_option(u"--ftp-regular", action=u"store_const", const=u"regular", dest=u"ftp_connection")
 
     # If set, forces a full backup if the last full backup is older than
     # the time specified
-    parser.add_option("--full-if-older-than", type="time", dest="full_force_time", metavar=_("time"))
+    parser.add_option(u"--full-if-older-than", type=u"time", dest=u"full_force_time", metavar=_(u"time"))
 
-    parser.add_option("--gio", action="callback", dest="use_gio",
+    parser.add_option(u"--gio", action=u"callback", dest=u"use_gio",
                       callback=lambda o, s, v, p: (setattr(p.values, o.dest, True),
                                                    old_fn_deprecation(s)))
 
-    parser.add_option("--gpg-binary", type="file", metavar=_("path"))
+    parser.add_option(u"--gpg-binary", type=u"file", metavar=_(u"path"))
 
-    parser.add_option("--gpg-options", action="extend", metavar=_("options"))
+    parser.add_option(u"--gpg-options", action=u"extend", metavar=_(u"options"))
 
     # TRANSL: Used in usage help to represent an ID for a hidden GnuPG key. Example:
     # --hidden-encrypt-key <gpg_key_id>
-    parser.add_option("--hidden-encrypt-key", type="string", metavar=_("gpg-key-id"),
-                      dest="", action="callback",
+    parser.add_option(u"--hidden-encrypt-key", type=u"string", metavar=_(u"gpg-key-id"),
+                      dest=u"", action=u"callback",
                       callback=lambda o, s, v, p: globals.gpg_profile.hidden_recipients.append(v))  # @UndefinedVariable
 
     # ignore (some) errors during operations; supposed to make it more
     # likely that you are able to restore data under problematic
     # circumstances. The default should absolutely always be False unless
     # you know what you are doing.
-    parser.add_option("--ignore-errors", action="callback",
-                      dest="ignore_errors",
+    parser.add_option(u"--ignore-errors", action=u"callback",
+                      dest=u"ignore_errors",
                       callback=lambda o, s, v, p: (log.Warn(
-                          _("Running in 'ignore errors' mode due to %s; please "
-                            "re-consider if this was not intended") % s),
-                          setattr(p.values, "ignore_errors", True)))
+                          _(u"Running in 'ignore errors' mode due to %s; please "
+                            u"re-consider if this was not intended") % s),
+                          setattr(p.values, u"ignore_errors", True)))
 
     # Whether to use the full email address as the user name when
     # logging into an imap server. If false just the user name
     # part of the email address is used.
-    parser.add_option("--imap-full-address", action="store_true")
+    parser.add_option(u"--imap-full-address", action=u"store_true")
 
     # Name of the imap folder where we want to store backups.
     # Can be changed with a command line argument.
     # TRANSL: Used in usage help to represent an imap mailbox
-    parser.add_option("--imap-mailbox", metavar=_("imap_mailbox"))
+    parser.add_option(u"--imap-mailbox", metavar=_(u"imap_mailbox"))
 
-    parser.add_option("--include", action="callback", metavar=_("shell_pattern"),
-                      dest="", type="string", callback=add_selection)
-    parser.add_option("--include-filelist", type="file", metavar=_("filename"),
-                      dest="", action="callback", callback=add_filelist)
-    parser.add_option("--include-filelist-stdin", action="callback", dest="",
-                      callback=lambda o, s, v, p: (select_opts.append(("--include-filelist", "standard input")),
+    parser.add_option(u"--include", action=u"callback", metavar=_(u"shell_pattern"),
+                      dest=u"", type=u"string", callback=add_selection)
+    parser.add_option(u"--include-filelist", type=u"file", metavar=_(u"filename"),
+                      dest=u"", action=u"callback", callback=add_filelist)
+    parser.add_option(u"--include-filelist-stdin", action=u"callback", dest=u"",
+                      callback=lambda o, s, v, p: (select_opts.append((u"--include-filelist", u"standard input")),
                                                    select_files.append(sys.stdin),
                                                    stdin_deprecation(o)),
                       help=optparse.SUPPRESS_HELP)
-    parser.add_option("--include-globbing-filelist", type="file", metavar=_("filename"),
-                      dest="", action="callback", callback=lambda o, s, v, p: (add_filelist(o, s, v, p),
-                                                                               old_globbing_filelist_deprecation(s)),
+    parser.add_option(u"--include-globbing-filelist", type=u"file", metavar=_(u"filename"),
+                      dest=u"", action=u"callback", callback=lambda o, s, v, p: (add_filelist(o, s, v, p),
+                                                                                 old_globbing_filelist_deprecation(s)),
                       help=optparse.SUPPRESS_HELP)
-    parser.add_option("--include-regexp", metavar=_("regular_expression"), dest="",
-                      type="string", action="callback", callback=add_selection)
+    parser.add_option(u"--include-regexp", metavar=_(u"regular_expression"), dest=u"",
+                      type=u"string", action=u"callback", callback=add_selection)
 
-    parser.add_option("--log-fd", type="int", metavar=_("file_descriptor"),
-                      dest="", action="callback",
+    parser.add_option(u"--log-fd", type=u"int", metavar=_(u"file_descriptor"),
+                      dest=u"", action=u"callback",
                       callback=lambda o, s, v, p: set_log_fd(v))
 
     # TRANSL: Used in usage help to represent the name of a file. Example:
     # --log-file <filename>
-    parser.add_option("--log-file", type="file", metavar=_("filename"),
-                      dest="", action="callback",
+    parser.add_option(u"--log-file", type=u"file", metavar=_(u"filename"),
+                      dest=u"", action=u"callback",
                       callback=lambda o, s, v, p: log.add_file(v))
 
     # Maximum block size for large files
-    parser.add_option("--max-blocksize", type="int", metavar=_("number"))
+    parser.add_option(u"--max-blocksize", type=u"int", metavar=_(u"number"))
 
     # TRANSL: Used in usage help (noun)
-    parser.add_option("--name", dest="backup_name", metavar=_("backup name"))
+    parser.add_option(u"--name", dest=u"backup_name", metavar=_(u"backup name"))
 
     # If set to false, then do not encrypt files on remote system
-    parser.add_option("--no-encryption", action="store_false", dest="encryption")
+    parser.add_option(u"--no-encryption", action=u"store_false", dest=u"encryption")
 
     # If set to false, then do not compress files on remote system
-    parser.add_option("--no-compression", action="store_false", dest="compression")
+    parser.add_option(u"--no-compression", action=u"store_false", dest=u"compression")
 
     # If set, do not print statistics after the backup session
-    parser.add_option("--no-print-statistics", action="store_false", dest="print_statistics")
+    parser.add_option(u"--no-print-statistics", action=u"store_false", dest=u"print_statistics")
 
     # If true, filelists and directory statistics will be split on
     # nulls instead of newlines.
-    parser.add_option("--null-separator", action="store_true")
+    parser.add_option(u"--null-separator", action=u"store_true")
 
     # number of retries on network operations
     # TRANSL: Used in usage help to represent a desired number of
     # something. Example:
     # --num-retries <number>
-    parser.add_option("--num-retries", type="int", metavar=_("number"))
+    parser.add_option(u"--num-retries", type=u"int", metavar=_(u"number"))
 
     # File owner uid keeps the number from the tar file, like the same option in GNU tar.
-    parser.add_option("--numeric-owner", action="store_true")
+    parser.add_option(u"--numeric-owner", action=u"store_true")
 
     # Whether the old filename format is in effect.
-    parser.add_option("--old-filenames", action="callback",
-                      dest="old_filenames",
+    parser.add_option(u"--old-filenames", action=u"callback",
+                      dest=u"old_filenames",
                       callback=lambda o, s, v, p: (setattr(p.values, o.dest, True),
                                                    old_fn_deprecation(s)))
 
     # Level of Redundancy in % for Par2 files
-    parser.add_option("--par2-redundancy", type="int", metavar=_("number"))
+    parser.add_option(u"--par2-redundancy", type=u"int", metavar=_(u"number"))
 
     # Verbatim par2 options
-    parser.add_option("--par2-options", action="extend", metavar=_("options"))
+    parser.add_option(u"--par2-options", action=u"extend", metavar=_(u"options"))
 
     # Used to display the progress for the full and incremental backup operations
-    parser.add_option("--progress", action="store_true")
+    parser.add_option(u"--progress", action=u"store_true")
 
     # Used to control the progress option update rate in seconds. Default: updates every 3 seconds
-    parser.add_option("--progress-rate", type="int", metavar=_("number"))
+    parser.add_option(u"--progress-rate", type=u"int", metavar=_(u"number"))
 
     # option to trigger Pydev debugger
-    parser.add_option("--pydevd", action="store_true")
+    parser.add_option(u"--pydevd", action=u"store_true")
 
     # option to rename files during restore
-    parser.add_option("--rename", type="file", action="callback", nargs=2,
+    parser.add_option(u"--rename", type=u"file", action=u"callback", nargs=2,
                       callback=add_rename)
 
     # Restores will try to bring back the state as of the following time.
@@ -519,144 +519,144 @@
     # TRANSL: Used in usage help to represent a time spec for a previous
     # point in time, as described in the documentation. Example:
     # duplicity remove-older-than time [options] target_url
-    parser.add_option("--restore-time", "--time", "-t", type="time", metavar=_("time"))
+    parser.add_option(u"--restore-time", u"--time", u"-t", type=u"time", metavar=_(u"time"))
 
     # user added rsync options
-    parser.add_option("--rsync-options", action="extend", metavar=_("options"))
+    parser.add_option(u"--rsync-options", action=u"extend", metavar=_(u"options"))
 
     # Whether to create European buckets (sorry, hard-coded to only
     # support European for now).
-    parser.add_option("--s3-european-buckets", action="store_true")
+    parser.add_option(u"--s3-european-buckets", action=u"store_true")
 
     # Whether to use S3 Reduced Redundancy Storage
-    parser.add_option("--s3-use-rrs", action="store_true")
+    parser.add_option(u"--s3-use-rrs", action=u"store_true")
 
     # Whether to use S3 Infrequent Access Storage
-    parser.add_option("--s3-use-ia", action="store_true")
+    parser.add_option(u"--s3-use-ia", action=u"store_true")
 
     # Whether to use S3 One Zone Infrequent Access Storage
-    parser.add_option("--s3-use-onezone-ia", action="store_true")
+    parser.add_option(u"--s3-use-onezone-ia", action=u"store_true")
 
     # Whether to use "new-style" subdomain addressing for S3 buckets. Such
     # use is not backwards-compatible with upper-case buckets, or buckets
     # that are otherwise not expressible in a valid hostname.
-    parser.add_option("--s3-use-new-style", action="store_true")
+    parser.add_option(u"--s3-use-new-style", action=u"store_true")
 
     # Whether to use plain HTTP (without SSL) to send data to S3
     # See <https://bugs.launchpad.net/duplicity/+bug/433970>.
-    parser.add_option("--s3-unencrypted-connection", action="store_true")
+    parser.add_option(u"--s3-unencrypted-connection", action=u"store_true")
 
     # Chunk size used for S3 multipart uploads. The number of parallel uploads
     # to S3 is given by volume size / chunk size. Use this to maximize the use of
     # your bandwidth. Defaults to 25MB
-    parser.add_option("--s3-multipart-chunk-size", type="int", action="callback", metavar=_("number"),
-                      callback=lambda o, s, v, p: setattr(p.values, "s3_multipart_chunk_size", v * 1024 * 1024))
+    parser.add_option(u"--s3-multipart-chunk-size", type=u"int", action=u"callback", metavar=_(u"number"),
+                      callback=lambda o, s, v, p: setattr(p.values, u"s3_multipart_chunk_size", v * 1024 * 1024))
 
     # Number of processes in the processor pool used for multipart uploads to
     # S3. Use this to control the maximum simultaneous uploads to S3.
-    parser.add_option("--s3-multipart-max-procs", type="int", metavar=_("number"))
+    parser.add_option(u"--s3-multipart-max-procs", type=u"int", metavar=_(u"number"))
 
     # Number of seconds to wait for each part of a multipart upload to S3. Use this
     # to prevent hangups when doing a multipart upload to S3.
-    parser.add_option("--s3-multipart-max-timeout", type="int", metavar=_("number"))
+    parser.add_option(u"--s3-multipart-max-timeout", type=u"int", metavar=_(u"number"))
 
     # Option to allow the s3/boto backend use the multiprocessing version.
-    parser.add_option("--s3-use-multiprocessing", action="store_true")
+    parser.add_option(u"--s3-use-multiprocessing", action=u"store_true")
 
     # Option to allow use of server side encryption in s3
-    parser.add_option("--s3-use-server-side-encryption", action="store_true", dest="s3_use_sse")
+    parser.add_option(u"--s3-use-server-side-encryption", action=u"store_true", dest=u"s3_use_sse")
 
     # Option to specify a Swift container storage policy.
-    parser.add_option("--swift-storage-policy", type="string", metavar=_("policy"))
+    parser.add_option(u"--swift-storage-policy", type=u"string", metavar=_(u"policy"))
 
     # The largest supported upload size for which the Azure library makes only one put call.
     # This is used to upload a single block if the content length is known and is less than this value.
     # The default is 67108864 (64MiB)
-    parser.add_option("--azure-max-single-put-size", type="int", metavar=_("number"))
+    parser.add_option(u"--azure-max-single-put-size", type=u"int", metavar=_(u"number"))
 
     # The block size used by the Azure library to upload a blob if the length is unknown
     # or is larger than the value set by --azure-max-single-put-size.
     # The maximum block size the service supports is 100MiB.
     # The default is 4 * 1024 * 1024 (4MiB)
-    parser.add_option("--azure-max-block-size", type="int", metavar=_("number"))
+    parser.add_option(u"--azure-max-block-size", type=u"int", metavar=_(u"number"))
 
     # Maximum number of parallel connections to use when the blob size exceeds 64MB.
-    parser.add_option("--azure-max-connections", type="int", metavar=_("number"))
+    parser.add_option(u"--azure-max-connections", type=u"int", metavar=_(u"number"))
 
     # scp command to use (ssh pexpect backend)
-    parser.add_option("--scp-command", metavar=_("command"))
+    parser.add_option(u"--scp-command", metavar=_(u"command"))
 
     # sftp command to use (ssh pexpect backend)
-    parser.add_option("--sftp-command", metavar=_("command"))
+    parser.add_option(u"--sftp-command", metavar=_(u"command"))
 
     # allow the user to switch cloudfiles backend
-    parser.add_option("--cf-backend", metavar=_("pyrax|cloudfiles"))
+    parser.add_option(u"--cf-backend", metavar=_(u"pyrax|cloudfiles"))
 
     # If set, use short (< 30 char) filenames for all the remote files.
-    parser.add_option("--short-filenames", action="callback",
-                      dest="short_filenames",
+    parser.add_option(u"--short-filenames", action=u"callback",
+                      dest=u"short_filenames",
                       callback=lambda o, s, v, p: (setattr(p.values, o.dest, True),
                                                    old_fn_deprecation(s)))
 
     # TRANSL: Used in usage help to represent an ID for a GnuPG key. Example:
     # --encrypt-key <gpg_key_id>
-    parser.add_option("--sign-key", type="string", metavar=_("gpg-key-id"),
-                      dest="", action="callback",
+    parser.add_option(u"--sign-key", type=u"string", metavar=_(u"gpg-key-id"),
+                      dest=u"", action=u"callback",
                       callback=lambda o, s, v, p: set_sign_key(v))
 
     # default to batch mode using public-key encryption
-    parser.add_option("--ssh-askpass", action="store_true")
+    parser.add_option(u"--ssh-askpass", action=u"store_true")
 
     # user added ssh options
-    parser.add_option("--ssh-options", action="extend", metavar=_("options"))
+    parser.add_option(u"--ssh-options", action=u"extend", metavar=_(u"options"))
 
     # user added ssl options (used by webdav, lftp backend)
-    parser.add_option("--ssl-cacert-file", metavar=_("pem formatted bundle of certificate authorities"))
-    parser.add_option("--ssl-cacert-path", metavar=_("path to a folder with certificate authority files"))
-    parser.add_option("--ssl-no-check-certificate", action="store_true")
+    parser.add_option(u"--ssl-cacert-file", metavar=_(u"pem formatted bundle of certificate authorities"))
+    parser.add_option(u"--ssl-cacert-path", metavar=_(u"path to a folder with certificate authority files"))
+    parser.add_option(u"--ssl-no-check-certificate", action=u"store_true")
 
     # Working directory for the tempfile module. Defaults to /tmp on most systems.
-    parser.add_option("--tempdir", dest="temproot", type="file", metavar=_("path"))
+    parser.add_option(u"--tempdir", dest=u"temproot", type=u"file", metavar=_(u"path"))
 
     # network timeout value
     # TRANSL: Used in usage help. Example:
     # --timeout <seconds>
-    parser.add_option("--timeout", type="int", metavar=_("seconds"))
+    parser.add_option(u"--timeout", type=u"int", metavar=_(u"seconds"))
 
     # Character used like the ":" in time strings like
     # 2002-08-06T04:22:00-07:00.  The colon isn't good for filenames on
     # windows machines.
     # TRANSL: abbreviation for "character" (noun)
-    parser.add_option("--time-separator", type="string", metavar=_("char"),
-                      action="callback",
+    parser.add_option(u"--time-separator", type=u"string", metavar=_(u"char"),
+                      action=u"callback",
                       callback=lambda o, s, v, p: set_time_sep(v, s))
 
     # Whether to specify --use-agent in GnuPG options
-    parser.add_option("--use-agent", action="store_true")
+    parser.add_option(u"--use-agent", action=u"store_true")
 
-    parser.add_option("--verbosity", "-v", type="verbosity", metavar="[0-9]",
-                      dest="", action="callback",
+    parser.add_option(u"--verbosity", u"-v", type=u"verbosity", metavar=u"[0-9]",
+                      dest=u"", action=u"callback",
                       callback=lambda o, s, v, p: log.setverbosity(v))
 
-    parser.add_option("-V", "--version", action="callback", callback=print_ver)
+    parser.add_option(u"-V", u"--version", action=u"callback", callback=print_ver)
 
     # volume size
     # TRANSL: Used in usage help to represent a desired number of
     # something. Example:
     # --num-retries <number>
-    parser.add_option("--volsize", type="int", action="callback", metavar=_("number"),
-                      callback=lambda o, s, v, p: setattr(p.values, "volsize", v * 1024 * 1024))
+    parser.add_option(u"--volsize", type=u"int", action=u"callback", metavar=_(u"number"),
+                      callback=lambda o, s, v, p: setattr(p.values, u"volsize", v * 1024 * 1024))
 
     # If set, collect only the file status, not the whole root.
-    parser.add_option("--file-changed", action="callback", type="file",
-                      metavar=_("path"), dest="file_changed",
-                      callback=lambda o, s, v, p: setattr(p.values, "file_changed", v.rstrip('/')))
+    parser.add_option(u"--file-changed", action=u"callback", type=u"file",
+                      metavar=_(u"path"), dest=u"file_changed",
+                      callback=lambda o, s, v, p: setattr(p.values, u"file_changed", v.rstrip(u'/')))
 
     # delay time before next try after a failure of a backend operation
     # TRANSL: Used in usage help. Example:
     # --backend-retry-delay <seconds>
-    parser.add_option("--backend-retry-delay", type="int", metavar=_("seconds"))
+    parser.add_option(u"--backend-retry-delay", type=u"int", metavar=_(u"seconds"))
 
     # parse the options
     (options, args) = parser.parse_args(arglist)
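
A side note on the callback-style options above: they all use optparse's
action=u"callback", where optparse stores nothing itself and instead hands
the callback (option, opt_str, value, parser). A minimal runnable sketch of
the pattern, using a hypothetical --size-mb option standing in for --volsize
(type=u"file" and type=u"verbosity" seen above are duplicity extensions to
optparse, so the sketch sticks to the stock u"int"):

    from optparse import OptionParser

    parser = OptionParser()
    # The callback stores a derived value on parser.values; here MiB -> bytes.
    parser.add_option(u"--size-mb", type=u"int", action=u"callback",
                      metavar=u"number",
                      callback=lambda o, s, v, p: setattr(p.values, u"size_bytes",
                                                          v * 1024 * 1024))
    options, args = parser.parse_args([u"--size-mb", u"25"])
    print options.size_bytes  # 26214400
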
@@ -665,7 +665,7 @@
     # attributes that are 'hidden' (start with an underscore) or whose name is
     # the empty string (used for arguments that don't directly store a value
     # by using dest="")
-    for f in filter(lambda x: x and not x.startswith("_"), dir(options)):
+    for f in filter(lambda x: x and not x.startswith(u"_"), dir(options)):
         v = getattr(options, f)
         # Only set if v is not None because None is the default for all the
         # variables.  If user didn't set it, we'll use defaults in globals.py
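
For readers new to this loop: it copies every public, non-None attribute of
the parsed optparse values onto duplicity's globals module (the setattr
itself falls just outside this hunk's context). A self-contained sketch of
the idea, with Config as a stand-in for the globals module:

    import optparse

    class Config(object):  # stand-in for duplicity's globals module
        timeout = 30       # pre-existing default

    config = Config()
    parser = optparse.OptionParser()
    parser.add_option(u"--timeout", type=u"int")
    options, args = parser.parse_args([u"--timeout", u"60"])

    for f in filter(lambda x: x and not x.startswith(u"_"), dir(options)):
        v = getattr(options, f)
        if v is not None:  # None means the user never set the option
            setattr(config, f, v)

    print config.timeout  # 60
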
@@ -675,7 +675,7 @@
     socket.setdefaulttimeout(globals.timeout)
 
     # expect no cmd and two positional args
-    cmd = ""
+    cmd = u""
     num_expect = 2
 
     # process first arg as command
@@ -684,7 +684,7 @@
         possible = [c for c in commands if c.startswith(cmd)]
         # no unique match, that's an error
         if len(possible) > 1:
-            command_line_error("command '%s' not unique, could be %s" % (cmd, possible))
+            command_line_error(u"command '%s' not unique, could be %s" % (cmd, possible))
         # only one match, that's a keeper
         elif len(possible) == 1:
             cmd = possible[0]
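
The prefix match above is what lets users abbreviate commands. A worked
example with a trimmed-down command list (illustrative; the real list is
longer):

    commands = [u"cleanup", u"collection-status", u"full", u"incremental"]

    print [c for c in commands if c.startswith(u"coll")]
    # [u'collection-status'] -- unique prefix, so the abbreviation is accepted
    print [c for c in commands if c.startswith(u"c")]
    # [u'cleanup', u'collection-status'] -- ambiguous, a command line error
    print [c for c in commands if c.startswith(u"/home/me/data")]
    # [] -- not a command, so the token is pushed back as a positional arg
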
@@ -692,53 +692,53 @@
         elif not possible:
             args.insert(0, cmd)
 
-    if cmd == "cleanup":
+    if cmd == u"cleanup":
         cleanup = True
         num_expect = 1
-    elif cmd == "collection-status":
+    elif cmd == u"collection-status":
         collection_status = True
         num_expect = 1
-    elif cmd == "full":
+    elif cmd == u"full":
         full_backup = True
         num_expect = 2
-    elif cmd == "incremental":
+    elif cmd == u"incremental":
         globals.incremental = True
         num_expect = 2
-    elif cmd == "list-current-files":
+    elif cmd == u"list-current-files":
         list_current = True
         num_expect = 1
-    elif cmd == "remove-older-than":
+    elif cmd == u"remove-older-than":
         try:
             arg = args.pop(0)
         except Exception:
-            command_line_error("Missing time string for remove-older-than")
+            command_line_error(u"Missing time string for remove-older-than")
         globals.remove_time = dup_time.genstrtotime(arg)
         num_expect = 1
-    elif cmd == "remove-all-but-n-full" or cmd == "remove-all-inc-of-but-n-full":
-        if cmd == "remove-all-but-n-full":
+    elif cmd == u"remove-all-but-n-full" or cmd == u"remove-all-inc-of-but-n-full":
+        if cmd == u"remove-all-but-n-full":
             globals.remove_all_but_n_full_mode = True
-        if cmd == "remove-all-inc-of-but-n-full":
+        if cmd == u"remove-all-inc-of-but-n-full":
             globals.remove_all_inc_of_but_n_full_mode = True
         try:
             arg = args.pop(0)
         except Exception:
-            command_line_error("Missing count for " + cmd)
+            command_line_error(u"Missing count for " + cmd)
         globals.keep_chains = int(arg)
         if not globals.keep_chains > 0:
-            command_line_error(cmd + " count must be > 0")
+            command_line_error(cmd + u" count must be > 0")
         num_expect = 1
-    elif cmd == "verify":
+    elif cmd == u"verify":
         verify = True
-    elif cmd == "replicate":
+    elif cmd == u"replicate":
         replicate = True
         num_expect = 2
 
     if len(args) != num_expect:
-        command_line_error("Expected %d args, got %d" % (num_expect, len(args)))
+        command_line_error(u"Expected %d args, got %d" % (num_expect, len(args)))
 
     # expand pathname args, but not URL
     for loc in range(len(args)):
-        if '://' not in args[loc]:
+        if u'://' not in args[loc]:
             args[loc] = expand_fn(args[loc])
 
     # Note that ProcessCommandLine depends on us verifying the arg
@@ -746,18 +746,18 @@
     # checks here in order to make enough sense of args to identify
     # the backend URL/lpath for args_to_path_backend().
     if len(args) < 1:
-        command_line_error("Too few arguments")
+        command_line_error(u"Too few arguments")
     elif len(args) == 1:
         backend_url = args[0]
     elif len(args) == 2:
         if replicate:
             if not backend.is_backend_url(args[0]) or not backend.is_backend_url(args[1]):
-                command_line_error("Two URLs expected for replicate.")
+                command_line_error(u"Two URLs expected for replicate.")
             src_backend_url, backend_url = args[0], args[1]
         else:
             lpath, backend_url = args_to_path_backend(args[0], args[1])  # @UnusedVariable
     else:
-        command_line_error("Too many arguments")
+        command_line_error(u"Too many arguments")
 
     if globals.backup_name is None:
         globals.backup_name = generate_default_backup_name(backend_url)
@@ -766,21 +766,21 @@
     set_archive_dir(expand_archive_dir(globals.archive_dir,
                                        globals.backup_name))
 
-    log.Info(_("Using archive dir: %s") % (globals.archive_dir_path.uc_name,))
-    log.Info(_("Using backup name: %s") % (globals.backup_name,))
+    log.Info(_(u"Using archive dir: %s") % (globals.archive_dir_path.uc_name,))
+    log.Info(_(u"Using backup name: %s") % (globals.backup_name,))
 
     return args
 
 
 def command_line_error(message):
-    """Indicate a command line error and exit"""
-    log.FatalError(_("Command line error: %s") % (message,) + "\n" +
-                   _("Enter 'duplicity --help' for help screen."),
+    u"""Indicate a command line error and exit"""
+    log.FatalError(_(u"Command line error: %s") % (message,) + u"\n" +
+                   _(u"Enter 'duplicity --help' for help screen."),
                    log.ErrorCode.command_line)
 
 
 def usage():
-    """Returns terse usage info. The code is broken down into pieces for ease of
+    u"""Returns terse usage info. The code is broken down into pieces for ease of
     translation maintenance. Any comments that look extraneous or redundant should
     be assumed to be for the benefit of translators, since they can get each string
     (paired with its preceding comment, if any) independently of the others."""
@@ -788,139 +788,139 @@
     dict = {
         # TRANSL: Used in usage help to represent a Unix-style path name. Example:
         # rsync://user[:password]@other_host[:port]//absolute_path
-        'absolute_path': _("absolute_path"),
+        u'absolute_path': _(u"absolute_path"),
 
         # TRANSL: Used in usage help. Example:
         # tahoe://alias/some_dir
-        'alias': _("alias"),
+        u'alias': _(u"alias"),
 
         # TRANSL: Used in help to represent a "bucket name" for Amazon Web
         # Services' Simple Storage Service (S3). Example:
         # s3://other.host/bucket_name[/prefix]
-        'bucket_name': _("bucket_name"),
+        u'bucket_name': _(u"bucket_name"),
 
         # TRANSL: abbreviation for "character" (noun)
-        'char': _("char"),
+        u'char': _(u"char"),
 
         # TRANSL: noun
-        'command': _("command"),
+        u'command': _(u"command"),
 
         # TRANSL: Used in usage help to represent the name of a container in
         # Rackspace Cloud Files. Example:
         # cf+http://container_name
-        'container_name': _("container_name"),
+        u'container_name': _(u"container_name"),
 
         # TRANSL: noun
-        'count': _("count"),
+        u'count': _(u"count"),
 
         # TRANSL: Used in usage help to represent the name of a file directory
-        'directory': _("directory"),
+        u'directory': _(u"directory"),
 
         # TRANSL: Used in usage help to represent the name of a file. Example:
         # --log-file <filename>
-        'filename': _("filename"),
+        u'filename': _(u"filename"),
 
         # TRANSL: Used in usage help to represent an ID for a GnuPG key. Example:
         # --encrypt-key <gpg_key_id>
-        'gpg_key_id': _("gpg-key-id"),
+        u'gpg_key_id': _(u"gpg-key-id"),
 
         # TRANSL: Used in usage help, e.g. to represent the name of a code
         # module. Example:
         # rsync://user[:password]@other.host[:port]::/module/some_dir
-        'module': _("module"),
+        u'module': _(u"module"),
 
         # TRANSL: Used in usage help to represent a desired number of
         # something. Example:
         # --num-retries <number>
-        'number': _("number"),
+        u'number': _(u"number"),
 
         # TRANSL: Used in usage help. (Should be consistent with the "Options:"
         # header.) Example:
         # duplicity [full|incremental] [options] source_dir target_url
-        'options': _("options"),
+        u'options': _(u"options"),
 
         # TRANSL: Used in usage help to represent an internet hostname. Example:
         # ftp://user[:password]@other.host[:port]/some_dir
-        'other_host': _("other.host"),
+        u'other_host': _(u"other.host"),
 
         # TRANSL: Used in usage help. Example:
         # ftp://user[:password]@other.host[:port]/some_dir
-        'password': _("password"),
+        u'password': _(u"password"),
 
         # TRANSL: Used in usage help to represent a Unix-style path name. Example:
         # --archive-dir <path>
-        'path': _("path"),
+        u'path': _(u"path"),
 
         # TRANSL: Used in usage help to represent a TCP port number. Example:
         # ftp://user[:password]@other.host[:port]/some_dir
-        'port': _("port"),
+        u'port': _(u"port"),
 
         # TRANSL: Used in usage help. This represents a string to be used as a
         # prefix to names for backup files created by Duplicity. Example:
         # s3://other.host/bucket_name[/prefix]
-        'prefix': _("prefix"),
+        u'prefix': _(u"prefix"),
 
         # TRANSL: Used in usage help to represent a Unix-style path name. Example:
         # rsync://user[:password]@other.host[:port]/relative_path
-        'relative_path': _("relative_path"),
+        u'relative_path': _(u"relative_path"),
 
         # TRANSL: Used in usage help. Example:
         # --timeout <seconds>
-        'seconds': _("seconds"),
+        u'seconds': _(u"seconds"),
 
         # TRANSL: Used in usage help to represent a "glob" style pattern for
         # matching one or more files, as described in the documentation.
         # Example:
         # --exclude <shell_pattern>
-        'shell_pattern': _("shell_pattern"),
+        u'shell_pattern': _(u"shell_pattern"),
 
         # TRANSL: Used in usage help to represent the name of a single file
         # directory or a Unix-style path to a directory. Example:
         # file:///some_dir
-        'some_dir': _("some_dir"),
+        u'some_dir': _(u"some_dir"),
 
         # TRANSL: Used in usage help to represent the name of a single file
         # directory or a Unix-style path to a directory where files will be
         # coming FROM. Example:
         # duplicity [full|incremental] [options] source_dir target_url
-        'source_dir': _("source_dir"),
+        u'source_dir': _(u"source_dir"),
 
         # TRANSL: Used in usage help to represent a URL files will be coming
         # FROM. Example:
         # duplicity [restore] [options] source_url target_dir
-        'source_url': _("source_url"),
+        u'source_url': _(u"source_url"),
 
         # TRANSL: Used in usage help to represent the name of a single file
         # directory or a Unix-style path to a directory where files will be
         # going TO. Example:
         # duplicity [restore] [options] source_url target_dir
-        'target_dir': _("target_dir"),
+        u'target_dir': _(u"target_dir"),
 
         # TRANSL: Used in usage help to represent a URL files will be going TO.
         # Example:
         # duplicity [full|incremental] [options] source_dir target_url
-        'target_url': _("target_url"),
+        u'target_url': _(u"target_url"),
 
         # TRANSL: Used in usage help to represent a time spec for a previous
         # point in time, as described in the documentation. Example:
         # duplicity remove-older-than time [options] target_url
-        'time': _("time"),
+        u'time': _(u"time"),
 
         # TRANSL: Used in usage help to represent a user name (i.e. login).
         # Example:
         # ftp://user[:password]@other.host[:port]/some_dir
-        'user': _("user"),
+        u'user': _(u"user"),
 
         # TRANSL: account id for b2. Example: b2://account_id@bucket/
-        'account_id': _("account_id"),
+        u'account_id': _(u"account_id"),
 
         # TRANSL: application_key for b2.
         # Example: b2://account_id:application_key@bucket/
-        'application_key': _("application_key"),
+        u'application_key': _(u"application_key"),
     }
 
     # TRANSL: Header in usage help
-    msg = """
+    msg = u"""
   duplicity [full|incremental] [%(options)s] %(source_dir)s %(target_url)s
   duplicity [restore] [%(options)s] %(source_url)s %(target_dir)s
   duplicity verify [%(options)s] %(source_url)s %(target_dir)s
@@ -935,7 +935,7 @@
 """ % dict
 
     # TRANSL: Header in usage help
-    msg = msg + _("Backends and their URL formats:") + """
+    msg = msg + _(u"Backends and their URL formats:") + u"""
   cf+http://%(container_name)s
   file:///%(some_dir)s
   ftp://%(user)s[:%(password)s]@%(other_host)s[:%(port)s]/%(some_dir)s
@@ -967,7 +967,7 @@
 """ % dict
 
     # TRANSL: Header in usage help
-    msg = msg + _("Commands:") + """
+    msg = msg + _(u"Commands:") + u"""
   cleanup <%(target_url)s>
   collection-status <%(target_url)s>
   full <%(source_dir)s> <%(target_url)s>
@@ -984,7 +984,7 @@
 
 
 def set_archive_dir(dirstring):
-    """Check archive dir and set global"""
+    u"""Check archive dir and set global"""
     if not os.path.exists(dirstring):
         try:
             os.makedirs(dirstring)
@@ -992,23 +992,23 @@
             pass
     archive_dir_path = path.Path(dirstring)
     if not archive_dir_path.isdir():
-        log.FatalError(_("Specified archive directory '%s' does not exist, "
-                         "or is not a directory") % (archive_dir_path.uc_name,),
+        log.FatalError(_(u"Specified archive directory '%s' does not exist, "
+                         u"or is not a directory") % (archive_dir_path.uc_name,),
                        log.ErrorCode.bad_archive_dir)
     globals.archive_dir_path = archive_dir_path
 
 
 def set_sign_key(sign_key):
-    """Set globals.sign_key assuming proper key given"""
-    if not re.search("^(0x)?([0-9A-Fa-f]{8}|[0-9A-Fa-f]{16}|[0-9A-Fa-f]{40})$", sign_key):
-        log.FatalError(_("Sign key should be an 8, 16 alt. 40 character hex string, like "
-                         "'AA0E73D2'.\nReceived '%s' instead.") % (sign_key,),
+    u"""Set globals.sign_key assuming proper key given"""
+    if not re.search(u"^(0x)?([0-9A-Fa-f]{8}|[0-9A-Fa-f]{16}|[0-9A-Fa-f]{40})$", sign_key):
+        log.FatalError(_(u"Sign key should be an 8, 16 alt. 40 character hex string, like "
+                         u"'AA0E73D2'.\nReceived '%s' instead.") % (sign_key,),
                        log.ErrorCode.bad_sign_key)
     globals.gpg_profile.sign_key = sign_key
 
 
 def set_selection():
-    """Return selection iter starting at filename with arguments applied"""
+    u"""Return selection iter starting at filename with arguments applied"""
     global select_opts, select_files
     sel = selection.Select(globals.local_path)
     sel.ParseArgs(select_opts, select_files)
@@ -1016,7 +1016,7 @@
 
 
 def args_to_path_backend(arg1, arg2):
-    """
+    u"""
     Given exactly two arguments, arg1 and arg2, figure out which one
     is the backend URL and which one is a local path, and return
     (local, backend).
@@ -1024,23 +1024,23 @@
     arg1_is_backend, arg2_is_backend = backend.is_backend_url(arg1), backend.is_backend_url(arg2)
 
     if not arg1_is_backend and not arg2_is_backend:
-        command_line_error("""\
+        command_line_error(u"""\
 One of the arguments must be a URL.  Examples of URL strings are
 "scp://user@xxxxxxxx:1234/path" and "file:///usr/local".  See the man
 page for more information.""")
     if arg1_is_backend and arg2_is_backend:
-        command_line_error("Two URLs specified.  "
-                           "One argument should be a path.")
+        command_line_error(u"Two URLs specified.  "
+                           u"One argument should be a path.")
     if arg1_is_backend:
         return (arg2, arg1)
     elif arg2_is_backend:
         return (arg1, arg2)
     else:
-        raise AssertionError('should not be reached')
+        raise AssertionError(u'should not be reached')
 
 
 def set_backend(arg1, arg2):
-    """Figure out which arg is url, set backend
+    u"""Figure out which arg is url, set backend
 
     Return value is pair (path_first, path) where is_first is true iff
     path made from arg1.
@@ -1057,22 +1057,22 @@
 
 
 def process_local_dir(action, local_pathname):
-    """Check local directory, set globals.local_path"""
+    u"""Check local directory, set globals.local_path"""
     local_path = path.Path(path.Path(local_pathname).get_canonical())
-    if action == "restore":
+    if action == u"restore":
         if (local_path.exists() and not local_path.isemptydir()) and not globals.force:
-            log.FatalError(_("Restore destination directory %s already "
-                             "exists.\nWill not overwrite.") % (local_path.uc_name,),
+            log.FatalError(_(u"Restore destination directory %s already "
+                             u"exists.\nWill not overwrite.") % (local_path.uc_name,),
                            log.ErrorCode.restore_dir_exists)
-    elif action == "verify":
+    elif action == u"verify":
         if not local_path.exists():
-            log.FatalError(_("Verify directory %s does not exist") %
+            log.FatalError(_(u"Verify directory %s does not exist") %
                            (local_path.uc_name,),
                            log.ErrorCode.verify_dir_doesnt_exist)
     else:
-        assert action == "full" or action == "inc"
+        assert action == u"full" or action == u"inc"
         if not local_path.exists():
-            log.FatalError(_("Backup source directory %s does not exist.")
+            log.FatalError(_(u"Backup source directory %s does not exist.")
                            % (local_path.uc_name,),
                            log.ErrorCode.backup_dir_doesnt_exist)
 
@@ -1080,46 +1080,46 @@
 
 
 def check_consistency(action):
-    """Final consistency check, see if something wrong with command line"""
+    u"""Final consistency check, see if something wrong with command line"""
     global full_backup, select_opts, list_current, collection_status, cleanup, replicate
 
     def assert_only_one(arglist):
-        """Raises error if two or more of the elements of arglist are true"""
+        u"""Raises error if two or more of the elements of arglist are true"""
         n = 0
         for m in arglist:
             if m:
                 n += 1
-        assert n <= 1, "Invalid syntax, two conflicting modes specified"
+        assert n <= 1, u"Invalid syntax, two conflicting modes specified"
 
-    if action in ["list-current", "collection-status",
-                  "cleanup", "remove-old", "remove-all-but-n-full", "remove-all-inc-of-but-n-full", "replicate"]:
+    if action in [u"list-current", u"collection-status",
+                  u"cleanup", u"remove-old", u"remove-all-but-n-full", u"remove-all-inc-of-but-n-full", u"replicate"]:
         assert_only_one([list_current, collection_status, cleanup, replicate,
                          globals.remove_time is not None])
-    elif action == "restore" or action == "verify":
+    elif action == u"restore" or action == u"verify":
         if full_backup:
-            command_line_error("--full option cannot be used when "
-                               "restoring or verifying")
+            command_line_error(u"--full option cannot be used when "
+                               u"restoring or verifying")
         elif globals.incremental:
-            command_line_error("--incremental option cannot be used when "
-                               "restoring or verifying")
-        if select_opts and action == "restore":
-            log.Warn(_("Command line warning: %s") % _("Selection options --exclude/--include\n"
-                                                       "currently work only when backing up,"
-                                                       "not restoring."))
+            command_line_error(u"--incremental option cannot be used when "
+                               u"restoring or verifying")
+        if select_opts and action == u"restore":
+            log.Warn(_(u"Command line warning: %s") % _(u"Selection options --exclude/--include\n"
+                                                        u"currently work only when backing up,"
+                                                        u"not restoring."))
     else:
-        assert action == "inc" or action == "full"
+        assert action == u"inc" or action == u"full"
         if verify:
-            command_line_error("--verify option cannot be used "
-                               "when backing up")
+            command_line_error(u"--verify option cannot be used "
+                               u"when backing up")
         if globals.restore_dir:
-            command_line_error("restore option incompatible with %s backup"
+            command_line_error(u"restore option incompatible with %s backup"
                                % (action,))
         if sum([globals.s3_use_rrs, globals.s3_use_ia, globals.s3_use_onezone_ia]) >= 2:
-            command_line_error("only one of --s3-use-rrs, --s3-use-ia, and --s3-use-onezone-ia may be used")
+            command_line_error(u"only one of --s3-use-rrs, --s3-use-ia, and --s3-use-onezone-ia may be used")
 
 
 def ProcessCommandLine(cmdline_list):
-    """Process command line, set globals, return action
+    u"""Process command line, set globals, return action
 
     action will be "list-current", "collection-status", "cleanup",
     "remove-old", "restore", "verify", "full", or "inc".
@@ -1139,34 +1139,34 @@
             sign_key=src.sign_key,
             recipients=src.recipients,
             hidden_recipients=src.hidden_recipients)
-    log.Debug(_("GPG binary is %s, version %s") %
-              ((globals.gpg_binary or 'gpg'), globals.gpg_profile.gpg_version))
+    log.Debug(_(u"GPG binary is %s, version %s") %
+              ((globals.gpg_binary or u'gpg'), globals.gpg_profile.gpg_version))
 
     # we can now try to import all the backends
     backend.import_backends()
 
     # parse_cmdline_options already verified that we got exactly 1 or 2
     # non-options arguments
-    assert len(args) >= 1 and len(args) <= 2, "arg count should have been checked already"
+    assert len(args) >= 1 and len(args) <= 2, u"arg count should have been checked already"
 
     if len(args) == 1:
         if list_current:
-            action = "list-current"
+            action = u"list-current"
         elif collection_status:
-            action = "collection-status"
+            action = u"collection-status"
         elif cleanup:
-            action = "cleanup"
+            action = u"cleanup"
         elif globals.remove_time is not None:
-            action = "remove-old"
+            action = u"remove-old"
         elif globals.remove_all_but_n_full_mode:
-            action = "remove-all-but-n-full"
+            action = u"remove-all-but-n-full"
         elif globals.remove_all_inc_of_but_n_full_mode:
-            action = "remove-all-inc-of-but-n-full"
+            action = u"remove-all-inc-of-but-n-full"
         else:
-            command_line_error("Too few arguments")
+            command_line_error(u"Too few arguments")
         globals.backend = backend.get_backend(args[0])
         if not globals.backend:
-            log.FatalError(_("""Bad URL '%s'.
+            log.FatalError(_(u"""Bad URL '%s'.
 Examples of URL strings are "scp://user@xxxxxxxx:1234/path" and
 "file:///usr/local".  See the man page for more information.""") % (args[0],),
                            log.ErrorCode.bad_url)
@@ -1174,27 +1174,27 @@
         if replicate:
             globals.src_backend = backend.get_backend(args[0])
             globals.backend = backend.get_backend(args[1])
-            action = "replicate"
+            action = u"replicate"
         else:
             # Figure out whether backup or restore
             backup, local_pathname = set_backend(args[0], args[1])
             if backup:
                 if full_backup:
-                    action = "full"
+                    action = u"full"
                 else:
-                    action = "inc"
+                    action = u"inc"
             else:
                 if verify:
-                    action = "verify"
+                    action = u"verify"
                 else:
-                    action = "restore"
+                    action = u"restore"
 
             process_local_dir(action, local_pathname)
-            if action in ['full', 'inc', 'verify']:
+            if action in [u'full', u'inc', u'verify']:
                 set_selection()
     elif len(args) > 2:
-        raise AssertionError("this code should not be reachable")
+        raise AssertionError(u"this code should not be reachable")
 
     check_consistency(action)
-    log.Info(_("Main action: ") + action)
+    log.Info(_(u"Main action: ") + action)
     return action
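
To make the argument-ordering rule of args_to_path_backend() above concrete,
here is a condensed, runnable sketch (is_backend_url() is simplified; the
real check consults the registered backend schemes):

    def is_backend_url(s):
        return u"://" in s  # simplified stand-in for backend.is_backend_url

    def path_and_url(arg1, arg2):
        # Mirrors args_to_path_backend(): exactly one argument may be a URL.
        first, second = is_backend_url(arg1), is_backend_url(arg2)
        assert first != second, u"need exactly one URL and one local path"
        return (arg2, arg1) if first else (arg1, arg2)

    print path_and_url(u"scp://user@host/backups", u"/home/me/data")
    # (u'/home/me/data', u'scp://user@host/backups') -- either order works
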

=== modified file 'duplicity/dup_temp.py'
--- duplicity/dup_temp.py	2018-07-24 11:52:33 +0000
+++ duplicity/dup_temp.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Manage temporary files"""
+u"""Manage temporary files"""
 
 import os
 import sys
@@ -35,7 +35,7 @@
 
 
 def new_temppath():
-    """
+    u"""
     Return a new TempPath
     """
     filename = tempdir.default().mktemp()
@@ -43,18 +43,18 @@
 
 
 class TempPath(path.Path):
-    """
+    u"""
     Path object used as a temporary file
     """
     def delete(self):
-        """
+        u"""
         Forget and delete
         """
         path.Path.delete(self)
         tempdir.default().forget(self.name)
 
     def open_with_delete(self, mode):
-        """
+        u"""
         Returns a fileobj.  When that is closed, delete file
         """
         fh = FileobjHooked(path.Path.open(self, mode))
@@ -63,7 +63,7 @@
 
 
 def get_fileobj_duppath(dirpath, partname, permname, remname, overwrite=False):
-    """
+    u"""
     Return a file object open for writing, will write to filename
 
     Data will be processed and written to a temporary file.  When the
@@ -74,13 +74,13 @@
         td = tempdir.TemporaryDirectory(dirpath.name)
         tdpname = td.mktemp()
         tdp = TempDupPath(tdpname, parseresults=file_naming.parse(partname))
-        fh = FileobjHooked(tdp.filtered_open("wb"), tdp=tdp, dirpath=dirpath,
+        fh = FileobjHooked(tdp.filtered_open(u"wb"), tdp=tdp, dirpath=dirpath,
                            partname=partname, permname=permname, remname=remname)
     else:
         dp = path.DupPath(dirpath.name, index=(partname,))
-        mode = "ab"
+        mode = u"ab"
         if overwrite:
-            mode = "wb"
+            mode = u"wb"
         fh = FileobjHooked(dp.filtered_open(mode), tdp=None, dirpath=dirpath,
                            partname=partname, permname=permname, remname=remname)
 
@@ -95,7 +95,7 @@
 
 
 def new_tempduppath(parseresults):
-    """
+    u"""
     Return a new TempDupPath, using settings from parseresults
     """
     filename = tempdir.default().mktemp()
@@ -103,41 +103,41 @@
 
 
 class TempDupPath(path.DupPath):
-    """
+    u"""
     Like TempPath, but built around DupPath
     """
     def delete(self):
-        """
+        u"""
         Forget and delete
         """
         path.DupPath.delete(self)
         tempdir.default().forget(self.name)
 
     def filtered_open_with_delete(self, mode):
-        """
+        u"""
         Returns a filtered fileobj.  When that is closed, delete file
         """
         fh = FileobjHooked(path.DupPath.filtered_open(self, mode))
         fh.addhook(self.delete)
         return fh
 
-    def open_with_delete(self, mode="rb"):
-        """
+    def open_with_delete(self, mode=u"rb"):
+        u"""
         Returns a fileobj.  When that is closed, delete file
         """
-        assert mode == "rb"  # Why write a file and then close it immediately?
+        assert mode == u"rb"  # Why write a file and then close it immediately?
         fh = FileobjHooked(path.DupPath.open(self, mode))
         fh.addhook(self.delete)
         return fh
 
 
 class FileobjHooked:
-    """
+    u"""
     Simulate a file, but add hook on close
     """
     def __init__(self, fileobj, tdp=None, dirpath=None,
                  partname=None, permname=None, remname=None):
-        """
+        u"""
         Initializer.  fileobj is the file object to simulate
         """
         self.fileobj = fileobj  # the actual file object
@@ -150,20 +150,20 @@
         self.remname = remname  # remote filename
 
     def write(self, buf):
-        """
+        u"""
         Write fileobj, return result of write()
         """
         return self.fileobj.write(buf)
 
     def flush(self):
-        """
+        u"""
         Flush fileobj and force sync.
         """
         self.fileobj.flush()
         os.fsync(self.fileobj.fileno())
 
     def to_partial(self):
-        """
+        u"""
         We have achieved the first checkpoint, make file visible and permanent.
         """
         assert not globals.restart
@@ -172,7 +172,7 @@
         del self.hooklist[0]
 
     def to_remote(self):
-        """
+        u"""
         We have written the last checkpoint, now encrypt or compress
         and send a copy of it to the remote for final storage.
         """
@@ -189,7 +189,7 @@
         globals.backend.move(tgt)  # @UndefinedVariable
 
     def to_final(self):
-        """
+        u"""
         We are finished, rename to final, gzip if needed.
         """
         src = self.dirpath.append(self.partname)
@@ -203,25 +203,25 @@
             os.rename(src.name, tgt.name)
 
     def read(self, length=-1):
-        """
+        u"""
         Read fileobj, return result of read()
         """
         return self.fileobj.read(length)
 
     def tell(self):
-        """
+        u"""
         Returns current location of fileobj
         """
         return self.fileobj.tell()
 
     def seek(self, offset):
-        """
+        u"""
         Seeks to a location of fileobj
         """
         return self.fileobj.seek(offset)
 
     def close(self):
-        """
+        u"""
         Close fileobj, running hooks right afterwards
         """
         assert not self.fileobj.close()
@@ -229,13 +229,13 @@
             hook()
 
     def addhook(self, hook):
-        """
+        u"""
         Add hook (function taking no arguments) to run upon closing
         """
         self.hooklist.append(hook)
 
     def get_name(self):
-        """
+        u"""
         Return the name of the file
         """
         return self.fileobj.name
@@ -244,7 +244,7 @@
 
 
 class Block:
-    """
+    u"""
     Data block to return from SrcIter
     """
     def __init__(self, data):
@@ -252,18 +252,18 @@
 
 
 class SrcIter:
-    """
+    u"""
     Iterate over source and return Block of data.
     """
     def __init__(self, src):
         self.src = src
-        self.fp = src.open("rb")
+        self.fp = src.open(u"rb")
 
     def next(self):
         try:
             res = Block(self.fp.read(self.get_read_size()))
         except Exception:
-            log.FatalError(_("Failed to read %s: %s") %
+            log.FatalError(_(u"Failed to read %s: %s") %
                            (self.src.uc_name, sys.exc_info()),
                            log.ErrorCode.generic)
         if not res.data:
@@ -275,4 +275,4 @@
         return 128 * 1024
 
     def get_footer(self):
-        return ""
+        return u""
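
The close-hook pattern dup_temp.py is built around can be shown standalone;
HookedFile below is an illustrative reduction of FileobjHooked, not the
duplicity API:

    import os
    import tempfile

    class HookedFile(object):
        u"""Wrap a file object and run registered hooks after close()."""
        def __init__(self, fileobj):
            self.fileobj = fileobj
            self.hooklist = []

        def addhook(self, hook):
            self.hooklist.append(hook)

        def write(self, buf):
            return self.fileobj.write(buf)

        def close(self):
            self.fileobj.close()
            for hook in self.hooklist:
                hook()

    fd, name = tempfile.mkstemp()
    fh = HookedFile(os.fdopen(fd, u"wb"))
    fh.addhook(lambda: os.unlink(name))  # like open_with_delete() above
    fh.write(b"data")
    fh.close()
    print os.path.exists(name)  # False -- the close hook removed the file
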

=== modified file 'duplicity/dup_threading.py'
--- duplicity/dup_threading.py	2018-07-24 11:52:33 +0000
+++ duplicity/dup_threading.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""
+u"""
 Duplicity specific but otherwise generic threading interfaces and
 utilities.
 
@@ -48,7 +48,7 @@
 
 
 def threading_supported():
-    """
+    u"""
     Returns whether threading is supported on the system we are
     running on.
     """
@@ -56,7 +56,7 @@
 
 
 def require_threading(reason=None):
-    """
+    u"""
     Assert that threading is required for operation to continue. Raise
     an appropriate exception if this is not the case.
 
@@ -66,14 +66,14 @@
     """
     if not threading_supported():
         if reason is None:
-            reason = "(no reason given)"
-        raise errors.NotSupported("threading was needed because [%s], but "
-                                  "is not supported by the python "
-                                  "interpreter" % (reason,))
+            reason = u"(no reason given)"
+        raise errors.NotSupported(u"threading was needed because [%s], but "
+                                  u"is not supported by the python "
+                                  u"interpreter" % (reason,))
 
 
 def thread_module():
-    """
+    u"""
     Returns the thread module, or dummy_thread if threading is not
     supported.
     """
@@ -81,7 +81,7 @@
 
 
 def threading_module():
-    """
+    u"""
     Returns the threading module, or dummy_thread if threading is not
     supported.
     """
@@ -89,7 +89,7 @@
 
 
 def with_lock(lock, fn):
-    """
+    u"""
     Call fn with lock acquired. Guarantee that lock is released upon
     the return of fn.
 
@@ -108,7 +108,7 @@
 
 
 def interruptably_wait(cv, waitFor):
-    """
+    u"""
     cv      - The threading.Condition instance to wait on
     waitFor - Callable returning a boolean to indicate whether
               the criterion being waited on has been satisfied.
@@ -160,7 +160,7 @@
 
 
 def async_split(fn):
-    """
+    u"""
     Splits the act of calling the given function into one front-end
     part for waiting on the result, and a back-end part for performing
     the work in another thread.
@@ -185,20 +185,20 @@
     # used for significant amounts of work.
 
     cv = threading.Condition()  # @UndefinedVariable
-    state = {'done': False,
-             'error': None,
-             'trace': None,
-             'value': None}
+    state = {u'done': False,
+             u'error': None,
+             u'trace': None,
+             u'value': None}
 
     def waiter():
         cv.acquire()
         try:
-            interruptably_wait(cv, lambda: state['done'])
+            interruptably_wait(cv, lambda: state[u'done'])
 
-            if state['error'] is None:
-                return state['value']
+            if state[u'error'] is None:
+                return state[u'value']
             else:
-                raise state['error'].with_traceback(state['trace'])
+                raise state[u'error'].with_traceback(state[u'trace'])
         finally:
             cv.release()
 
@@ -207,17 +207,17 @@
             value = fn()
 
             cv.acquire()
-            state['done'] = True
-            state['value'] = value
+            state[u'done'] = True
+            state[u'value'] = value
             cv.notify()
             cv.release()
 
             return (True, waiter)
         except Exception as e:
             cv.acquire()
-            state['done'] = True
-            state['error'] = e
-            state['trace'] = sys.exc_info()[2]
+            state[u'done'] = True
+            state[u'error'] = e
+            state[u'trace'] = sys.exc_info()[2]
             cv.notify()
             cv.release()
 
@@ -227,7 +227,7 @@
 
 
 class Value:
-    """
+    u"""
     A thread-safe container of a reference to an object (but not the
     object itself).
 
@@ -252,7 +252,7 @@
     """
 
     def __init__(self, value=None):
-        """
+        u"""
         Initialize with the given value.
         """
         self.__value = value
@@ -260,13 +260,13 @@
         self.__cv = threading.Condition()  # @UndefinedVariable
 
     def get(self):
-        """
+        u"""
         Returns the value protected by this Value.
         """
         return with_lock(self.__cv, lambda: self.__value)
 
     def set(self, value):
-        """
+        u"""
         Resets the value protected by this Value.
         """
         def _set():
@@ -275,7 +275,7 @@
         with_lock(self.__cv, _set)
 
     def transform(self, fn):
-        """
+        u"""
         Call fn with the current value as the parameter, and reset the
         value to the return value of fn.
 
@@ -294,7 +294,7 @@
         return with_lock(self.__cv, _transform)
 
     def acquire(self):
-        """
+        u"""
         Acquire this Value for mutually exclusive access. Only ever
         needed when calling code must perform operations that cannot
         be done with get(), set() or transform().
@@ -302,7 +302,7 @@
         self.__cv.acquire()
 
     def release(self):
-        """
+        u"""
         Release this Value for mutually exclusive access.
         """
         self.__cv.release()
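
The waiter/caller handshake inside async_split() above is a classic
condition-variable pattern; a minimal standalone sketch (the dict and
Condition mirror the state/cv pair above, and the work itself is a stand-in):

    import threading

    cv = threading.Condition()
    state = {u'done': False, u'value': None}

    def backend_work():
        value = 21 * 2  # stand-in for fn()
        cv.acquire()
        state[u'done'] = True
        state[u'value'] = value
        cv.notify()
        cv.release()

    def wait_for_result():
        cv.acquire()
        try:
            while not state[u'done']:
                cv.wait()
            return state[u'value']
        finally:
            cv.release()

    threading.Thread(target=backend_work).start()
    print wait_for_result()  # 42

The real waiter() uses interruptably_wait() rather than a bare cv.wait() so
that the blocking wait can be interrupted.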

=== modified file 'duplicity/dup_time.py'
--- duplicity/dup_time.py	2018-07-27 02:18:12 +0000
+++ duplicity/dup_time.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Provide time related exceptions and functions"""
+u"""Provide time related exceptions and functions"""
 
 from future_builtins import map
 
@@ -42,29 +42,29 @@
     pass
 
 
-_interval_conv_dict = {"s": 1, "m": 60, "h": 3600, "D": 86400,
-                       "W": 7 * 86400, "M": 30 * 86400, "Y": 365 * 86400}
-_integer_regexp = re.compile("^[0-9]+$")
-_interval_regexp = re.compile("^([0-9]+)([smhDWMY])")
-_genstr_date_regexp1 = re.compile("^(?P<year>[0-9]{4})[-/]"
-                                  "(?P<month>[0-9]{1,2})[-/]"
-                                  "(?P<day>[0-9]{1,2})$")
-_genstr_date_regexp2 = re.compile("^(?P<month>[0-9]{1,2})[-/]"
-                                  "(?P<day>[0-9]{1,2})[-/]"
-                                  "(?P<year>[0-9]{4})$")
-_genstr_date_regexp3 = re.compile("^(?P<year>[0-9]{4})"
-                                  "(?P<month>[0-9]{2})"
-                                  "(?P<day>[0-9]{2})Z$")
+_interval_conv_dict = {u"s": 1, u"m": 60, u"h": 3600, u"D": 86400,
+                       u"W": 7 * 86400, u"M": 30 * 86400, u"Y": 365 * 86400}
+_integer_regexp = re.compile(u"^[0-9]+$")
+_interval_regexp = re.compile(u"^([0-9]+)([smhDWMY])")
+_genstr_date_regexp1 = re.compile(u"^(?P<year>[0-9]{4})[-/]"
+                                  u"(?P<month>[0-9]{1,2})[-/]"
+                                  u"(?P<day>[0-9]{1,2})$")
+_genstr_date_regexp2 = re.compile(u"^(?P<month>[0-9]{1,2})[-/]"
+                                  u"(?P<day>[0-9]{1,2})[-/]"
+                                  u"(?P<year>[0-9]{4})$")
+_genstr_date_regexp3 = re.compile(u"^(?P<year>[0-9]{4})"
+                                  u"(?P<month>[0-9]{2})"
+                                  u"(?P<day>[0-9]{2})Z$")
 curtime = curtimestr = None
 prevtime = prevtimestr = None
 
-bad_interval_string = _("""Bad interval string "%s"
+bad_interval_string = _(u"""Bad interval string "%s"
 
 Intervals are specified like 2Y (2 years) or 2h30m (2.5 hours).  The
 allowed special characters are s, m, h, D, W, M, and Y.  See the man
 page for more information.""")
 
-bad_time_string = _("""Bad time string "%s"
+bad_time_string = _(u"""Bad time string "%s"
 
 The acceptable time strings are intervals (like "3D64s"), w3-datetime
 strings, like "2002-04-26T04:22:01-07:00" (strings like
@@ -75,7 +75,7 @@
 
 
 def setcurtime(time_in_secs=None):
-    """Sets the current time in curtime and curtimestr"""
+    u"""Sets the current time in curtime and curtimestr"""
     global curtime, curtimestr
     t = time_in_secs or int(time.time())
     assert type(t) in integer_types
@@ -83,37 +83,37 @@
 
 
 def setprevtime(time_in_secs):
-    """Sets the previous time in prevtime and prevtimestr"""
+    u"""Sets the previous time in prevtime and prevtimestr"""
     global prevtime, prevtimestr
     assert type(time_in_secs) in integer_types, prevtime
     prevtime, prevtimestr = time_in_secs, timetostring(time_in_secs)
 
 
 def timetostring(timeinseconds):
-    """Return w3 or duplicity datetime compliant listing of timeinseconds"""
+    u"""Return w3 or duplicity datetime compliant listing of timeinseconds"""
 
     if globals.old_filenames:
         # We need to know if DST applies to append the correct offset. So
         #    1. Save the tuple returned by localtime.
         #    2. Pass the DST flag into gettzd
         lcltime = time.localtime(timeinseconds)
-        return time.strftime("%Y-%m-%dT%H" + globals.time_separator +
-                             "%M" + globals.time_separator + "%S",
+        return time.strftime(u"%Y-%m-%dT%H" + globals.time_separator +
+                             u"%M" + globals.time_separator + u"%S",
                              lcltime) + gettzd(lcltime[-1])
     else:
         # DST never applies to UTC
         lcltime = time.gmtime(timeinseconds)
-        return time.strftime("%Y%m%dT%H%M%SZ", lcltime)
+        return time.strftime(u"%Y%m%dT%H%M%SZ", lcltime)
 
 
 def stringtotime(timestring):
-    """Return time in seconds from w3 or duplicity timestring
+    u"""Return time in seconds from w3 or duplicity timestring
 
     If there is an error parsing the string, or it doesn't look
     like a valid datetime string, return None.
     """
     try:
-        date, daytime = timestring[:19].split("T")
+        date, daytime = timestring[:19].split(u"T")
         if len(timestring) == 16:
             # new format for filename time
             year, month, day = map(int,
@@ -122,7 +122,7 @@
                                        [daytime[0:2], daytime[2:4], daytime[4:6]])
         else:
             # old format for filename time
-            year, month, day = map(int, date.split("-"))
+            year, month, day = map(int, date.split(u"-"))
             hour, minute, second = map(int,
                                        daytime.split(globals.time_separator))
         assert 1900 < year < 2100, year
@@ -163,42 +163,42 @@
 
 
 def timetopretty(timeinseconds):
-    """Return pretty version of time"""
+    u"""Return pretty version of time"""
     return time.asctime(time.localtime(timeinseconds))
 
 
 def stringtopretty(timestring):
-    """Return pretty version of time given w3 time string"""
+    u"""Return pretty version of time given w3 time string"""
     return timetopretty(stringtotime(timestring))
 
 
 def inttopretty(seconds):
-    """Convert num of seconds to readable string like "2 hours"."""
+    u"""Convert num of seconds to readable string like "2 hours"."""
     partlist = []
     hours, seconds = divmod(seconds, 3600)
     if hours > 1:
-        partlist.append("%d hours" % hours)
+        partlist.append(u"%d hours" % hours)
     elif hours == 1:
-        partlist.append("1 hour")
+        partlist.append(u"1 hour")
 
     minutes, seconds = divmod(seconds, 60)
     if minutes > 1:
-        partlist.append("%d minutes" % minutes)
+        partlist.append(u"%d minutes" % minutes)
     elif minutes == 1:
-        partlist.append("1 minute")
+        partlist.append(u"1 minute")
 
     if seconds == 1:
-        partlist.append("1 second")
+        partlist.append(u"1 second")
     elif not partlist or seconds > 1:
         if isinstance(seconds, integer_types):
-            partlist.append("%s seconds" % seconds)
+            partlist.append(u"%s seconds" % seconds)
         else:
-            partlist.append("%.2f seconds" % seconds)
-    return " ".join(partlist)
+            partlist.append(u"%.2f seconds" % seconds)
+    return u" ".join(partlist)
 
 
 def intstringtoseconds(interval_string):
-    """Convert a string expressing an interval (e.g. "4D2s") to seconds"""
+    u"""Convert a string expressing an interval (e.g. "4D2s") to seconds"""
     def error():
         raise TimeException(bad_interval_string % util.escape(interval_string))
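
A worked instance of the interval grammar that intstringtoseconds() parses,
reusing the regexp and conversion table above (interval_to_seconds is an
illustrative re-implementation, not the duplicity function):

    import re

    _interval_conv_dict = {u"s": 1, u"m": 60, u"h": 3600, u"D": 86400,
                           u"W": 7 * 86400, u"M": 30 * 86400, u"Y": 365 * 86400}
    _interval_regexp = re.compile(u"^([0-9]+)([smhDWMY])")

    def interval_to_seconds(s):
        total, rest = 0, s
        while rest:
            match = _interval_regexp.match(rest)
            if not match:
                raise ValueError(u"bad interval string: %s" % s)
            total += int(match.group(1)) * _interval_conv_dict[match.group(2)]
            rest = rest[match.end():]
        return total

    print interval_to_seconds(u"4D2s")   # 4*86400 + 2 = 345602
    print interval_to_seconds(u"2h30m")  # 2*3600 + 30*60 = 9000
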
 
@@ -219,7 +219,7 @@
 
 
 def gettzd(dstflag):
-    """Return w3's timezone identification string.
+    u"""Return w3's timezone identification string.
 
     Expressed as [+/-]hh:mm.  For instance, PST is -08:00.  Zone
     coincides with what localtime(), etc., use.
@@ -236,30 +236,30 @@
     else:
         offset = -1 * time.timezone / 60
     if offset > 0:
-        prefix = "+"
+        prefix = u"+"
     elif offset < 0:
-        prefix = "-"
+        prefix = u"-"
     else:
-        return "Z"  # time is already in UTC
+        return u"Z"  # time is already in UTC
 
     hours, minutes = map(abs, divmod(offset, 60))
     assert 0 <= hours <= 23
     assert 0 <= minutes <= 59
-    return "%s%02d%s%02d" % (prefix, hours, globals.time_separator, minutes)
+    return u"%s%02d%s%02d" % (prefix, hours, globals.time_separator, minutes)
 
 
 def tzdtoseconds(tzd):
-    """Given w3 compliant TZD, return how far ahead UTC is"""
-    if tzd == "Z":
+    u"""Given w3 compliant TZD, return how far ahead UTC is"""
+    if tzd == u"Z":
         return 0
     assert len(tzd) == 6  # only accept forms like +08:00 for now
-    assert (tzd[0] == "-" or tzd[0] == "+") and \
+    assert (tzd[0] == u"-" or tzd[0] == u"+") and \
         tzd[3] == globals.time_separator
     return -60 * (60 * int(tzd[:3]) + int(tzd[4:]))
 
 
 def cmp(time1, time2):
-    """Compare time1 and time2 and return -1, 0, or 1"""
+    u"""Compare time1 and time2 and return -1, 0, or 1"""
     if isinstance(time1, types.StringTypes):
         time1 = stringtotime(time1)
         assert time1 is not None
@@ -276,10 +276,10 @@
 
 
 def genstrtotime(timestr, override_curtime=None):
-    """Convert a generic time string to a time in seconds"""
+    u"""Convert a generic time string to a time in seconds"""
     if override_curtime is None:
         override_curtime = curtime
-    if timestr == "now":
+    if timestr == u"now":
         return override_curtime
 
     def error():
@@ -311,10 +311,10 @@
              _genstr_date_regexp3.search(timestr))
     if not match:
         error()
-    timestr = "%s-%02d-%02dT00:00:00%s" % (match.group('year'),
-                                           int(match.group('month')),
-                                           int(match.group('day')),
-                                           gettzd(0))
+    timestr = u"%s-%02d-%02dT00:00:00%s" % (match.group(u'year'),
+                                            int(match.group(u'month')),
+                                            int(match.group(u'day')),
+                                            gettzd(0))
     t = stringtotime(timestr)
     if t:
         return t
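
The new-style (UTC) filename timestamps exchanged by timetostring() and
stringtotime() above round-trip with the standard library alone; a sketch
using the timestamp from this merge's own diff headers:

    import calendar
    import time

    def to_string(secs):
        return time.strftime(u"%Y%m%dT%H%M%SZ", time.gmtime(secs))

    def from_string(ts):
        return calendar.timegm(time.strptime(ts, u"%Y%m%dT%H%M%SZ"))

    print to_string(1536869389)             # 20180913T200949Z
    print from_string(u"20180913T200949Z")  # 1536869389
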

=== modified file 'duplicity/errors.py'
--- duplicity/errors.py	2018-07-24 11:52:33 +0000
+++ duplicity/errors.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""
+u"""
 Error/exception classes that do not fit naturally anywhere else.
 """
 
@@ -31,7 +31,7 @@
 
 
 class UserError(DuplicityError):
-    """
+    u"""
     Subclasses use this in their inheritance hierarchy to signal that
     the error is a user generated one, and that it is therefore
     typically unsuitable to display a full stack trace.
@@ -40,7 +40,7 @@
 
 
 class NotSupported(DuplicityError):
-    """
+    u"""
     Exception raised when an action cannot be completed because some
     particular feature is not supported by the environment.
     """
@@ -48,7 +48,7 @@
 
 
 class ConflictingScheme(DuplicityError):
-    """
+    u"""
     Raised to indicate an attempt was made to register a backend for a
     scheme for which there is already a backend registered.
     """
@@ -56,25 +56,25 @@
 
 
 class InvalidBackendURL(UserError):
-    """
+    u"""
     Raised to indicate a URL was not a valid backend URL.
     """
     pass
 
 
 class UnsupportedBackendScheme(InvalidBackendURL, UserError):
-    """
+    u"""
     Raised to indicate that a backend URL was parsed successfully as a
     URL, but was not supported.
     """
     def __init__(self, url):
         InvalidBackendURL.__init__(self,
-                                   ("scheme not supported in url: %s" % (url,)))
+                                   (u"scheme not supported in url: %s" % (url,)))
         self.url = url
 
 
 class BackendException(DuplicityError):
-    """
+    u"""
     Raised to indicate a backend specific problem.
     """
     def __init__(self, msg, code=log.ErrorCode.backend_error):
@@ -83,14 +83,14 @@
 
 
 class FatalBackendException(BackendException):
-    """
+    u"""
     Raised to indicate a backend failed fatally.
     """
     pass
 
 
 class TemporaryLoadException(BackendException):
-    """
+    u"""
     Raised to indicate a temporary issue on the backend.
     Duplicity should back off for a bit and try again.
     """

=== modified file 'duplicity/file_naming.py'
--- duplicity/file_naming.py	2018-07-24 11:52:33 +0000
+++ duplicity/file_naming.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Produce and parse the names of duplicity's backup files"""
+u"""Produce and parse the names of duplicity's backup files"""
 
 import re
 from duplicity import dup_time
@@ -57,98 +57,98 @@
     if full_vol_re and not force:
         return
 
-    full_vol_re = re.compile("^" + globals.file_prefix + globals.file_prefix_archive + "duplicity-full"
-                             "\\.(?P<time>.*?)"
-                             "\\.vol(?P<num>[0-9]+)"
-                             "\\.difftar"
-                             "(?P<partial>(\\.part))?"
-                             "($|\\.)")
-
-    full_vol_re_short = re.compile("^" + globals.file_prefix + globals.file_prefix_archive + "df"
-                                   "\\.(?P<time>[0-9a-z]+?)"
-                                   "\\.(?P<num>[0-9a-z]+)"
-                                   "\\.dt"
-                                   "(?P<partial>(\\.p))?"
-                                   "($|\\.)")
-
-    full_manifest_re = re.compile("^" + globals.file_prefix + globals.file_prefix_manifest + "duplicity-full"
-                                  "\\.(?P<time>.*?)"
-                                  "\\.manifest"
-                                  "(?P<partial>(\\.part))?"
-                                  "($|\\.)")
-
-    full_manifest_re_short = re.compile("^" + globals.file_prefix + globals.file_prefix_manifest + "df"
-                                        "\\.(?P<time>[0-9a-z]+?)"
-                                        "\\.m"
-                                        "(?P<partial>(\\.p))?"
-                                        "($|\\.)")
-
-    inc_vol_re = re.compile("^" + globals.file_prefix + globals.file_prefix_archive + "duplicity-inc"
-                            "\\.(?P<start_time>.*?)"
-                            "\\.to\\.(?P<end_time>.*?)"
-                            "\\.vol(?P<num>[0-9]+)"
-                            "\\.difftar"
-                            "($|\\.)")
-
-    inc_vol_re_short = re.compile("^" + globals.file_prefix + globals.file_prefix_archive + "di"
-                                  "\\.(?P<start_time>[0-9a-z]+?)"
-                                  "\\.(?P<end_time>[0-9a-z]+?)"
-                                  "\\.(?P<num>[0-9a-z]+)"
-                                  "\\.dt"
-                                  "($|\\.)")
-
-    inc_manifest_re = re.compile("^" + globals.file_prefix + globals.file_prefix_manifest + "duplicity-inc"
-                                 "\\.(?P<start_time>.*?)"
-                                 "\\.to"
-                                 "\\.(?P<end_time>.*?)"
-                                 "\\.manifest"
-                                 "(?P<partial>(\\.part))?"
-                                 "(\\.|$)")
-
-    inc_manifest_re_short = re.compile("^" + globals.file_prefix + globals.file_prefix_manifest + "di"
-                                       "\\.(?P<start_time>[0-9a-z]+?)"
-                                       "\\.(?P<end_time>[0-9a-z]+?)"
-                                       "\\.m"
-                                       "(?P<partial>(\\.p))?"
-                                       "(\\.|$)")
-
-    full_sig_re = re.compile("^" + globals.file_prefix + globals.file_prefix_signature + "duplicity-full-signatures"
-                             "\\.(?P<time>.*?)"
-                             "\\.sigtar"
-                             "(?P<partial>(\\.part))?"
-                             "(\\.|$)")
-
-    full_sig_re_short = re.compile("^" + globals.file_prefix + globals.file_prefix_signature + "dfs"
-                                   "\\.(?P<time>[0-9a-z]+?)"
-                                   "\\.st"
-                                   "(?P<partial>(\\.p))?"
-                                   "(\\.|$)")
-
-    new_sig_re = re.compile("^" + globals.file_prefix + globals.file_prefix_signature + "duplicity-new-signatures"
-                            "\\.(?P<start_time>.*?)"
-                            "\\.to"
-                            "\\.(?P<end_time>.*?)"
-                            "\\.sigtar"
-                            "(?P<partial>(\\.part))?"
-                            "(\\.|$)")
-
-    new_sig_re_short = re.compile("^" + globals.file_prefix + globals.file_prefix_signature + "dns"
-                                  "\\.(?P<start_time>[0-9a-z]+?)"
-                                  "\\.(?P<end_time>[0-9a-z]+?)"
-                                  "\\.st"
-                                  "(?P<partial>(\\.p))?"
-                                  "(\\.|$)")
+    full_vol_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_archive + u"duplicity-full"
+                             u"\\.(?P<time>.*?)"
+                             u"\\.vol(?P<num>[0-9]+)"
+                             u"\\.difftar"
+                             u"(?P<partial>(\\.part))?"
+                             u"($|\\.)")
+
+    full_vol_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_archive + u"df"
+                                   u"\\.(?P<time>[0-9a-z]+?)"
+                                   u"\\.(?P<num>[0-9a-z]+)"
+                                   u"\\.dt"
+                                   u"(?P<partial>(\\.p))?"
+                                   u"($|\\.)")
+
+    full_manifest_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_manifest + u"duplicity-full"
+                                  u"\\.(?P<time>.*?)"
+                                  u"\\.manifest"
+                                  u"(?P<partial>(\\.part))?"
+                                  u"($|\\.)")
+
+    full_manifest_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_manifest + u"df"
+                                        u"\\.(?P<time>[0-9a-z]+?)"
+                                        u"\\.m"
+                                        u"(?P<partial>(\\.p))?"
+                                        u"($|\\.)")
+
+    inc_vol_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_archive + u"duplicity-inc"
+                            u"\\.(?P<start_time>.*?)"
+                            u"\\.to\\.(?P<end_time>.*?)"
+                            u"\\.vol(?P<num>[0-9]+)"
+                            u"\\.difftar"
+                            u"($|\\.)")
+
+    inc_vol_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_archive + u"di"
+                                  u"\\.(?P<start_time>[0-9a-z]+?)"
+                                  u"\\.(?P<end_time>[0-9a-z]+?)"
+                                  u"\\.(?P<num>[0-9a-z]+)"
+                                  u"\\.dt"
+                                  u"($|\\.)")
+
+    inc_manifest_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_manifest + u"duplicity-inc"
+                                 u"\\.(?P<start_time>.*?)"
+                                 u"\\.to"
+                                 u"\\.(?P<end_time>.*?)"
+                                 u"\\.manifest"
+                                 u"(?P<partial>(\\.part))?"
+                                 u"(\\.|$)")
+
+    inc_manifest_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_manifest + u"di"
+                                       u"\\.(?P<start_time>[0-9a-z]+?)"
+                                       u"\\.(?P<end_time>[0-9a-z]+?)"
+                                       u"\\.m"
+                                       u"(?P<partial>(\\.p))?"
+                                       u"(\\.|$)")
+
+    full_sig_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_signature + u"duplicity-full-signatures"
+                             u"\\.(?P<time>.*?)"
+                             u"\\.sigtar"
+                             u"(?P<partial>(\\.part))?"
+                             u"(\\.|$)")
+
+    full_sig_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_signature + u"dfs"
+                                   u"\\.(?P<time>[0-9a-z]+?)"
+                                   u"\\.st"
+                                   u"(?P<partial>(\\.p))?"
+                                   u"(\\.|$)")
+
+    new_sig_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_signature + u"duplicity-new-signatures"
+                            u"\\.(?P<start_time>.*?)"
+                            u"\\.to"
+                            u"\\.(?P<end_time>.*?)"
+                            u"\\.sigtar"
+                            u"(?P<partial>(\\.part))?"
+                            u"(\\.|$)")
+
+    new_sig_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_signature + u"dns"
+                                  u"\\.(?P<start_time>[0-9a-z]+?)"
+                                  u"\\.(?P<end_time>[0-9a-z]+?)"
+                                  u"\\.st"
+                                  u"(?P<partial>(\\.p))?"
+                                  u"(\\.|$)")
 
 
 def to_base36(n):
-    """
+    u"""
     Return string representation of n in base 36 (use 0-9 and a-z)
     """
     div, mod = divmod(n, 36)
     if mod <= 9:
         last_digit = str(mod)
     else:
-        last_digit = chr(ord('a') + mod - 10)
+        last_digit = chr(ord(u'a') + mod - 10)
     if n == mod:
         return last_digit
     else:
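
Reviewer aside: the base-36 helpers in this hunk and the next back the short-filename scheme. A standalone sketch of the same round trip, reimplemented purely for illustration (this is not duplicity's code):

    def to_b36(n):
        # Encode a non-negative int with digits 0-9a-z, most significant first.
        digits = u"0123456789abcdefghijklmnopqrstuvwxyz"
        out = u""
        while True:
            n, mod = divmod(n, 36)
            out = digits[mod] + out
            if n == 0:
                return out

    def from_b36(s):
        # Positional accumulation, mirroring from_base36 below.
        total = 0
        for c in s:
            total = total * 36 + u"0123456789abcdefghijklmnopqrstuvwxyz".index(c)
        return total

    assert to_b36(36) == u"10" and from_b36(u"10") == 36
    assert from_b36(to_b36(1234567890)) == 1234567890
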
@@ -156,24 +156,24 @@
 
 
 def from_base36(s):
-    """
+    u"""
     Convert string s in base 36 to long int
     """
     total = 0
     for i in range(len(s)):
         total *= 36
         digit_ord = ord(s[i])
-        if ord('0') <= digit_ord <= ord('9'):
-            total += digit_ord - ord('0')
-        elif ord('a') <= digit_ord <= ord('z'):
-            total += digit_ord - ord('a') + 10
+        if ord(u'0') <= digit_ord <= ord(u'9'):
+            total += digit_ord - ord(u'0')
+        elif ord(u'a') <= digit_ord <= ord(u'z'):
+            total += digit_ord - ord(u'a') + 10
         else:
-            assert 0, "Digit %s in %s not in proper range" % (s[i], s)
+            assert 0, u"Digit %s in %s not in proper range" % (s[i], s)
     return total
 
 
 def get_suffix(encrypted, gzipped):
-    """
+    u"""
     Return appropriate suffix depending on status of
     encryption, compression, and short_filenames.
     """
@@ -181,22 +181,22 @@
         gzipped = False
     if encrypted:
         if globals.short_filenames:
-            suffix = '.g'
+            suffix = u'.g'
         else:
-            suffix = ".gpg"
+            suffix = u".gpg"
     elif gzipped:
         if globals.short_filenames:
-            suffix = ".z"
+            suffix = u".z"
         else:
-            suffix = '.gz'
+            suffix = u'.gz'
     else:
-        suffix = ""
+        suffix = u""
     return suffix
 
 
 def get(type, volume_number=None, manifest=False,
         encrypted=False, gzipped=False, partial=False):
-    """
+    u"""
     Return duplicity filename of specified type
 
     type can be "full", "inc", "full-sig", or "new-sig". volume_number
@@ -207,36 +207,36 @@
     if encrypted:
         gzipped = False
     suffix = get_suffix(encrypted, gzipped)
-    part_string = ""
+    part_string = u""
     if globals.short_filenames:
         if partial:
-            part_string = ".p"
+            part_string = u".p"
     else:
         if partial:
-            part_string = ".part"
+            part_string = u".part"
 
-    if type == "full-sig" or type == "new-sig":
+    if type == u"full-sig" or type == u"new-sig":
         assert not volume_number and not manifest
         assert not (volume_number and part_string)
-        if type == "full-sig":
+        if type == u"full-sig":
             if globals.short_filenames:
                 return (globals.file_prefix + globals.file_prefix_signature +
-                        "dfs.%s.st%s%s" %
+                        u"dfs.%s.st%s%s" %
                         (to_base36(dup_time.curtime), part_string, suffix))
             else:
                 return (globals.file_prefix + globals.file_prefix_signature +
-                        "duplicity-full-signatures.%s.sigtar%s%s" %
+                        u"duplicity-full-signatures.%s.sigtar%s%s" %
                         (dup_time.curtimestr, part_string, suffix))
-        elif type == "new-sig":
+        elif type == u"new-sig":
             if globals.short_filenames:
                 return (globals.file_prefix + globals.file_prefix_signature +
-                        "dns.%s.%s.st%s%s" %
+                        u"dns.%s.%s.st%s%s" %
                         (to_base36(dup_time.prevtime),
                          to_base36(dup_time.curtime),
                          part_string, suffix))
             else:
                 return (globals.file_prefix + globals.file_prefix_signature +
-                        "duplicity-new-signatures.%s.to.%s.sigtar%s%s" %
+                        u"duplicity-new-signatures.%s.to.%s.sigtar%s%s" %
                         (dup_time.prevtimestr, dup_time.curtimestr,
                          part_string, suffix))
     else:
@@ -247,43 +247,43 @@
 
         if volume_number:
             if globals.short_filenames:
-                vol_string = "%s.dt" % to_base36(volume_number)
+                vol_string = u"%s.dt" % to_base36(volume_number)
             else:
-                vol_string = "vol%d.difftar" % volume_number
+                vol_string = u"vol%d.difftar" % volume_number
             prefix += globals.file_prefix_archive
         else:
             if globals.short_filenames:
-                vol_string = "m"
+                vol_string = u"m"
             else:
-                vol_string = "manifest"
+                vol_string = u"manifest"
             prefix += globals.file_prefix_manifest
 
-        if type == "full":
-            if globals.short_filenames:
-                return ("%sdf.%s.%s%s%s" % (prefix, to_base36(dup_time.curtime),
-                                            vol_string, part_string, suffix))
-            else:
-                return ("%sduplicity-full.%s.%s%s%s" % (prefix, dup_time.curtimestr,
-                                                        vol_string, part_string, suffix))
-        elif type == "inc":
-            if globals.short_filenames:
-                return ("%sdi.%s.%s.%s%s%s" % (prefix, to_base36(dup_time.prevtime),
-                                               to_base36(dup_time.curtime),
-                                               vol_string, part_string, suffix))
-            else:
-                return ("%sduplicity-inc.%s.to.%s.%s%s%s" % (prefix, dup_time.prevtimestr,
-                                                             dup_time.curtimestr,
-                                                             vol_string, part_string, suffix))
+        if type == u"full":
+            if globals.short_filenames:
+                return (u"%sdf.%s.%s%s%s" % (prefix, to_base36(dup_time.curtime),
+                                             vol_string, part_string, suffix))
+            else:
+                return (u"%sduplicity-full.%s.%s%s%s" % (prefix, dup_time.curtimestr,
+                                                         vol_string, part_string, suffix))
+        elif type == u"inc":
+            if globals.short_filenames:
+                return (u"%sdi.%s.%s.%s%s%s" % (prefix, to_base36(dup_time.prevtime),
+                                                to_base36(dup_time.curtime),
+                                                vol_string, part_string, suffix))
+            else:
+                return (u"%sduplicity-inc.%s.to.%s.%s%s%s" % (prefix, dup_time.prevtimestr,
+                                                              dup_time.curtimestr,
+                                                              vol_string, part_string, suffix))
         else:
             assert 0
 
 
 def parse(filename):
-    """
+    u"""
     Parse duplicity filename, return None or ParseResults object
     """
     def str2time(timestr, short):
-        """
+        u"""
         Return time in seconds if string can be converted, None otherwise
         """
         if short:
@@ -296,7 +296,7 @@
         return t
 
     def get_vol_num(s, short):
-        """
+        u"""
         Return volume number from volume number string
         """
         if short:
@@ -305,7 +305,7 @@
             return int(s)
 
     def check_full():
-        """
+        u"""
         Return ParseResults if file is from full backup, None otherwise
         """
         prepare_regex()
@@ -317,18 +317,18 @@
             m1 = full_vol_re.search(filename)
             m2 = full_manifest_re.search(filename)
         if m1 or m2:
-            t = str2time((m1 or m2).group("time"), short)
+            t = str2time((m1 or m2).group(u"time"), short)
             if t:
                 if m1:
-                    return ParseResults("full", time=t,
-                                        volume_number=get_vol_num(m1.group("num"), short))
+                    return ParseResults(u"full", time=t,
+                                        volume_number=get_vol_num(m1.group(u"num"), short))
                 else:
-                    return ParseResults("full", time=t, manifest=True,
-                                        partial=(m2.group("partial") is not None))
+                    return ParseResults(u"full", time=t, manifest=True,
+                                        partial=(m2.group(u"partial") is not None))
         return None
 
     def check_inc():
-        """
+        u"""
         Return ParseResults if file is from inc backup, None otherwise
         """
         prepare_regex()
@@ -340,19 +340,19 @@
             m1 = inc_vol_re.search(filename)
             m2 = inc_manifest_re.search(filename)
         if m1 or m2:
-            t1 = str2time((m1 or m2).group("start_time"), short)
-            t2 = str2time((m1 or m2).group("end_time"), short)
+            t1 = str2time((m1 or m2).group(u"start_time"), short)
+            t2 = str2time((m1 or m2).group(u"end_time"), short)
             if t1 and t2:
                 if m1:
-                    return ParseResults("inc", start_time=t1,
-                                        end_time=t2, volume_number=get_vol_num(m1.group("num"), short))
+                    return ParseResults(u"inc", start_time=t1,
+                                        end_time=t2, volume_number=get_vol_num(m1.group(u"num"), short))
                 else:
-                    return ParseResults("inc", start_time=t1, end_time=t2, manifest=1,
-                                        partial=(m2.group("partial") is not None))
+                    return ParseResults(u"inc", start_time=t1, end_time=t2, manifest=1,
+                                        partial=(m2.group(u"partial") is not None))
         return None
 
     def check_sig():
-        """
+        u"""
         Return ParseResults if file is a signature, None otherwise
         """
         prepare_regex()
@@ -362,10 +362,10 @@
             short = False
             m = full_sig_re.search(filename)
         if m:
-            t = str2time(m.group("time"), short)
+            t = str2time(m.group(u"time"), short)
             if t:
-                return ParseResults("full-sig", time=t,
-                                    partial=(m.group("partial") is not None))
+                return ParseResults(u"full-sig", time=t,
+                                    partial=(m.group(u"partial") is not None))
             else:
                 return None
 
@@ -375,25 +375,25 @@
             short = False
             m = new_sig_re.search(filename)
         if m:
-            t1 = str2time(m.group("start_time"), short)
-            t2 = str2time(m.group("end_time"), short)
+            t1 = str2time(m.group(u"start_time"), short)
+            t2 = str2time(m.group(u"end_time"), short)
             if t1 and t2:
-                return ParseResults("new-sig", start_time=t1, end_time=t2,
-                                    partial=(m.group("partial") is not None))
+                return ParseResults(u"new-sig", start_time=t1, end_time=t2,
+                                    partial=(m.group(u"partial") is not None))
         return None
 
     def set_encryption_or_compression(pr):
-        """
+        u"""
         Set encryption and compression flags in ParseResults pr
         """
-        if (filename.endswith('.z') or
-                not globals.short_filenames and filename.endswith('gz')):
+        if (filename.endswith(u'.z') or
+                not globals.short_filenames and filename.endswith(u'gz')):
             pr.compressed = 1
         else:
             pr.compressed = None
 
-        if (filename.endswith('.g') or
-                not globals.short_filenames and filename.endswith('.gpg')):
+        if (filename.endswith(u'.g') or
+                not globals.short_filenames and filename.endswith(u'.gpg')):
             pr.encrypted = 1
         else:
             pr.encrypted = None
@@ -410,19 +410,19 @@
 
 
 class ParseResults:
-    """
+    u"""
     Hold information taken from a duplicity filename
     """
     def __init__(self, type, manifest=None, volume_number=None,
                  time=None, start_time=None, end_time=None,
                  encrypted=None, compressed=None, partial=False):
 
-        assert type in ["full-sig", "new-sig", "inc", "full"]
+        assert type in [u"full-sig", u"new-sig", u"inc", u"full"]
 
         self.type = type
-        if type == "inc" or type == "full":
+        if type == u"inc" or type == u"full":
             assert manifest or volume_number
-        if type == "inc" or type == "new-sig":
+        if type == u"inc" or type == u"new-sig":
             assert start_time and end_time
         else:
             assert time

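Before the next file, a usage sketch of the file_naming API as changed above. Hedged: it assumes default long filenames, empty prefixes, and an initialized dup_time; the filename shape in the comment is illustrative only.

    from duplicity import dup_time, file_naming

    dup_time.setcurtime()  # get()/parse() need a time context
    name = file_naming.get(u"full", volume_number=1, gzipped=True)
    # e.g. u'duplicity-full.<curtimestr>.vol1.difftar.gz'
    pr = file_naming.parse(name)
    assert pr.type == u"full" and pr.volume_number == 1 and pr.compressed
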
=== modified file 'duplicity/filechunkio.py'
--- duplicity/filechunkio.py	2018-07-24 11:52:33 +0000
+++ duplicity/filechunkio.py	2018-09-13 20:09:49 +0000
@@ -9,25 +9,25 @@
 import os
 
 
-SEEK_SET = getattr(io, 'SEEK_SET', 0)
-SEEK_CUR = getattr(io, 'SEEK_CUR', 1)
-SEEK_END = getattr(io, 'SEEK_END', 2)
+SEEK_SET = getattr(io, u'SEEK_SET', 0)
+SEEK_CUR = getattr(io, u'SEEK_CUR', 1)
+SEEK_END = getattr(io, u'SEEK_END', 2)
 
 
 class FileChunkIO(io.FileIO):
-    """
+    u"""
     A class that allows reading only a chunk of a file.
     """
-    def __init__(self, name, mode='r', closefd=True, offset=0, bytes=None,
+    def __init__(self, name, mode=u'r', closefd=True, offset=0, bytes=None,
                  *args, **kwargs):
-        """
+        u"""
         Open a file chunk. The mode can only be 'r' for reading. Offset
         is the number of bytes the chunk starts after the real file's
         first byte. Bytes defines the number of bytes the chunk has; set
         it to None to include the last byte of the real file.
         """
-        if not mode.startswith('r'):
-            raise ValueError("Mode string must begin with 'r'")
+        if not mode.startswith(u'r'):
+            raise ValueError(u"Mode string must begin with 'r'")
         self.offset = offset
         self.bytes = bytes
         if bytes is None:
@@ -36,7 +36,7 @@
         self.seek(0)
 
     def seek(self, offset, whence=SEEK_SET):
-        """
+        u"""
         Move to a new chunk position.
         """
         if whence == SEEK_SET:
@@ -47,13 +47,13 @@
             self.seek(self.bytes + offset)
 
     def tell(self):
-        """
+        u"""
         Current file position.
         """
         return super(FileChunkIO, self).tell() - self.offset
 
     def read(self, n=-1):
-        """
+        u"""
         Read and return at most n bytes.
         """
         if n >= 0:
@@ -64,13 +64,13 @@
             return self.readall()
 
     def readall(self):
-        """
+        u"""
         Read all data from the chunk.
         """
         return self.read(self.bytes - self.tell())
 
     def readinto(self, b):
-        """
+        u"""
         Same as RawIOBase.readinto().
         """
         data = self.read(len(b))

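A short sketch of the chunk semantics above; the path is hypothetical and the file is assumed to be at least 5120 bytes long:

    from duplicity.filechunkio import FileChunkIO

    # Expose bytes 4096..5119 of the underlying file as a file of their own.
    chunk = FileChunkIO(u'/tmp/example.bin', offset=4096, bytes=1024)
    assert chunk.tell() == 0      # positions are chunk-relative
    data = chunk.read()           # at most the chunk's 1024 bytes
    chunk.close()
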
=== modified file 'duplicity/globals.py'
--- duplicity/globals.py	2018-07-24 11:52:33 +0000
+++ duplicity/globals.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Store global configuration information"""
+u"""Store global configuration information"""
 
 import os
 import sys
@@ -27,19 +27,19 @@
 
 
 # The current version of duplicity
-version = "$version"
+version = u"$version"
 
 # Prefix for all files (appended before type-specific prefixes)
-file_prefix = ""
+file_prefix = u""
 
 # Prefix for manifest files only
-file_prefix_manifest = ""
+file_prefix_manifest = u""
 
 # Prefix for archive files only
-file_prefix_archive = ""
+file_prefix_archive = u""
 
 # Prefix for sig files only
-file_prefix_signature = ""
+file_prefix_signature = u""
 
 # The name of the current host, or None if it cannot be set
 hostname = socket.getfqdn()
@@ -58,13 +58,13 @@
 # contains the signatures and manifests of the relevant backup
 # collection), and for checkpoint state between volumes.
 # NOTE: this gets expanded in duplicity.commandline
-os.environ["XDG_CACHE_HOME"] = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
-archive_dir = os.path.expandvars("$XDG_CACHE_HOME/duplicity")
+os.environ[u"XDG_CACHE_HOME"] = os.getenv(u"XDG_CACHE_HOME", os.path.expanduser(u"~/.cache"))
+archive_dir = os.path.expandvars(u"$XDG_CACHE_HOME/duplicity")
 archive_dir_path = None
 
 # config dir for future use
-os.environ["XDG_CONFIG_HOME"] = os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
-config_dir = os.path.expandvars("$XDG_CONFIG_HOME/duplicity")
+os.environ[u"XDG_CONFIG_HOME"] = os.getenv(u"XDG_CONFIG_HOME", os.path.expanduser(u"~/.config"))
+config_dir = os.path.expandvars(u"$XDG_CONFIG_HOME/duplicity")
 
 # Restores will try to bring back the state as of the following time.
 # If it is None, default to current time.
@@ -88,7 +88,7 @@
 gpg_profile = None
 
 # Options to pass to gpg
-gpg_options = ''
+gpg_options = u''
 
 # Maximum file blocksize
 max_blocksize = 2048
@@ -106,10 +106,10 @@
 # Character used like the ":" in time strings like
 # 2002-08-06T04:22:00-07:00.  The colon isn't good for filenames on
 # windows machines.
-time_separator = ":"
+time_separator = u":"
 
 # Global lockfile used to manage concurrency
-lockpath = ""
+lockpath = u""
 lockfile = None
 
 # If this is true, only warn and don't raise fatal error when backup
@@ -166,10 +166,10 @@
 timeout = 30
 
 # FTP data connection type
-ftp_connection = 'passive'
+ftp_connection = u'passive'
 
 # Protocol for webdav
-webdav_proto = 'http'
+webdav_proto = u'http'
 
 # Asynchronous put/get concurrency limit
 # (default of 0 disables asynchronicity).
@@ -221,7 +221,7 @@
 s3_use_sse = False
 
 # Which storage policy to use for Swift containers
-swift_storage_policy = ""
+swift_storage_policy = u""
 
 # The largest size upload supported in a single put call for azure
 azure_max_single_put_size = None
@@ -239,7 +239,7 @@
 
 # Name of the imap folder where we want to store backups.
 # Can be changed with a command line argument.
-imap_mailbox = "INBOX"
+imap_mailbox = u"INBOX"
 
 # Whether the old filename format is in effect.
 old_filenames = False
@@ -255,10 +255,10 @@
 ssh_askpass = False
 
 # user added ssh options
-ssh_options = ""
+ssh_options = u""
 
 # default cf backend is pyrax
-cf_backend = "pyrax"
+cf_backend = u"pyrax"
 
 # HTTPS ssl options (currently only webdav, lftp)
 ssl_cacert_file = None
@@ -266,7 +266,7 @@
 ssl_no_check_certificate = False
 
 # user added rsync options
-rsync_options = ""
+rsync_options = u""
 
 # will be a Restart object if restarting
 restart = None
@@ -308,7 +308,7 @@
 par2_redundancy = 10
 
 # Verbatim par2 other options
-par2_options = ""
+par2_options = u""
 
 # Whether to enable gio backend
 use_gio = False
@@ -324,4 +324,4 @@
 # 'utf-8' or some other sane encoding, but will sometimes fail and return
 # either 'ascii' or None.  Both are bogus, so default to 'utf-8' if it does.
 fsencoding = sys.getfilesystemencoding()
-fsencoding = fsencoding if fsencoding not in ['ascii', 'ANSI_X3.4-1968', None] else 'utf-8'
+fsencoding = fsencoding if fsencoding not in [u'ascii', u'ANSI_X3.4-1968', None] else u'utf-8'

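The fsencoding guard at the end of globals.py is the subtle change here; the same logic written out long-hand, for clarity only:

    import sys

    fsencoding = sys.getfilesystemencoding()
    # 'ascii', glibc's 'ANSI_X3.4-1968' alias for it, and None are all
    # useless answers for real-world filenames, so fall back to utf-8.
    if fsencoding in [u'ascii', u'ANSI_X3.4-1968', None]:
        fsencoding = u'utf-8'
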
=== modified file 'duplicity/gpg.py'
--- duplicity/gpg.py	2018-07-27 02:18:12 +0000
+++ duplicity/gpg.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""
+u"""
 duplicity's gpg interface, builds upon Frank Tobin's GnuPGInterface
 which is now patched with some code for iterative threaded execution
 see duplicity's README for details
@@ -51,19 +51,19 @@
 
 
 class GPGError(Exception):
-    """
+    u"""
     Indicate some GPG Error
     """
     pass
 
 
 class GPGProfile:
-    """
+    u"""
     Just hold some GPG settings, avoid passing tons of arguments
     """
     def __init__(self, passphrase=None, sign_key=None,
                  recipients=None, hidden_recipients=None):
-        """
+        u"""
         Set all data with initializer
 
         passphrase is the passphrase.  If it is None (not ""), assume
@@ -104,20 +104,20 @@
                 gnupg.options.extra_args.append(opt)
 
         # get gpg version
-        res = gnupg.run(["--version"], create_fhs=["stdout"])
-        line = res.handles["stdout"].readline().rstrip()
+        res = gnupg.run([u"--version"], create_fhs=[u"stdout"])
+        line = res.handles[u"stdout"].readline().rstrip()
         m = self._version_re.search(line)
         if m is not None:
-            return (int(m.group("maj")), int(m.group("min")), int(m.group("bug")))
-        raise GPGError("failed to determine gnupg version of %s from %s" % (binary, line))
+            return (int(m.group(u"maj")), int(m.group(u"min")), int(m.group(u"bug")))
+        raise GPGError(u"failed to determine gnupg version of %s from %s" % (binary, line))
 
 
 class GPGFile:
-    """
+    u"""
     File-like object that encrypts or decrypts another file on the fly
     """
     def __init__(self, encrypt, encrypt_path, profile):
-        """
+        u"""
         GPGFile initializer
 
         If recipients is set, use public key encryption and encrypt to
@@ -142,14 +142,14 @@
         if globals.gpg_binary is not None:
             gnupg.call = globals.gpg_binary
         gnupg.options.meta_interactive = 0
-        gnupg.options.extra_args.append('--no-secmem-warning')
-        gnupg.options.extra_args.append('--ignore-mdc-error')
+        gnupg.options.extra_args.append(u'--no-secmem-warning')
+        gnupg.options.extra_args.append(u'--ignore-mdc-error')
 
         # Support three versions of gpg present 1.x, 2.0.x, 2.1.x
         if profile.gpg_version[:1] == (1,):
             if globals.use_agent:
                 # gpg1 agent use is optional
-                gnupg.options.extra_args.append('--use-agent')
+                gnupg.options.extra_args.append(u'--use-agent')
 
         elif profile.gpg_version[:2] == (2, 0):
             pass
@@ -158,10 +158,10 @@
             if not globals.use_agent:
                 # This forces gpg2 to ignore the agent.
                 # Necessary to enforce truly non-interactive operation.
-                gnupg.options.extra_args.append('--pinentry-mode=loopback')
+                gnupg.options.extra_args.append(u'--pinentry-mode=loopback')
 
         else:
-            raise GPGError("Unsupported GNUPG version, %s" % profile.gpg_version)
+            raise GPGError(u"Unsupported GNUPG version, %s" % profile.gpg_version)
 
         # user supplied options
         if globals.gpg_options:
@@ -171,7 +171,7 @@
         cmdlist = []
         if profile.sign_key:
             gnupg.options.default_key = profile.sign_key
-            cmdlist.append("--sign")
+            cmdlist.append(u"--sign")
         # encrypt: sign key needs passphrase
         # decrypt: encrypt key needs passphrase
         # special case: allow different symmetric pass with empty sign pass
@@ -182,51 +182,51 @@
         # in case the passphrase is not set, pass an empty one to prevent
         # TypeError: expected a character buffer object on .write()
         if passphrase is None:
-            passphrase = ""
+            passphrase = u""
 
         if encrypt:
             if profile.recipients:
                 gnupg.options.recipients = profile.recipients
-                cmdlist.append('--encrypt')
+                cmdlist.append(u'--encrypt')
             if profile.hidden_recipients:
                 gnupg.options.hidden_recipients = profile.hidden_recipients
-                cmdlist.append('--encrypt')
+                cmdlist.append(u'--encrypt')
             if not (profile.recipients or profile.hidden_recipients):
-                cmdlist.append('--symmetric')
+                cmdlist.append(u'--symmetric')
                 # use integrity protection
-                gnupg.options.extra_args.append('--force-mdc')
+                gnupg.options.extra_args.append(u'--force-mdc')
             # Skip the passphrase if using the agent
             if globals.use_agent:
-                gnupg_fhs = ['stdin', ]
+                gnupg_fhs = [u'stdin', ]
             else:
-                gnupg_fhs = ['stdin', 'passphrase']
+                gnupg_fhs = [u'stdin', u'passphrase']
             p1 = gnupg.run(cmdlist, create_fhs=gnupg_fhs,
-                           attach_fhs={'stdout': encrypt_path.open("wb"),
-                                       'stderr': self.stderr_fp,
-                                       'logger': self.logger_fp})
+                           attach_fhs={u'stdout': encrypt_path.open(u"wb"),
+                                       u'stderr': self.stderr_fp,
+                                       u'logger': self.logger_fp})
             if not globals.use_agent:
-                p1.handles['passphrase'].write(passphrase)
-                p1.handles['passphrase'].close()
-            self.gpg_input = p1.handles['stdin']
+                p1.handles[u'passphrase'].write(passphrase)
+                p1.handles[u'passphrase'].close()
+            self.gpg_input = p1.handles[u'stdin']
         else:
             if (profile.recipients or profile.hidden_recipients) and profile.encrypt_secring:
-                cmdlist.append('--secret-keyring')
+                cmdlist.append(u'--secret-keyring')
                 cmdlist.append(profile.encrypt_secring)
             self.status_fp = tempfile.TemporaryFile(dir=tempdir.default().dir())
             # Skip the passphrase if using the agent
             if globals.use_agent:
-                gnupg_fhs = ['stdout', ]
+                gnupg_fhs = [u'stdout', ]
             else:
-                gnupg_fhs = ['stdout', 'passphrase']
-            p1 = gnupg.run(['--decrypt'], create_fhs=gnupg_fhs,
-                           attach_fhs={'stdin': encrypt_path.open("rb"),
-                                       'status': self.status_fp,
-                                       'stderr': self.stderr_fp,
-                                       'logger': self.logger_fp})
+                gnupg_fhs = [u'stdout', u'passphrase']
+            p1 = gnupg.run([u'--decrypt'], create_fhs=gnupg_fhs,
+                           attach_fhs={u'stdin': encrypt_path.open(u"rb"),
+                                       u'status': self.status_fp,
+                                       u'stderr': self.stderr_fp,
+                                       u'logger': self.logger_fp})
             if not(globals.use_agent):
-                p1.handles['passphrase'].write(passphrase)
-                p1.handles['passphrase'].close()
-            self.gpg_output = p1.handles['stdout']
+                p1.handles[u'passphrase'].write(passphrase)
+                p1.handles[u'passphrase'].close()
+            self.gpg_output = p1.handles[u'stdout']
         self.gpg_process = p1
         self.encrypt = encrypt
 
@@ -253,7 +253,7 @@
 
     def seek(self, offset):
         assert not self.encrypt
-        assert offset >= self.byte_count, "%d < %d" % (offset, self.byte_count)
+        assert offset >= self.byte_count, u"%d < %d" % (offset, self.byte_count)
         if offset > self.byte_count:
             self.read(offset - self.byte_count)
 
@@ -264,14 +264,14 @@
             fp.seek(0)
             for line in fp:
                 try:
-                    msg += unicode(line.strip(), locale.getpreferredencoding(), 'replace') + u"\n"
+                    msg += unicode(line.strip(), locale.getpreferredencoding(), u'replace') + u"\n"
                 except Exception as e:
                     msg += line.strip() + u"\n"
         msg += u"===== End GnuPG log =====\n"
         if not (msg.find(u"invalid packet (ctb=14)") > -1):
             raise GPGError(msg)
         else:
-            return ""
+            return u""
 
     def close(self):
         if self.encrypt:
@@ -308,7 +308,7 @@
         self.closed = 1
 
     def set_signature(self):
-        """
+        u"""
         Set self.signature to signature keyID
 
         This only applies to decrypted files.  If the file was not
@@ -316,7 +316,7 @@
         """
         self.status_fp.seek(0)
         status_buf = self.status_fp.read()
-        match = re.search("^\\[GNUPG:\\] GOODSIG ([0-9A-F]*)",
+        match = re.search(u"^\\[GNUPG:\\] GOODSIG ([0-9A-F]*)",
                           status_buf, re.M)
         if not match:
             self.signature = None
@@ -325,7 +325,7 @@
             self.signature = match.group(1)
 
     def get_signature(self):
-        """
+        u"""
         Return keyID of signature, or None if none
         """
         assert self.closed
@@ -335,7 +335,7 @@
 def GPGWriteFile(block_iter, filename, profile,
                  size=200 * 1024 * 1024,
                  max_footer_size=16 * 1024):
-    """
+    u"""
     Write GPG compressed file of given size
 
     This function writes a gpg compressed file by reading from the
@@ -359,14 +359,14 @@
     from duplicity import path
 
     def top_off(bytes, file):
-        """
+        u"""
         Add bytes of incompressible data to file.gpg_input
 
         In this case we take the incompressible data from the
         beginning of filename (it should contain enough because size
         >> largest block size).
         """
-        incompressible_fp = open(filename, "rb")
+        incompressible_fp = open(filename, u"rb")
         assert util.copyfileobj(incompressible_fp, file.gpg_input, bytes) == bytes
         incompressible_fp.close()
 
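A gloss on top_off() above: gpg compresses its input, so to land a volume near the target size the writer pads with data gpg cannot shrink, and the already-compressed start of the volume itself qualifies. A standalone restatement with hypothetical names (duplicity's version uses util.copyfileobj instead of this loop):

    def pad_with_incompressible(nbytes, volume_name, gpg_stdin):
        # Feed nbytes from the start of the finished (compressed) volume
        # back into gpg; output grows by roughly nbytes since gpg cannot
        # compress it further.
        with open(volume_name, u'rb') as fp:
            remaining = nbytes
            while remaining > 0:
                buf = fp.read(min(remaining, 64 * 1024))
                assert buf, u"volume shorter than padding required"
                gpg_stdin.write(buf)
                remaining -= len(buf)
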
@@ -404,7 +404,7 @@
 
 
 def GzipWriteFile(block_iter, filename, size=200 * 1024 * 1024, gzipped=True):
-    """
+    u"""
     Write gzipped compressed file of given size
 
     This is like the earlier GPGWriteFile except it writes a gzipped
@@ -416,7 +416,7 @@
     GPGWriteFile (returns true if wrote until end of block_iter).
     """
     class FileCounted:
-        """
+        u"""
         Wrapper around file object that counts number of bytes written
         """
         def __init__(self, fileobj):
@@ -431,11 +431,11 @@
         def close(self):
             return self.fileobj.close()
 
-    file_counted = FileCounted(open(filename, "wb"))
+    file_counted = FileCounted(open(filename, u"wb"))
 
     # if gzipped wrap with GzipFile else plain file out
     if gzipped:
-        outfile = gzip.GzipFile(None, "wb", 6, file_counted)
+        outfile = gzip.GzipFile(None, u"wb", 6, file_counted)
     else:
         outfile = file_counted
     at_end_of_blockiter = 0
@@ -455,7 +455,7 @@
 
 
 def PlainWriteFile(block_iter, filename, size=200 * 1024 * 1024, gzipped=False):
-    """
+    u"""
     Write plain uncompressed file of given size
 
     This is like the earlier GPGWriteFile except it writes a plain
@@ -470,20 +470,20 @@
 
 
 def get_hash(hash, path, hex=1):
-    """
+    u"""
     Return hash of path
 
     hash should be "MD5" or "SHA1".  The output will be in hexadecimal
     form if hex is true, and in text (base64) otherwise.
     """
     # assert path.isreg()
-    fp = path.open("rb")
-    if hash == "SHA1":
+    fp = path.open(u"rb")
+    if hash == u"SHA1":
         hash_obj = sha1()
-    elif hash == "MD5":
+    elif hash == u"MD5":
         hash_obj = md5()
     else:
-        assert 0, "Unknown hash %s" % (hash,)
+        assert 0, u"Unknown hash %s" % (hash,)
 
     while 1:
         buf = fp.read(blocksize)

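And a sanity-check sketch for get_hash(), cross-checked against hashlib; the path is hypothetical and the default hex form is assumed:

    import hashlib
    from duplicity import gpg
    from duplicity.path import Path

    p = Path(u'/tmp/example.bin')      # hypothetical existing file
    digest = gpg.get_hash(u"SHA1", p)  # hex digest by default
    with open(u'/tmp/example.bin', u'rb') as fp:
        assert digest == hashlib.sha1(fp.read()).hexdigest()
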
=== modified file 'duplicity/gpginterface.py'
--- duplicity/gpginterface.py	2018-07-24 11:52:33 +0000
+++ duplicity/gpginterface.py	2018-09-13 20:09:49 +0000
@@ -1,4 +1,4 @@
-"""Interface to GNU Privacy Guard (GnuPG)
+u"""Interface to GNU Privacy Guard (GnuPG)
 
 !!! This was renamed to gpginterface.py.
     Please refer to duplicity's README for the reason. !!!
@@ -234,35 +234,35 @@
     import threading
 except ImportError:
     import dummy_threading  # @UnusedImport
-    log.Warn(_("Threading not available -- zombie processes may appear"))
+    log.Warn(_(u"Threading not available -- zombie processes may appear"))
 
-__author__ = "Frank J. Tobin, ftobin@xxxxxxxxxxxxxxx"
-__version__ = "0.3.2"
-__revision__ = "$Id: GnuPGInterface.py,v 1.6 2009/06/06 17:35:19 loafman Exp $"
+__author__ = u"Frank J. Tobin, ftobin@xxxxxxxxxxxxxxx"
+__version__ = u"0.3.2"
+__revision__ = u"$Id: GnuPGInterface.py,v 1.6 2009/06/06 17:35:19 loafman Exp $"
 
 # "standard" filehandles attached to processes
-_stds = ['stdin', 'stdout', 'stderr']
+_stds = [u'stdin', u'stdout', u'stderr']
 
 # the permissions each type of fh needs to be opened with
-_fd_modes = {'stdin': 'w',
-             'stdout': 'r',
-             'stderr': 'r',
-             'passphrase': 'w',
-             'command': 'w',
-             'logger': 'r',
-             'status': 'r'
+_fd_modes = {u'stdin': u'w',
+             u'stdout': u'r',
+             u'stderr': u'r',
+             u'passphrase': u'w',
+             u'command': u'w',
+             u'logger': u'r',
+             u'status': u'r'
              }
 
 # correlation between handle names and the arguments we'll pass
-_fd_options = {'passphrase': '--passphrase-fd',
-               'logger': '--logger-fd',
-               'status': '--status-fd',
-               'command': '--command-fd'
+_fd_options = {u'passphrase': u'--passphrase-fd',
+               u'logger': u'--logger-fd',
+               u'status': u'--status-fd',
+               u'command': u'--command-fd'
                }
 
 
 class GnuPG:
-    """Class instances represent GnuPG.
+    u"""Class instances represent GnuPG.
 
     Instance attributes of a GnuPG object are:
 
@@ -283,12 +283,12 @@
     """
 
     def __init__(self):
-        self.call = 'gpg'
+        self.call = u'gpg'
         self.passphrase = None
         self.options = Options()
 
     def run(self, gnupg_commands, args=None, create_fhs=None, attach_fhs=None):
-        """Calls GnuPG with the list of string commands gnupg_commands,
+        u"""Calls GnuPG with the list of string commands gnupg_commands,
         complete with prefixing dashes.
         For example, gnupg_commands could be
         '["--sign", "--encrypt"]'
@@ -365,38 +365,38 @@
         handle_passphrase = 0
 
         if self.passphrase is not None \
-           and 'passphrase' not in attach_fhs \
-           and 'passphrase' not in create_fhs:
+           and u'passphrase' not in attach_fhs \
+           and u'passphrase' not in create_fhs:
             handle_passphrase = 1
-            create_fhs.append('passphrase')
+            create_fhs.append(u'passphrase')
 
         process = self._attach_fork_exec(gnupg_commands, args,
                                          create_fhs, attach_fhs)
 
         if handle_passphrase:
-            passphrase_fh = process.handles['passphrase']
+            passphrase_fh = process.handles[u'passphrase']
             passphrase_fh.write(self.passphrase)
             passphrase_fh.close()
-            del process.handles['passphrase']
+            del process.handles[u'passphrase']
 
         return process
 
     def _attach_fork_exec(self, gnupg_commands, args, create_fhs, attach_fhs):
-        """This is like run(), but without the passphrase-helping
+        u"""This is like run(), but without the passphrase-helping
         (note that run() calls this)."""
 
         process = Process()
 
         for fh_name in create_fhs + attach_fhs.keys():
             if fh_name not in _fd_modes:
-                raise KeyError("unrecognized filehandle name '%s'; must be one of %s"
+                raise KeyError(u"unrecognized filehandle name '%s'; must be one of %s"
                                % (fh_name, _fd_modes.keys()))
 
         for fh_name in create_fhs:
             # make sure the user doesn't specify a filehandle
             # to be created *and* attached
             if fh_name in attach_fhs:
-                raise ValueError("cannot have filehandle '%s' in both create_fhs and attach_fhs"
+                raise ValueError(u"cannot have filehandle '%s' in both create_fhs and attach_fhs"
                                  % fh_name)
 
             pipe = os.pipe()
@@ -404,7 +404,7 @@
             # that since pipes are unidirectional on some systems,
             # so we have to 'turn the pipe around'
             # if we are writing
-            if _fd_modes[fh_name] == 'w':
+            if _fd_modes[fh_name] == u'w':
                 pipe = (pipe[1], pipe[0])
             process._pipes[fh_name] = Pipe(pipe[0], pipe[1], 0)
 
@@ -415,7 +415,7 @@
         if process.pid != 0:
             # start a threaded_waitpid on the child
             process.thread = threading.Thread(target=threaded_waitpid,
-                                              name="wait%d" % process.pid,
+                                              name=u"wait%d" % process.pid,
                                               args=(process,))
             process.thread.start()
 
@@ -424,7 +424,7 @@
         return self._as_parent(process)
 
     def _as_parent(self, process):
-        """Stuff run after forking in parent"""
+        u"""Stuff run after forking in parent"""
         for k, p in process._pipes.items():
             if not p.direct:
                 os.close(p.child)
@@ -436,11 +436,11 @@
         return process
 
     def _as_child(self, process, gnupg_commands, args):
-        """Stuff run after forking in child"""
+        u"""Stuff run after forking in child"""
         # child
         for std in _stds:
             p = process._pipes[std]
-            os.dup2(p.child, getattr(sys, "__%s__" % std).fileno())
+            os.dup2(p.child, getattr(sys, u"__%s__" % std).fileno())
 
         for k, p in process._pipes.items():
             if p.direct and k not in _stds:
@@ -452,7 +452,7 @@
         for k, p in process._pipes.items():
             # set command-line options for non-standard fds
             if k not in _stds:
-                fd_args.extend([_fd_options[k], "%d" % p.child])
+                fd_args.extend([_fd_options[k], u"%d" % p.child])
 
             if not p.direct:
                 os.close(p.parent)
@@ -463,7 +463,7 @@
 
 
 class Pipe:
-    """simple struct holding stuff about pipes we use"""
+    u"""simple struct holding stuff about pipes we use"""
     def __init__(self, parent, child, direct):
         self.parent = parent
         self.child = child
@@ -471,7 +471,7 @@
 
 
 class Options:
-    """Objects of this class encompass options passed to GnuPG.
+    u"""Objects of this class encompass options passed to GnuPG.
     This class is responsible for determining command-line arguments
     which are based on options.  It can be said that a GnuPG
     object has-a Options object in its options attribute.
@@ -578,75 +578,75 @@
         self.extra_args = []
 
     def get_args(self):
-        """Generate a list of GnuPG arguments based upon attributes."""
+        u"""Generate a list of GnuPG arguments based upon attributes."""
 
         return self.get_meta_args() + self.get_standard_args() + self.extra_args
 
     def get_standard_args(self):
-        """Generate a list of standard, non-meta or extra arguments"""
+        u"""Generate a list of standard, non-meta or extra arguments"""
         args = []
         if self.homedir is not None:
-            args.extend(['--homedir', self.homedir])
+            args.extend([u'--homedir', self.homedir])
         if self.options is not None:
-            args.extend(['--options', self.options])
+            args.extend([u'--options', self.options])
         if self.comment is not None:
-            args.extend(['--comment', self.comment])
+            args.extend([u'--comment', self.comment])
         if self.compress_algo is not None:
-            args.extend(['--compress-algo', self.compress_algo])
+            args.extend([u'--compress-algo', self.compress_algo])
         if self.default_key is not None:
-            args.extend(['--default-key', self.default_key])
+            args.extend([u'--default-key', self.default_key])
 
         if self.no_options:
-            args.append('--no-options')
+            args.append(u'--no-options')
         if self.armor:
-            args.append('--armor')
+            args.append(u'--armor')
         if self.textmode:
-            args.append('--textmode')
+            args.append(u'--textmode')
         if self.no_greeting:
-            args.append('--no-greeting')
+            args.append(u'--no-greeting')
         if self.verbose:
-            args.append('--verbose')
+            args.append(u'--verbose')
         if self.no_verbose:
-            args.append('--no-verbose')
+            args.append(u'--no-verbose')
         if self.quiet:
-            args.append('--quiet')
+            args.append(u'--quiet')
         if self.batch:
-            args.append('--batch')
+            args.append(u'--batch')
         if self.always_trust:
-            args.append('--always-trust')
+            args.append(u'--always-trust')
         if self.force_v3_sigs:
-            args.append('--force-v3-sigs')
+            args.append(u'--force-v3-sigs')
         if self.rfc1991:
-            args.append('--rfc1991')
+            args.append(u'--rfc1991')
         if self.openpgp:
-            args.append('--openpgp')
+            args.append(u'--openpgp')
 
         for r in self.recipients:
-            args.extend(['--recipient', r])
+            args.extend([u'--recipient', r])
         for r in self.hidden_recipients:
-            args.extend(['--hidden-recipient', r])
+            args.extend([u'--hidden-recipient', r])
         for r in self.encrypt_to:
-            args.extend(['--encrypt-to', r])
+            args.extend([u'--encrypt-to', r])
 
         return args
 
     def get_meta_args(self):
-        """Get a list of generated meta-arguments"""
+        u"""Get a list of generated meta-arguments"""
         args = []
 
         if self.meta_pgp_5_compatible:
-            args.extend(['--compress-algo', '1',
-                         '--force-v3-sigs'])
+            args.extend([u'--compress-algo', u'1',
+                         u'--force-v3-sigs'])
         if self.meta_pgp_2_compatible:
-            args.append('--rfc1991')
+            args.append(u'--rfc1991')
         if not self.meta_interactive:
-            args.extend(['--batch', '--no-tty'])
+            args.extend([u'--batch', u'--no-tty'])
 
         return args
 
 
 class Process:
-    """Objects of this class encompass properties of a GnuPG
+    u"""Objects of this class encompass properties of a GnuPG
     process spawned by GnuPG.run().
 
     # gnupg is a GnuPG object
@@ -677,18 +677,18 @@
         self.returned = None
 
     def wait(self):
-        """
+        u"""
         Wait on threaded_waitpid to exit and examine results.
         Will raise an IOError if the process exits non-zero.
         """
         if self.returned is None:
             self.thread.join()
         if self.returned != 0:
-            raise IOError("GnuPG exited non-zero, with code %d" % (self.returned >> 8))
+            raise IOError(u"GnuPG exited non-zero, with code %d" % (self.returned >> 8))
 
 
 def threaded_waitpid(process):
-    """
+    u"""
     When started as a thread with the Process object, thread
     will execute an immediate waitpid() against the process
     pid and will collect the process termination info.  This
@@ -698,7 +698,7 @@
     try:
         process.returned = os.waitpid(process.pid, 0)[1]
     except:
-        log.Debug(_("GPG process %d terminated before wait()") % process.pid)
+        log.Debug(_(u"GPG process %d terminated before wait()") % process.pid)
         process.returned = 0
 
 
@@ -711,5 +711,5 @@
 # deprecated
 GnuPGInterface = GnuPG
 
-if __name__ == '__main__':
+if __name__ == u'__main__':
     _run_doctests()

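To make the create_fhs/attach_fhs split concrete, the smallest real call is the version probe gpg.py performs above; as a standalone sketch:

    from duplicity import gpginterface

    gnupg = gpginterface.GnuPG()
    gnupg.options.meta_interactive = 0     # implies --batch --no-tty
    # create_fhs asks run() to open the pipe; we then read gpg's stdout.
    proc = gnupg.run([u"--version"], create_fhs=[u"stdout"])
    first_line = proc.handles[u"stdout"].readline().rstrip()
    proc.handles[u"stdout"].close()
    proc.wait()   # raises IOError if gpg exited non-zero
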
=== modified file 'duplicity/lazy.py'
--- duplicity/lazy.py	2018-07-24 11:52:33 +0000
+++ duplicity/lazy.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Define some lazy data structures and functions acting on them"""
+u"""Define some lazy data structures and functions acting on them"""
 from __future__ import print_function
 
 import os
@@ -29,51 +29,51 @@
 
 
 class Iter:
-    """Hold static methods for the manipulation of lazy iterators"""
+    u"""Hold static methods for the manipulation of lazy iterators"""
 
     @staticmethod
     def filter(predicate, iterator):  # @NoSelf
-        """Like filter in a lazy functional programming language"""
+        u"""Like filter in a lazy functional programming language"""
         for i in iterator:
             if predicate(i):
                 yield i
 
     @staticmethod
     def map(function, iterator):  # @NoSelf
-        """Like map in a lazy functional programming language"""
+        u"""Like map in a lazy functional programming language"""
         for i in iterator:
             yield function(i)
 
     @staticmethod
     def foreach(function, iterator):  # @NoSelf
-        """Run function on each element in iterator"""
+        u"""Run function on each element in iterator"""
         for i in iterator:
             function(i)
 
     @staticmethod
     def cat(*iters):  # @NoSelf
-        """Lazily concatenate iterators"""
+        u"""Lazily concatenate iterators"""
         for iter in iters:
             for i in iter:
                 yield i
 
     @staticmethod
     def cat2(iter_of_iters):  # @NoSelf
-        """Lazily concatenate iterators, iterated by big iterator"""
+        u"""Lazily concatenate iterators, iterated by big iterator"""
         for iter in iter_of_iters:
             for i in iter:
                 yield i
 
     @staticmethod
     def empty(iter):  # @NoSelf
-        """True if iterator has length 0"""
+        u"""True if iterator has length 0"""
         for i in iter:  # @UnusedVariable
             return None
         return 1
 
     @staticmethod
     def equal(iter1, iter2, verbose=None, operator=lambda x, y: x == y):  # @NoSelf
-        """True if iterator 1 has same elements as iterator 2
+        u"""True if iterator 1 has same elements as iterator 2
 
         Use equality operator, or == if it is unspecified.
 
@@ -83,23 +83,23 @@
                 i2 = next(iter2)
             except StopIteration:
                 if verbose:
-                    print("End when i1 = %s" % (i1,))
+                    print(u"End when i1 = %s" % (i1,))
                 return None
             if not operator(i1, i2):
                 if verbose:
-                    print("%s not equal to %s" % (i1, i2))
+                    print(u"%s not equal to %s" % (i1, i2))
                 return None
         try:
             i2 = next(iter2)
         except StopIteration:
             return 1
         if verbose:
-            print("End when i2 = %s" % (i2,))
+            print(u"End when i2 = %s" % (i2,))
         return None
 
     @staticmethod
     def Or(iter):  # @NoSelf
-        """True if any element in iterator is true.  Short circuiting"""
+        u"""True if any element in iterator is true.  Short circuiting"""
         i = None
         for i in iter:
             if i:
@@ -108,7 +108,7 @@
 
     @staticmethod
     def And(iter):  # @NoSelf
-        """True if all elements in iterator are true.  Short circuiting"""
+        u"""True if all elements in iterator are true.  Short circuiting"""
         i = 1
         for i in iter:
             if not i:
@@ -117,7 +117,7 @@
 
     @staticmethod
     def len(iter):  # @NoSelf
-        """Return length of iterator"""
+        u"""Return length of iterator"""
         i = 0
         while 1:
             try:
@@ -128,7 +128,7 @@
 
     @staticmethod
     def foldr(f, default, iter):  # @NoSelf
-        """foldr the "fundamental list recursion operator"?"""
+        u"""foldr the "fundamental list recursion operator"?"""
         try:
             next_item = next(iter)
         except StopIteration:
@@ -137,7 +137,7 @@
 
     @staticmethod
     def foldl(f, default, iter):  # @NoSelf
-        """the fundamental list iteration operator.."""
+        u"""the fundamental list iteration operator."""
         while 1:
             try:
                 next_item = next(iter)
@@ -147,7 +147,7 @@
 
     @staticmethod
     def multiplex(iter, num_of_forks, final_func=None, closing_func=None):  # @NoSelf
-        """Split a single iterater into a number of streams
+        u"""Split a single iterator into a number of streams
 
         The return val will be a list with length num_of_forks, each
         of which will be an iterator like iter.  final_func is the
@@ -175,7 +175,7 @@
         called_closing_func = [None]
 
         def get_next(fork_num):
-            """Return the next element requested by fork_num"""
+            u"""Return the next element requested by fork_num"""
             if forkposition[fork_num] == -1:
                 try:
                     buffer.insert(0, next(iter))
@@ -208,7 +208,7 @@
 
 
 class IterMultiplex2:
-    """Multiplex an iterator into 2 parts
+    u"""Multiplex an iterator into 2 parts
 
     This is a special optimized case of the Iter.multiplex function,
     used when there is no closing_func or final_func, and we only want
@@ -221,7 +221,7 @@
         self.iter = iter
 
     def yielda(self):
-        """Return first iterator"""
+        u"""Return first iterator"""
         buf, iter = self.buffer, self.iter
         while(1):
             if self.a_leading_by >= 0:
@@ -235,7 +235,7 @@
             yield elem
 
     def yieldb(self):
-        """Return second iterator"""
+        u"""Return second iterator"""
         buf, iter = self.buffer, self.iter
         while(1):
             if self.a_leading_by <= 0:
@@ -250,7 +250,7 @@
 
 
 class IterTreeReducer:
-    """Tree style reducer object for iterator - stolen from rdiff-backup
+    u"""Tree style reducer object for iterator - stolen from rdiff-backup
 
     The indices of a RORPIter form a tree type structure.  This class
     can be used on each element of an iter in sequence and the result
@@ -264,7 +264,7 @@
 
     """
     def __init__(self, branch_class, branch_args):
-        """ITR initializer"""
+        u"""ITR initializer"""
         self.branch_class = branch_class
         self.branch_args = branch_args
         self.index = None
@@ -272,7 +272,7 @@
         self.branches = [self.root_branch]
 
     def finish_branches(self, index):
-        """Run Finish() on all branches index has passed
+        u"""Run Finish() on all branches index has passed
 
         When we pass out of a branch, delete it and process it with
         the parent.  The innermost branches will be the last in the
@@ -295,13 +295,13 @@
                 return 1
 
     def add_branch(self):
-        """Return branch of type self.branch_class, add to branch list"""
+        u"""Return branch of type self.branch_class, add to branch list"""
         branch = self.branch_class(*self.branch_args)
         self.branches.append(branch)
         return branch
 
     def process_w_branch(self, index, branch, args):
-        """Run start_process on latest branch"""
+        u"""Run start_process on latest branch"""
         robust.check_common_error(branch.on_error,
                                   branch.start_process, args)
         if not branch.caught_exception:
@@ -309,7 +309,7 @@
         branch.base_index = index
 
     def Finish(self):
-        """Call at end of sequence to tie everything up"""
+        u"""Call at end of sequence to tie everything up"""
         while 1:
             to_be_finished = self.branches.pop()
             to_be_finished.call_end_proc()
@@ -318,7 +318,7 @@
             self.branches[-1].branch_process(to_be_finished)
 
     def __call__(self, *args):
-        """Process args, where args[0] is current position in iterator
+        u"""Process args, where args[0] is current position in iterator
 
         Returns true if args successfully processed, false if index is
         not in the current tree and thus the final result is
@@ -335,7 +335,7 @@
             return 1
 
         if index <= self.index:
-            log.Warn(_("Warning: oldindex %s >= newindex %s") %
+            log.Warn(_(u"Warning: oldindex %s >= newindex %s") %
                      (util.uindex(self.index), util.uindex(index)))
             return 1
 
@@ -357,7 +357,7 @@
 
 
 class ITRBranch:
-    """Helper class for IterTreeReducer above
+    u"""Helper class for IterTreeReducer above
 
     There are five stub functions below: start_process, end_process,
     branch_process, fast_process, and can_fast_process.  A class that
@@ -370,7 +370,7 @@
     caught_exception = start_successful = None
 
     def call_end_proc(self):
-        """Runs the end_process on self, checking for errors"""
+        u"""Runs the end_process on self, checking for errors"""
         if self.finished or not self.start_successful:
             self.caught_exception = 1
 
@@ -381,45 +381,45 @@
         self.finished = 1
 
     def start_process(self, *args):
-        """Do some initial processing (stub)"""
+        u"""Do some initial processing (stub)"""
         pass
 
     def end_process(self):
-        """Do any final processing before leaving branch (stub)"""
+        u"""Do any final processing before leaving branch (stub)"""
         pass
 
     def branch_process(self, branch):
-        """Process a branch right after it is finished (stub)"""
+        u"""Process a branch right after it is finished (stub)"""
         assert branch.finished
         pass
 
     def can_fast_process(self, *args):
-        """True if object can be processed without new branch (stub)"""
+        u"""True if object can be processed without new branch (stub)"""
         return None
 
     def fast_process(self, *args):
-        """Process args without new child branch (stub)"""
+        u"""Process args without new child branch (stub)"""
         pass
 
     def on_error(self, exc, *args):
-        """This is run on any exception in start/end-process"""
+        u"""This is run on any exception in start/end-process"""
         self.caught_exception = 1
         if args and args[0] and isinstance(args[0], tuple):
             filename = os.path.join(*args[0])
         elif self.index:
             filename = os.path.join(*self.index)  # pylint: disable=not-an-iterable
         else:
-            filename = "."
-        log.Warn(_("Error '%s' processing %s") % (exc, util.fsdecode(filename)),
+            filename = u"."
+        log.Warn(_(u"Error '%s' processing %s") % (exc, util.fsdecode(filename)),
                  log.WarningCode.cannot_process,
                  util.escape(filename))
 
     def log_prev_error(self, index):
-        """Call function if no pending exception"""
+        u"""Call function if no pending exception"""
         if not index:
-            index_str = "."
+            index_str = u"."
         else:
             index_str = os.path.join(*index)
-        log.Warn(_("Skipping %s because of previous error") % util.fsdecode(index_str),
+        log.Warn(_(u"Skipping %s because of previous error") % util.fsdecode(index_str),
                  log.WarningCode.process_skipped,
                  util.escape(index_str))

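For reviewers unfamiliar with lazy.py: the foldr/foldl docstrings adorned
above belong to the rdiff-backup-era fold helpers, and nothing but the
string literals changes. As a rough standalone sketch of their behaviour
(my own illustration, not code from this branch):

    def foldr(f, default, it):
        # recurses: f(x1, f(x2, ... f(xn, default)))
        try:
            next_item = next(it)
        except StopIteration:
            return default
        return f(next_item, foldr(f, default, it))

    def foldl(f, default, it):
        # iterates, threading the accumulator left to right
        while 1:
            try:
                next_item = next(it)
            except StopIteration:
                return default
            default = f(default, next_item)

    # foldr(lambda x, acc: [x] + acc, [], iter(u"abc")) == [u'a', u'b', u'c']
    # foldl(lambda acc, x: acc + x, 0, iter([1, 2, 3])) == 6
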
=== modified file 'duplicity/librsync.py'
--- duplicity/librsync.py	2018-07-24 11:52:33 +0000
+++ duplicity/librsync.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Provides a high-level interface to some librsync functions
+u"""Provides a high-level interface to some librsync functions
 
 This is a python wrapper around the lower-level _librsync module,
 which is written in C.  The goal was to use C as little as possible...
@@ -31,7 +31,7 @@
 import types
 import array
 
-if os.environ.get('READTHEDOCS') == 'True':
+if os.environ.get(u'READTHEDOCS') == u'True':
     import mock
     import duplicity
     duplicity._librsync = mock.MagicMock()
@@ -40,7 +40,7 @@
 
 
 class librsyncError(Exception):
-    """Signifies error in internal librsync processing (bad signature, etc.)
+    u"""Signifies error in internal librsync processing (bad signature, etc.)
 
     underlying _librsync.librsyncError's are regenerated using this
     class because the C-created exceptions are by default
@@ -52,33 +52,33 @@
 
 
 class LikeFile:
-    """File-like object used by SigFile, DeltaFile, and PatchFile"""
-    mode = "rb"
+    u"""File-like object used by SigFile, DeltaFile, and PatchFile"""
+    mode = u"rb"
 
     # This will be replaced in subclasses by an object with
     # appropriate cycle() method
     maker = None
 
     def __init__(self, infile, need_seek=None):
-        """LikeFile initializer - zero buffers, set eofs off"""
+        u"""LikeFile initializer - zero buffers, set eofs off"""
         self.check_file(infile, need_seek)
         self.infile = infile
         self.closed = self.infile_closed = None
-        self.inbuf = ""
-        self.outbuf = array.array('c')
+        self.inbuf = b""
+        self.outbuf = array.array(b'c')
         self.eof = self.infile_eof = None
 
     def check_file(self, file, need_seek=None):
-        """Raise type error if file doesn't have necessary attributes"""
-        if not hasattr(file, "read"):
-            raise TypeError("Basis file must have a read() method")
-        if not hasattr(file, "close"):
-            raise TypeError("Basis file must have a close() method")
-        if need_seek and not hasattr(file, "seek"):
-            raise TypeError("Basis file must have a seek() method")
+        u"""Raise type error if file doesn't have necessary attributes"""
+        if not hasattr(file, u"read"):
+            raise TypeError(u"Basis file must have a read() method")
+        if not hasattr(file, u"close"):
+            raise TypeError(u"Basis file must have a close() method")
+        if need_seek and not hasattr(file, u"seek"):
+            raise TypeError(u"Basis file must have a seek() method")
 
     def read(self, length=-1):
-        """Build up self.outbuf, return first length bytes"""
+        u"""Build up self.outbuf, return first length bytes"""
         if length == -1:
             while not self.eof:
                 self._add_to_outbuf_once()
@@ -93,7 +93,7 @@
         return return_val
 
     def _add_to_outbuf_once(self):
-        """Add one cycle's worth of output to self.outbuf"""
+        u"""Add one cycle's worth of output to self.outbuf"""
         if not self.infile_eof:
             self._add_to_inbuf()
         try:
@@ -104,7 +104,7 @@
         self.outbuf.fromstring(cycle_out)
 
     def _add_to_inbuf(self):
-        """Make sure len(self.inbuf) >= blocksize"""
+        u"""Make sure len(self.inbuf) >= blocksize"""
         assert not self.infile_eof
         while len(self.inbuf) < blocksize:
             new_in = self.infile.read(blocksize)
@@ -116,16 +116,16 @@
             self.inbuf += new_in
 
     def close(self):
-        """Close infile"""
+        u"""Close infile"""
         if not self.infile_closed:
             assert not self.infile.close()
         self.closed = 1
 
 
 class SigFile(LikeFile):
-    """File-like object which incrementally generates a librsync signature"""
+    u"""File-like object which incrementally generates a librsync signature"""
     def __init__(self, infile, blocksize=_librsync.RS_DEFAULT_BLOCK_LEN):
-        """SigFile initializer - takes basis file
+        u"""SigFile initializer - takes basis file
 
         basis file only needs to have read() and close() methods.  It
         will be closed when we come to the end of the signature.
@@ -139,9 +139,9 @@
 
 
 class DeltaFile(LikeFile):
-    """File-like object which incrementally generates a librsync delta"""
+    u"""File-like object which incrementally generates a librsync delta"""
     def __init__(self, signature, new_file):
-        """DeltaFile initializer - call with signature and new file
+        u"""DeltaFile initializer - call with signature and new file
 
         Signature can either be a string or a file with read() and
         close() methods.  New_file also only needs to have read() and
@@ -162,9 +162,9 @@
 
 
 class PatchedFile(LikeFile):
-    """File-like object which applies a librsync delta incrementally"""
+    u"""File-like object which applies a librsync delta incrementally"""
     def __init__(self, basis_file, delta_file):
-        """PatchedFile initializer - call with basis delta
+        u"""PatchedFile initializer - call with basis delta
 
         Here basis_file must be a true Python file, because we may
         need to seek() around in it a lot, and this is done in C.
@@ -173,16 +173,16 @@
         """
         LikeFile.__init__(self, delta_file)
         if not isinstance(basis_file, types.FileType):
-            """ tempfile.TemporaryFile() only guarantees a true file
+            u""" tempfile.TemporaryFile() only guarantees a true file
             object on posix platforms. on cygwin/windows a file-like
             object whose file attribute is the underlying true file
             object is returned.
             """
-            if hasattr(basis_file, 'file') and isinstance(basis_file.file, types.FileType):
+            if hasattr(basis_file, u'file') and isinstance(basis_file.file, types.FileType):
                 basis_file = basis_file.file
             else:
-                raise TypeError(_("basis_file must be a (true) file or an object whose "
-                                  "file attribute is the underlying true file object"))
+                raise TypeError(_(u"basis_file must be a (true) file or an object whose "
+                                  u"file attribute is the underlying true file object"))
         try:
             self.maker = _librsync.new_patchmaker(basis_file)
         except _librsync.librsyncError as e:
@@ -190,33 +190,33 @@
 
 
 class SigGenerator:
-    """Calculate signature.
+    u"""Calculate signature.
 
     Input and output is same as SigFile, but the interface is like md5
     module, not filelike object
 
     """
     def __init__(self, blocksize=_librsync.RS_DEFAULT_BLOCK_LEN):
-        """Return new signature instance"""
+        u"""Return new signature instance"""
         try:
             self.sig_maker = _librsync.new_sigmaker(blocksize)
         except _librsync.librsyncError as e:
             raise librsyncError(str(e))
         self.gotsig = None
-        self.buffer = ""
+        self.buffer = b""
         self.sigstring_list = []
 
     def update(self, buf):
-        """Add buf to data that signature will be calculated over"""
+        u"""Add buf to data that signature will be calculated over"""
         if self.gotsig:
-            raise librsyncError("SigGenerator already provided signature")
+            raise librsyncError(u"SigGenerator already provided signature")
         self.buffer += buf
         while len(self.buffer) >= blocksize:
             if self.process_buffer():
-                raise librsyncError("Premature EOF received from sig_maker")
+                raise librsyncError(u"Premature EOF received from sig_maker")
 
     def process_buffer(self):
-        """Run self.buffer through sig_maker, add to self.sig_string"""
+        u"""Run self.buffer through sig_maker, add to self.sig_string"""
         try:
             eof, len_buf_read, cycle_out = self.sig_maker.cycle(self.buffer)
         except _librsync.librsyncError as e:
@@ -226,7 +226,7 @@
         return eof
 
     def getsig(self):
-        """Return signature over given data"""
+        u"""Return signature over given data"""
         while not self.process_buffer():
             pass  # keep running until eof
-        return ''.join(self.sigstring_list)
+        return b''.join(self.sigstring_list)

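A note on the librsync.py hunks above: unlike the other files, several
literals here deliberately become bytes (the b"" buffers, b''.join) rather
than unicode, because everything handed to or returned from _librsync is
raw data. A usage sketch of SigGenerator under that convention (the file
name and block size are illustrative, not from this branch):

    from duplicity.librsync import SigGenerator

    sig = SigGenerator()                    # default RS block length
    with open('basis.dat', 'rb') as f:      # hypothetical basis file
        for block in iter(lambda: f.read(4096), b''):
            sig.update(block)               # appends to a bytes buffer
    signature = sig.getsig()                # bytes, via b''.join(...)

mode = u"rb" and the docstrings stay unicode since they never reach the
C layer.
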
=== modified file 'duplicity/log.py'
--- duplicity/log.py	2018-07-24 11:52:33 +0000
+++ duplicity/log.py	2018-09-13 20:09:49 +0000
@@ -21,7 +21,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Log various messages depending on verbosity level"""
+u"""Log various messages depending on verbosity level"""
 
 import os
 import sys
@@ -40,39 +40,39 @@
 
 
 def DupToLoggerLevel(verb):
-    """Convert duplicity level to the logging module's system, where higher is
+    u"""Convert duplicity level to the logging module's system, where higher is
        more severe"""
     return MAX - verb + 1
 
 
 def LoggerToDupLevel(verb):
-    """Convert logging module level to duplicity's system, where lowere is
+    u"""Convert logging module level to duplicity's system, where lowere is
        more severe"""
     return DupToLoggerLevel(verb)
 
 
 def LevelName(level):
     if level >= 9:
-        return "DEBUG"
+        return u"DEBUG"
     elif level >= 5:
-        return "INFO"
+        return u"INFO"
     elif level >= 3:
-        return "NOTICE"
+        return u"NOTICE"
     elif level >= 1:
-        return "WARNING"
+        return u"WARNING"
     else:
-        return "ERROR"
+        return u"ERROR"
 
 
 def Log(s, verb_level, code=1, extra=None, force_print=False):
-    """Write s to stderr if verbosity level low enough"""
+    u"""Write s to stderr if verbosity level low enough"""
     global _logger
     if extra:
-        controlLine = '%d %s' % (code, extra)
+        controlLine = u'%d %s' % (code, extra)
     else:
-        controlLine = '%d' % (code)
+        controlLine = u'%d' % (code)
     if not s:
-        s = ''  # If None is passed, standard logging would render it as 'None'
+        s = u''  # If None is passed, standard logging would render it as 'None'
 
     if force_print:
         initial_level = _logger.getEffectiveLevel()
@@ -83,23 +83,23 @@
     # are handed bytes.  One day we should update the backends.
     # assert isinstance(s, unicode)
     if not isinstance(s, unicode):
-        s = s.decode("utf8", "replace")
+        s = s.decode(u"utf8", u"replace")
 
     _logger.log(DupToLoggerLevel(verb_level), s,
-                extra={'levelName': LevelName(verb_level),
-                       'controlLine': controlLine})
+                extra={u'levelName': LevelName(verb_level),
+                       u'controlLine': controlLine})
 
     if force_print:
         _logger.setLevel(initial_level)
 
 
 def Debug(s):
-    """Shortcut used for debug message (verbosity 9)."""
+    u"""Shortcut used for debug message (verbosity 9)."""
     Log(s, DEBUG)
 
 
 class InfoCode:
-    """Enumeration class to hold info code values.
+    u"""Enumeration class to hold info code values.
        These values should never change, as frontends rely upon them.
        Don't use 0 or negative numbers."""
     generic = 1
@@ -121,16 +121,16 @@
 
 
 def Info(s, code=InfoCode.generic, extra=None):
-    """Shortcut used for info messages (verbosity 5)."""
+    u"""Shortcut used for info messages (verbosity 5)."""
     Log(s, INFO, code, extra)
 
 
 def Progress(s, current, total=None):
-    """Shortcut used for progress messages (verbosity 5)."""
+    u"""Shortcut used for progress messages (verbosity 5)."""
     if total:
-        controlLine = '%d %d' % (current, total)
+        controlLine = u'%d %d' % (current, total)
     else:
-        controlLine = '%d' % current
+        controlLine = u'%d' % current
     Log(s, INFO, InfoCode.progress, controlLine)
 
 
@@ -138,10 +138,10 @@
     tdelta = datetime.timedelta(seconds=secs)
     hours, rem = divmod(tdelta.seconds, 3600)
     minutes, seconds = divmod(rem, 60)
-    fmt = ""
+    fmt = u""
     if tdelta.days > 0:
-        fmt = "%dd," % (tdelta.days)
-    fmt = "%s%02d:%02d:%02d" % (fmt, hours, minutes, seconds)
+        fmt = u"%dd," % (tdelta.days)
+    fmt = u"%s%02d:%02d:%02d" % (fmt, hours, minutes, seconds)
     return fmt
 
 
@@ -149,89 +149,89 @@
     tdelta = datetime.timedelta(seconds=secs)
     hours, rem = divmod(tdelta.seconds, 3600)
     minutes, seconds = divmod(rem, 60)
-    fmt = ""
+    fmt = u""
     if tdelta.days > 0:
-        fmt = "%dd" % (tdelta.days)
+        fmt = u"%dd" % (tdelta.days)
         if hours > 0:
-            fmt = "%s %dh" % (fmt, hours)
+            fmt = u"%s %dh" % (fmt, hours)
         if minutes > 0:
-            fmt = "%s %dmin" % (fmt, minutes)
+            fmt = u"%s %dmin" % (fmt, minutes)
     elif hours > 0:
-        fmt = "%dh" % hours
+        fmt = u"%dh" % hours
         if minutes > 0:
-            fmt = "%s %dmin" % (fmt, minutes)
+            fmt = u"%s %dmin" % (fmt, minutes)
     elif minutes > 5:
-        fmt = "%dmin" % minutes
+        fmt = u"%dmin" % minutes
     elif minutes > 0:
-        fmt = "%dmin" % minutes
+        fmt = u"%dmin" % minutes
         if seconds >= 30:
-            fmt = "%s 30sec" % fmt
+            fmt = u"%s 30sec" % fmt
     elif seconds > 45:
-        fmt = "< 1min"
+        fmt = u"< 1min"
     elif seconds > 30:
-        fmt = "< 45sec"
+        fmt = u"< 45sec"
     elif seconds > 15:
-        fmt = "< 30sec"
+        fmt = u"< 30sec"
     else:
-        fmt = "%dsec" % seconds
+        fmt = u"%dsec" % seconds
     return fmt
 
 
 def TransferProgress(progress, eta, changed_bytes, elapsed, speed, stalled):
-    """Shortcut used for upload progress messages (verbosity 5)."""
+    u"""Shortcut used for upload progress messages (verbosity 5)."""
     dots = int(0.4 * progress)  # int(40.0 * progress / 100.0) -- for 40 chars
     data_amount = float(changed_bytes) / 1024.0
-    data_scale = "KB"
-    if data_amount > 1000.0:
-        data_amount /= 1024.0
-        data_scale = "MB"
-    if data_amount > 1000.0:
-        data_amount /= 1024.0
-        data_scale = "GB"
+    data_scale = u"KB"
+    if data_amount > 1000.0:
+        data_amount /= 1024.0
+        data_scale = u"MB"
+    if data_amount > 1000.0:
+        data_amount /= 1024.0
+        data_scale = u"GB"
     if stalled:
-        eta_str = "Stalled!"
+        eta_str = u"Stalled!"
         speed_amount = 0
-        speed_scale = "B"
+        speed_scale = u"B"
     else:
         eta_str = _RemainingSecs2Str(eta)
         speed_amount = float(speed) / 1024.0
-        speed_scale = "KB"
-        if speed_amount > 1000.0:
-            speed_amount /= 1024.0
-            speed_scale = "MB"
-        if speed_amount > 1000.0:
-            speed_amount /= 1024.0
-            speed_scale = "GB"
-    s = "%.1f%s %s [%.1f%s/s] [%s>%s] %d%% ETA %s" % (data_amount, data_scale,
-                                                      _ElapsedSecs2Str(elapsed),
-                                                      speed_amount, speed_scale,
-                                                      '=' * dots, ' ' * (40 - dots),
-                                                      progress,
-                                                      eta_str
-                                                      )
+        speed_scale = u"KB"
+        if speed_amount > 1000.0:
+            speed_amount /= 1024.0
+            speed_scale = u"MB"
+        if speed_amount > 1000.0:
+            speed_amount /= 1024.0
+            speed_scale = u"GB"
+    s = u"%.1f%s %s [%.1f%s/s] [%s>%s] %d%% ETA %s" % (data_amount, data_scale,
+                                                       _ElapsedSecs2Str(elapsed),
+                                                       speed_amount, speed_scale,
+                                                       u'=' * dots, u' ' * (40 - dots),
+                                                       progress,
+                                                       eta_str
+                                                       )
 
-    controlLine = "%d %d %d %d %d %d" % (changed_bytes, elapsed, progress, eta, speed, stalled)
+    controlLine = u"%d %d %d %d %d %d" % (changed_bytes, elapsed, progress, eta, speed, stalled)
     Log(s, NOTICE, InfoCode.upload_progress, controlLine)
 
 
 def PrintCollectionStatus(col_stats, force_print=False):
-    """Prints a collection status to the log"""
+    u"""Prints a collection status to the log"""
     Log(unicode(col_stats), 8, InfoCode.collection_status,
-        '\n' + '\n'.join(col_stats.to_log_info()), force_print)
+        u'\n' + u'\n'.join(col_stats.to_log_info()), force_print)
 
 
 def PrintCollectionFileChangedStatus(col_stats, filepath, force_print=False):
-    """Prints a collection status to the log"""
+    u"""Prints a collection status to the log"""
     Log(unicode(col_stats.get_file_changed_record(filepath)), 8, InfoCode.collection_status, None, force_print)
 
 
 def Notice(s):
-    """Shortcut used for notice messages (verbosity 3, the default)."""
+    u"""Shortcut used for notice messages (verbosity 3, the default)."""
     Log(s, NOTICE)
 
 
 class WarningCode:
-    """Enumeration class to hold warning code values.
+    u"""Enumeration class to hold warning code values.
        These values should never change, as frontends rely upon them.
        Don't use 0 or negative numbers."""
     generic = 1
@@ -250,12 +250,12 @@
 
 
 def Warn(s, code=WarningCode.generic, extra=None):
-    """Shortcut used for warning messages (verbosity 2)"""
+    u"""Shortcut used for warning messages (verbosity 2)"""
     Log(s, WARNING, code, extra)
 
 
 class ErrorCode:
-    """Enumeration class to hold error code values.
+    u"""Enumeration class to hold error code values.
        These values should never change, as frontends rely upon them.
        Don't use 0 or negative numbers.  This code is returned by duplicity
        to indicate which error occurred via both exit code and log."""
@@ -323,36 +323,36 @@
 
 
 def Error(s, code=ErrorCode.generic, extra=None):
-    """Write error message"""
+    u"""Write error message"""
     Log(s, ERROR, code, extra)
 
 
 def FatalError(s, code=ErrorCode.generic, extra=None):
-    """Write fatal error message and exit"""
+    u"""Write fatal error message and exit"""
     Log(s, ERROR, code, extra)
     shutdown()
     sys.exit(code)
 
 
 class OutFilter(logging.Filter):
-    """Filter that only allows warning or less important messages"""
+    u"""Filter that only allows warning or less important messages"""
     def filter(self, record):
         return record.msg and record.levelno <= DupToLoggerLevel(WARNING)
 
 
 class ErrFilter(logging.Filter):
-    """Filter that only allows messages more important than warnings"""
+    u"""Filter that only allows messages more important than warnings"""
     def filter(self, record):
         return record.msg and record.levelno > DupToLoggerLevel(WARNING)
 
 
 def setup():
-    """Initialize logging"""
+    u"""Initialize logging"""
     global _logger
     if _logger:
         return
 
-    _logger = logging.getLogger("duplicity")
+    _logger = logging.getLogger(u"duplicity")
 
     # Default verbosity allows notices and above
     setverbosity(NOTICE)
@@ -368,7 +368,7 @@
 
 
 class MachineFormatter(logging.Formatter):
-    """Formatter that creates messages in a syntax easily consumable by other
+    u"""Formatter that creates messages in a syntax easily consumable by other
        processes."""
     def __init__(self):
         # 'message' will be appended by format()
@@ -376,7 +376,7 @@
         # standard 'levelname'.  This is because the standard 'levelname' can
         # be adjusted by any library anywhere in our stack without us knowing.
         # But we control 'levelName'.
-        logging.Formatter.__init__(self, "%(levelName)s %(controlLine)s")
+        logging.Formatter.__init__(self, u"%(levelName)s %(controlLine)s")
 
     def format(self, record):
         s = logging.Formatter.format(self, record)
@@ -384,30 +384,30 @@
         # Add user-text hint of 'message' back in, with each line prefixed by a
         # dot, so consumers know it's not part of 'controlLine'
         if record.message:
-            s += ('\n' + record.message).replace('\n', '\n. ')
+            s += (u'\n' + record.message).replace(u'\n', u'\n. ')
 
         # Add a newline so consumers know the message is over.
-        return s + '\n'
+        return s + u'\n'
 
 
 class MachineFilter(logging.Filter):
-    """Filter that only allows levels that are consumable by other processes."""
+    u"""Filter that only allows levels that are consumable by other processes."""
     def filter(self, record):
         # We only want to allow records that have our custom level names
-        return hasattr(record, 'levelName')
+        return hasattr(record, u'levelName')
 
 
 def add_fd(fd):
-    """Add stream to which to write machine-readable logging"""
+    u"""Add stream to which to write machine-readable logging"""
     global _logger
-    handler = logging.StreamHandler(os.fdopen(fd, 'w'))
+    handler = logging.StreamHandler(os.fdopen(fd, u'w'))
     handler.setFormatter(MachineFormatter())
     handler.addFilter(MachineFilter())
     _logger.addHandler(handler)
 
 
 def add_file(filename):
-    """Add file to which to write machine-readable logging"""
+    u"""Add file to which to write machine-readable logging"""
     global _logger
     handler = logging.FileHandler(filename)
     handler.setFormatter(MachineFormatter())
@@ -416,17 +416,17 @@
 
 
 def setverbosity(verb):
-    """Set the verbosity level"""
+    u"""Set the verbosity level"""
     global _logger
     _logger.setLevel(DupToLoggerLevel(verb))
 
 
 def getverbosity():
-    """Get the verbosity level"""
+    u"""Get the verbosity level"""
     global _logger
     return LoggerToDupLevel(_logger.getEffectiveLevel())
 
 
 def shutdown():
-    """Cleanup and flush loggers"""
+    u"""Cleanup and flush loggers"""
     logging.shutdown()

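For the log.py changes: the u'levelName' and u'controlLine' keys feed
MachineFormatter, whose wire format is untouched. A sketch of what a
machine-readable stream carries after these hunks (the numeric code is a
placeholder; real values come from WarningCode and friends):

    from duplicity import log

    log.setup()
    log.Warn(u"cannot process foo", log.WarningCode.cannot_process, u"foo")
    # a stream registered via log.add_fd(fd) receives roughly:
    #   WARNING <code> foo
    #   . cannot process foo
    # i.e. levelName, then controlLine, then the message with each line
    # dot-prefixed so consumers can tell it apart from the control data.
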
=== modified file 'duplicity/manifest.py'
--- duplicity/manifest.py	2018-07-24 11:52:33 +0000
+++ duplicity/manifest.py	2018-09-13 20:09:49 +0000
@@ -369,7 +369,7 @@
                      (whitespace, index_to_string(self.end_index), (self.end_block or " ")))
         for key in self.hashes:
             slist.append("%sHash %s %s" %
-                         (whitespace, key, self.hashes[key]))
+                         (whitespace, key.encode(), self.hashes[key]))
         return "\n".join(slist)
 
     __str__ = to_string

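The manifest.py hunk is the one behavioural tweak in this batch rather
than a pure adornment: hash names become unicode once adorned elsewhere,
while to_string() assembles a plain str manifest line, so key.encode()
keeps the %-formatting from silently promoting the whole line to unicode
on Python 2. That is my reading of it, illustrated (values are made up):

    >>> "Hash %s %s" % (u"SHA1", "abcdef")           # line becomes unicode
    u'Hash SHA1 abcdef'
    >>> "Hash %s %s" % (u"SHA1".encode(), "abcdef")  # stays str
    'Hash SHA1 abcdef'
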
=== modified file 'duplicity/progress.py'
--- duplicity/progress.py	2018-07-24 11:52:33 +0000
+++ duplicity/progress.py	2018-09-13 20:09:49 +0000
@@ -21,7 +21,7 @@
 #
 # @author: Juan Antonio Moya Vicen <juan@xxxxxxxxxxxxxxxx>
 #
-"""
+u"""
 Functions to compute progress of compress & upload files
 The heuristics try to infer the ratio between the amount of data collected
 by the deltas and the total size of the changing files. It also infers the
@@ -49,35 +49,35 @@
 
 
 class Snapshot(sys_collections.deque):
-    """
+    u"""
     A convenience class for storing snapshots in a space/timing efficient manner
     Stores up to 10 consecutive progress snapshots, one for each volume
     """
 
     @staticmethod
     def unmarshall():
-        """
+        u"""
         De-serializes cached data it if present
         """
         snapshot = Snapshot()
         # If restarting Full, discard marshalled data and start over
         if globals.restart is not None and globals.restart.start_vol >= 1:
             try:
-                progressfd = open('%s/progress' % globals.archive_dir_path.name, 'r')
+                progressfd = open(u'%s/progress' % globals.archive_dir_path.name, u'r')
                 snapshot = pickle.load(progressfd)
                 progressfd.close()
             except:
-                log.Warn("Warning, cannot read stored progress information from previous backup",
+                log.Warn(u"Warning, cannot read stored progress information from previous backup",
                          log.WarningCode.cannot_stat)
                 snapshot = Snapshot()
         # Reached here no cached data found or wrong marshalling
         return snapshot
 
     def marshall(self):
-        """
+        u"""
         Serializes object to cache
         """
-        progressfd = open('%s/progress' % globals.archive_dir_path.name, 'w+')
+        progressfd = open(u'%s/progress' % globals.archive_dir_path.name, u'w+')
         pickle.dump(self, progressfd)
         progressfd.close()
 
@@ -127,7 +127,7 @@
         self.prev_data = None
 
     def snapshot_progress(self, volume):
-        """
+        u"""
         Snapshots the current progress status for each volume into the disk cache
         If backup is interrupted, next restart will deserialize the data and try start
         progress from the snapshot
@@ -137,14 +137,14 @@
             self.prev_data.marshall()
 
     def has_collected_evidence(self):
-        """
+        u"""
         Returns true if the progress computation is on and duplicity has not
         yet started the first dry-run pass to collect some information
         """
         return (self.total_stats is not None)
 
     def log_upload_progress(self):
-        """
+        u"""
         Aproximative and evolving method of computing the progress of upload
         """
         if not globals.progress or not self.has_collected_evidence():
@@ -173,7 +173,7 @@
 
         self.nsteps += 1
 
-        """
+        u"""
         Compute the ratio of information being written for deltas vs file sizes
         Using Knuth algorithm to estimate approximate upper bound in % of completion
         The progress is estimated on the current bytes written vs the total bytes to
@@ -203,7 +203,7 @@
             self.change_r_estimation += change_delta * (change_ratio - self.change_mean_ratio)
             change_sigma = math.sqrt(math.fabs(self.change_r_estimation / float(self.nsteps)))
 
-            """
+            u"""
             Combine variables for progress estimation
             Fit a smoothed curve that covers the most common data density distributions,
             aiming for a large number of incremental changes.
@@ -216,7 +216,7 @@
                 (self.change_mean_ratio - 0.67 * change_sigma) * (1.0 - self.current_estimation) +
                 (self.change_mean_ratio + 0.67 * change_sigma) * self.current_estimation
             )
-            """
+            u"""
             In case that we overpassed the 100%, drop the confidence and trust more the mean as the
             sigma may be large.
             """
@@ -225,13 +225,13 @@
                     (self.change_mean_ratio - 0.33 * change_sigma) * (1.0 - self.current_estimation) +
                     (self.change_mean_ratio + 0.33 * change_sigma) * self.current_estimation
                 )
-            """
+            u"""
             Meh!, if again overpassed the 100%, drop the confidence to 0 and trust only the mean.
             """
             if self.current_estimation > 1.0:
                 self.current_estimation = self.change_mean_ratio * float(changes) / float(total_changes)
 
-        """
+        u"""
         Lastly, just cap it... nothing else we can do to approximate it better.
         Cap it to 99%, as the remaining 1% to 100% we reserve for the last step
         uploading of signature and manifests
@@ -240,7 +240,7 @@
                                                 (1.0 - self.prev_estimation) *
                                                 self.current_estimation, 0.99))
 
-        """
+        u"""
         Estimate the time just as a projection of the remaining time, fit to a
         [(1 - x) / x] curve
         """
@@ -256,7 +256,7 @@
         if self.progress_estimation < last_progress_estimation:
             self.progress_estimation = last_progress_estimation
 
-        """
+        u"""
         Compute Exponential Moving Average of speed as bytes/sec of the last 30 probes
         """
         if elapsed.total_seconds() > 0:
@@ -278,7 +278,7 @@
                              )
 
     def annotate_written_bytes(self, bytecount):
-        """
+        u"""
         Annotate the number of bytes that have been added/changed since last time
         this function was called.
         bytecount param will show the number of bytes since the start of the current
@@ -291,7 +291,7 @@
             self.stall_last_time = datetime.now()
 
     def set_evidence(self, stats, is_full):
-        """
+        u"""
         Stores the collected statistics from a first-pass dry-run, to use this
         information later so as to estimate progress
         """
@@ -304,14 +304,14 @@
         self.progress_estimation = max(0.0, min(self.prev_estimation, 0.99))
 
     def total_elapsed_seconds(self):
-        """
+        u"""
         Elapsed seconds since the first call to log_upload_progress method
         """
         return (datetime.now() - self.start_time).seconds
 
 
 def report_transfer(bytecount, totalbytes):
-    """
+    u"""
     Method to call tracker.annotate_written_bytes from outside
     the class, and to offer the "function(long, long)" signature
     which is handy to pass as callback
@@ -323,7 +323,7 @@
 
 
 class LogProgressThread(threading.Thread):
-    """
+    u"""
     Background thread that reports progress to the log,
     every --progress-rate seconds
     """

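Context for the progress.py hunks: the u""" blocks adorned inside
log_upload_progress are commentary around a Knuth-style running
mean/variance of the delta ratio, and only the literals change. The
update rule the tracker relies on, in isolation (variable names are mine,
not the tracker's attributes):

    import math

    n, mean, m2 = 0, 0.0, 0.0           # count, running mean, sum of sq. devs

    def update(x):
        global n, mean, m2
        n += 1
        delta = x - mean
        mean += delta / n               # new running mean
        m2 += delta * (x - mean)        # accumulate squared deviation
        return mean, math.sqrt(m2 / n)  # mean and sigma so far
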
=== modified file 'duplicity/robust.py'
--- duplicity/robust.py	2018-07-24 11:52:33 +0000
+++ duplicity/robust.py	2018-09-13 20:09:49 +0000
@@ -28,7 +28,7 @@
 
 
 def check_common_error(error_handler, function, args=()):
-    """Apply function to args, if error, run error_handler on exception
+    u"""Apply function to args, if error, run error_handler on exception
 
     This only catches certain exceptions which seem innocent
     enough.
@@ -47,9 +47,9 @@
         if (not isinstance(exc, EnvironmentError) or
             ((exc[0] in errno.errorcode) and
              errno.errorcode[exc[0]] in
-             ['EPERM', 'ENOENT', 'EACCES', 'EBUSY', 'EEXIST',
-              'ENOTDIR', 'ENAMETOOLONG', 'EINTR', 'ENOTEMPTY',
-              'EIO', 'ETXTBSY', 'ESRCH', 'EINVAL'])):
+             [u'EPERM', u'ENOENT', u'EACCES', u'EBUSY', u'EEXIST',
+              u'ENOTDIR', u'ENAMETOOLONG', u'EINTR', u'ENOTEMPTY',
+              u'EIO', u'ETXTBSY', u'ESRCH', u'EINVAL'])):
             # Log.exception()
             if error_handler:
                 return error_handler(exc, *args)
@@ -59,9 +59,9 @@
 
 
 def listpath(path):
-    """Like path.listdir() but return [] if error, and sort results"""
+    u"""Like path.listdir() but return [] if error, and sort results"""
     def error_handler(exc):
-        log.Warn(_("Error listing directory %s") % path.uc_name)
+        log.Warn(_(u"Error listing directory %s") % path.uc_name)
         return []
     dir_listing = check_common_error(error_handler, path.listdir)
     dir_listing.sort()

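robust.py: check_common_error() hands only "innocent" EnvironmentErrors
(the errno whitelist, now spelled with u'' names) to the handler; anything
else still propagates. A small usage sketch in the style of listpath(),
with a made-up cleanup call:

    import os
    from duplicity import robust, log

    def on_error(exc, fname):
        log.Warn(u"could not remove %s: %s" % (fname, exc))

    robust.check_common_error(on_error, os.unlink, (u"/tmp/dup-scratch",))

The u'' errno names still compare equal to the str values in
errno.errorcode on both Python 2 and 3, so the adornment is safe here.
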
=== modified file 'duplicity/tarfile.py'
--- duplicity/tarfile.py	2018-07-24 11:52:33 +0000
+++ duplicity/tarfile.py	2018-09-13 20:09:49 +0000
@@ -18,7 +18,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""Like system tarfile but with caching."""
+u"""Like system tarfile but with caching."""
 
 from __future__ import absolute_import
 

=== modified file 'duplicity/tempdir.py'
--- duplicity/tempdir.py	2018-07-24 11:52:33 +0000
+++ duplicity/tempdir.py	2018-09-13 20:09:49 +0000
@@ -19,7 +19,7 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-"""
+u"""
 Provides temporary file handling cenetered around a single top-level
 securely created temporary directory.
 
@@ -44,7 +44,7 @@
 
 
 def default():
-    """
+    u"""
     Obtain the global default instance of TemporaryDirectory, creating
     it first if necessary. Failures are propagated to caller. Most
     callers are expected to use this function rather than
@@ -68,7 +68,7 @@
 
 
 class TemporaryDirectory:
-    """
+    u"""
     A temporary directory.
 
     An instance of this class is backed by a directory in the file
@@ -114,7 +114,7 @@
     'attackers' file.
     """
     def __init__(self, temproot=None):
-        """
+        u"""
         Create a new TemporaryDirectory backed by a unique and
         securely created file system directory.
 
@@ -127,9 +127,9 @@
             else:
                 global _initialSystemTempRoot
                 temproot = _initialSystemTempRoot
-        self.__dir = tempfile.mkdtemp("-tempdir", "duplicity-", temproot)
+        self.__dir = tempfile.mkdtemp(u"-tempdir", u"duplicity-", temproot)
 
-        log.Info(_("Using temporary directory %s") % util.fsdecode(self.__dir))
+        log.Info(_(u"Using temporary directory %s") % util.fsdecode(self.__dir))
 
         # number of mktemp()/mkstemp() calls served so far
         self.__tempcount = 0
@@ -141,13 +141,13 @@
         self.__lock = threading.Lock()  # protect private resources *AND* mktemp/mkstemp calls
 
     def dir(self):
-        """
+        u"""
         Returns the absolute pathname of the temp folder.
         """
         return self.__dir
 
     def __del__(self):
-        """
+        u"""
         Perform cleanup.
         """
         global _defaultInstance
@@ -155,7 +155,7 @@
             self.cleanup()
 
     def mktemp(self):
-        """
+        u"""
         Return a unique filename suitable for use for a temporary
         file. The file is not created.
 
@@ -170,10 +170,10 @@
         self.__lock.acquire()
         try:
             self.__tempcount = self.__tempcount + 1
-            suffix = "-%d" % (self.__tempcount,)
-            filename = tempfile.mktemp(suffix, "mktemp-", self.__dir)
+            suffix = u"-%d" % (self.__tempcount,)
+            filename = tempfile.mktemp(suffix, u"mktemp-", self.__dir)
 
-            log.Debug(_("Registering (mktemp) temporary file %s") % util.fsdecode(filename))
+            log.Debug(_(u"Registering (mktemp) temporary file %s") % util.fsdecode(filename))
             self.__pending[filename] = None
         finally:
             self.__lock.release()
@@ -181,7 +181,7 @@
         return filename
 
     def mkstemp(self):
-        """
+        u"""
         Returns a filedescriptor and a filename, as per os.mkstemp(),
         but located in the temporary directory and subject to tracking
         and automatic cleanup.
@@ -192,10 +192,10 @@
         self.__lock.acquire()
         try:
             self.__tempcount = self.__tempcount + 1
-            suffix = "-%d" % (self.__tempcount,)
-            fd, filename = tempfile.mkstemp(suffix, "mkstemp-", self.__dir)
+            suffix = u"-%d" % (self.__tempcount,)
+            fd, filename = tempfile.mkstemp(suffix, u"mkstemp-", self.__dir)
 
-            log.Debug(_("Registering (mkstemp) temporary file %s") % util.fsdecode(filename))
+            log.Debug(_(u"Registering (mkstemp) temporary file %s") % util.fsdecode(filename))
             self.__pending[filename] = None
         finally:
             self.__lock.release()
@@ -203,16 +203,16 @@
         return fd, filename
 
     def mkstemp_file(self):
-        """
+        u"""
         Convenience wrapper around mkstemp(), with the file descriptor
         converted into a file object.
         """
         fd, filename = self.mkstemp()
 
-        return os.fdopen(fd, "r+"), filename
+        return os.fdopen(fd, u"r+"), filename
 
     def forget(self, fname):
-        """
+        u"""
         Forget about the given filename previously obtained through
         mktemp() or mkstemp(). This should be called *after* the file
         has been deleted, to stop a future cleanup() from trying to
@@ -227,16 +227,16 @@
         self.__lock.acquire()
         try:
             if fname in self.__pending:
-                log.Debug(_("Forgetting temporary file %s") % util.fsdecode(fname))
+                log.Debug(_(u"Forgetting temporary file %s") % util.fsdecode(fname))
                 del(self.__pending[fname])
             else:
-                log.Warn(_("Attempt to forget unknown tempfile %s - this is probably a bug.") % util.fsdecode(fname))
+                log.Warn(_(u"Attempt to forget unknown tempfile %s - this is probably a bug.") % util.fsdecode(fname))
                 pass
         finally:
             self.__lock.release()
 
     def cleanup(self):
-        """
+        u"""
         Cleanup any files created in the temporary directory (that
         have not been forgotten), and clean up the temporary directory
         itself.
@@ -249,16 +249,16 @@
             if self.__dir is not None:
                 for file in self.__pending.keys():
                     try:
-                        log.Debug(_("Removing still remembered temporary file %s") % util.fsdecode(file))
+                        log.Debug(_(u"Removing still remembered temporary file %s") % util.fsdecode(file))
                         util.ignore_missing(os.unlink, file)
                     except Exception:
-                        log.Info(_("Cleanup of temporary file %s failed") % util.fsdecode(file))
+                        log.Info(_(u"Cleanup of temporary file %s failed") % util.fsdecode(file))
                         pass
                 try:
                     os.rmdir(self.__dir)
                 except Exception:
-                    log.Warn(_("Cleanup of temporary directory %s failed - "
-                               "this is probably a bug.") % util.fsdecode(self.__dir))
+                    log.Warn(_(u"Cleanup of temporary directory %s failed - "
+                               u"this is probably a bug.") % util.fsdecode(self.__dir))
                     pass
                 self.__pending = None
                 self.__dir = None

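tempdir.py usage, for reference (nothing below is new in this branch; the
actual directory name is whatever mkdtemp picks): the tracked-file
lifecycle these docstrings describe looks like

    import os
    from duplicity import tempdir

    td = tempdir.default()       # shared, securely created temp dir
    fd, fname = td.mkstemp()     # tracked: cleanup() would remove it
    os.close(fd)
    os.unlink(fname)
    td.forget(fname)             # deleted by hand, so stop tracking it

mkstemp_file() is the same with the descriptor already wrapped via
os.fdopen(fd, u"r+").
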
=== modified file 'po/duplicity.pot'
--- po/duplicity.pot	2018-08-02 15:48:23 +0000
+++ po/duplicity.pot	2018-09-13 20:09:49 +0000
@@ -8,7 +8,7 @@
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: Kenneth Loafman <kenneth@xxxxxxxxxxx>\n"
-"POT-Creation-Date: 2018-08-01 10:53-0500\n"
+"POT-Creation-Date: 2018-09-11 15:34-0500\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@xxxxxx>\n"
@@ -311,20 +311,20 @@
 "Use 'ulimit -n 1024' or higher to correct.\n"
 msgstr ""
 
-#: ../bin/duplicity:1434
+#: ../bin/duplicity:1435
 msgid ""
 "RESTART: The first volume failed to upload before termination.\n"
 "         Restart is impossible...starting backup from beginning."
 msgstr ""
 
-#: ../bin/duplicity:1440
+#: ../bin/duplicity:1441
 #, python-format
 msgid ""
 "RESTART: Volumes %d to %d failed to upload before termination.\n"
 "         Restarting backup at volume %d."
 msgstr ""
 
-#: ../bin/duplicity:1447
+#: ../bin/duplicity:1448
 #, python-format
 msgid ""
 "RESTART: Impossible backup state: manifest has %d vols, remote has %d vols.\n"
@@ -333,7 +333,7 @@
 "         backup then restart the backup from the beginning."
 msgstr ""
 
-#: ../bin/duplicity:1468
+#: ../bin/duplicity:1469
 msgid ""
 "\n"
 "PYTHONOPTIMIZE in the environment causes duplicity to fail to\n"
@@ -343,54 +343,54 @@
 "See https://bugs.launchpad.net/duplicity/+bug/931175\n";
 msgstr ""
 
-#: ../bin/duplicity:1491
+#: ../bin/duplicity:1492
 #, python-format
 msgid "Acquiring lockfile %s"
 msgstr ""
 
-#: ../bin/duplicity:1549
+#: ../bin/duplicity:1550
 #, python-format
 msgid "Last %s backup left a partial set, restarting."
 msgstr ""
 
-#: ../bin/duplicity:1553
+#: ../bin/duplicity:1554
 #, python-format
 msgid "Cleaning up previous partial %s backup set, restarting."
 msgstr ""
 
-#: ../bin/duplicity:1565
+#: ../bin/duplicity:1566
 msgid "Last full backup date:"
 msgstr ""
 
-#: ../bin/duplicity:1567
+#: ../bin/duplicity:1568
 msgid "Last full backup date: none"
 msgstr ""
 
-#: ../bin/duplicity:1569
+#: ../bin/duplicity:1570
 msgid "Last full backup is too old, forcing full backup"
 msgstr ""
 
-#: ../bin/duplicity:1618
+#: ../bin/duplicity:1619
 msgid ""
 "When using symmetric encryption, the signing passphrase must equal the "
 "encryption passphrase."
 msgstr ""
 
-#: ../bin/duplicity:1687
+#: ../bin/duplicity:1688
 msgid "INT intercepted...exiting."
 msgstr ""
 
-#: ../bin/duplicity:1695
+#: ../bin/duplicity:1696
 #, python-format
 msgid "GPG error detail: %s"
 msgstr ""
 
-#: ../bin/duplicity:1705
+#: ../bin/duplicity:1706
 #, python-format
 msgid "User error detail: %s"
 msgstr ""
 
-#: ../bin/duplicity:1715
+#: ../bin/duplicity:1716
 #, python-format
 msgid "Backend error detail: %s"
 msgstr ""

=== modified file 'testing/test_code.py'
--- testing/test_code.py	2018-07-24 20:57:03 +0000
+++ testing/test_code.py	2018-09-13 20:09:49 +0000
@@ -112,33 +112,12 @@
                          # TODO Every file from here down needs to be fixed and the exclusion removed
                          os.path.join(_top_dir, u'setup.py'),
                          os.path.join(_top_dir, u'duplicity', u'__init__.py'),
-                         os.path.join(_top_dir, u'duplicity', u'asyncscheduler.py'),
-                         os.path.join(_top_dir, u'duplicity', u'backend.py'),
-                         os.path.join(_top_dir, u'duplicity', u'cached_ops.py'),
-                         os.path.join(_top_dir, u'duplicity', u'collections.py'),
-                         os.path.join(_top_dir, u'duplicity', u'commandline.py'),
                          os.path.join(_top_dir, u'duplicity', u'compilec.py'),
                          os.path.join(_top_dir, u'duplicity', u'diffdir.py'),
-                         os.path.join(_top_dir, u'duplicity', u'dup_temp.py'),
-                         os.path.join(_top_dir, u'duplicity', u'dup_threading.py'),
-                         os.path.join(_top_dir, u'duplicity', u'dup_time.py'),
-                         os.path.join(_top_dir, u'duplicity', u'errors.py'),
-                         os.path.join(_top_dir, u'duplicity', u'file_naming.py'),
-                         os.path.join(_top_dir, u'duplicity', u'filechunkio.py'),
-                         os.path.join(_top_dir, u'duplicity', u'globals.py'),
-                         os.path.join(_top_dir, u'duplicity', u'gpg.py'),
-                         os.path.join(_top_dir, u'duplicity', u'gpginterface.py'),
-                         os.path.join(_top_dir, u'duplicity', u'lazy.py'),
-                         os.path.join(_top_dir, u'duplicity', u'librsync.py'),
-                         os.path.join(_top_dir, u'duplicity', u'log.py'),
                          os.path.join(_top_dir, u'duplicity', u'manifest.py'),
                          os.path.join(_top_dir, u'duplicity', u'patchdir.py'),
                          os.path.join(_top_dir, u'duplicity', u'path.py'),
-                         os.path.join(_top_dir, u'duplicity', u'progress.py'),
-                         os.path.join(_top_dir, u'duplicity', u'robust.py'),
                          os.path.join(_top_dir, u'duplicity', u'statistics.py'),
-                         os.path.join(_top_dir, u'duplicity', u'tarfile.py'),
-                         os.path.join(_top_dir, u'duplicity', u'tempdir.py'),
                          os.path.join(_top_dir, u'duplicity', u'util.py'),
                          os.path.join(_top_dir, u'testing', u'manual', u'__init__.py'),
                          os.path.join(_top_dir, u'testing', u'overrides', u'__init__.py'),

