← Back to team overview

duplicity-team team mailing list archive

[Merge] lp:~kenneth-loafman/duplicity/duplicity-pylint into lp:duplicity

 

Kenneth Loafman has proposed merging lp:~kenneth-loafman/duplicity/duplicity-pylint into lp:duplicity.

Commit message:
* Enable additional pylint warnings. Make 1st pass at correction.
  - unused-argument,
  - unused-wildcard-import,
  - redefined-builtin,
  - bad-indentation,
  - mixed-indentation,
  - unreachable
* Resolved conflict between duplicity.config and testing.manual.config
* Normalized emacs mode line to have encoding:utf8 on all *.py files



Requested reviews:
  duplicity-team (duplicity-team)

For more details, see:
https://code.launchpad.net/~kenneth-loafman/duplicity/duplicity-pylint/+merge/381005
-- 
Your team duplicity-team is requested to review the proposed merge of lp:~kenneth-loafman/duplicity/duplicity-pylint into lp:duplicity.
=== modified file '.bzrignore'
--- .bzrignore	2020-03-19 19:05:19 +0000
+++ .bzrignore	2020-03-22 12:35:54 +0000
@@ -27,3 +27,4 @@
 testing/gnupg/.gpg-v21-migrated
 testing/gnupg/S.*
 testing/gnupg/private-keys-v1.d
+testing/manual/test_config.py

=== added directory '.dbeaver'
=== modified file 'bin/duplicity'
--- bin/duplicity	2020-02-06 15:27:43 +0000
+++ bin/duplicity	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # duplicity -- Encrypted bandwidth efficient backup
 #
@@ -29,27 +29,9 @@
 from __future__ import print_function
 from future import standard_library
 standard_library.install_aliases()
-from builtins import filter
-from builtins import map
-from builtins import next
-from builtins import object
-from builtins import range
 
-import copy
-import fasteners
-import gzip
 import os
-import platform
-import re
-import resource
 import sys
-import time
-import traceback
-import types
-
-
-from datetime import datetime
-from os import statvfs
 
 from duplicity.dup_main import main
 import duplicity.errors

=== modified file 'bin/duplicity.1'
--- bin/duplicity.1	2020-03-06 21:25:13 +0000
+++ bin/duplicity.1	2020-03-22 12:35:54 +0000
@@ -603,9 +603,9 @@
 
 file_blocksize = int((file_len / (2000 * 512)) * 512)
 .br
-return min(file_blocksize, globals.max_blocksize)
+return min(file_blocksize, config.max_blocksize)
 
-where globals.max_blocksize defaults to 2048.
+where config.max_blocksize defaults to 2048.
 If you specify a larger max_blocksize, your difftar files will be larger, but your sigtar files will be smaller.
 If you specify a smaller max_blocksize, the reverse occurs.
 The --max-blocksize option should be in multiples of 512.

=== modified file 'bin/rdiffdir'
--- bin/rdiffdir	2020-02-06 15:27:43 +0000
+++ bin/rdiffdir	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # rdiffdir -- Extend rdiff functionality to directories
 #
@@ -36,7 +36,7 @@
 from duplicity import diffdir
 from duplicity import patchdir
 from duplicity import log
-from duplicity import globals
+from duplicity import config
 from duplicity import selection
 from duplicity import path
 from duplicity import util
@@ -96,18 +96,18 @@
             select_opts.append((u"--include-filelist", u"standard input"))
             select_files.append(sys.stdin)
         elif opt == u"--max-blocksize":
-            globals.max_blocksize = int(arg)
+            config.max_blocksize = int(arg)
         elif opt == u"--null-separator":
-            globals.null_separator = 1
+            config.null_separator = 1
         elif opt == u"-V":
-            print(u"rdiffdir", str(globals.version))
+            print(u"rdiffdir", str(config.version))
             sys.exit(0)
         elif opt == u"-v" or opt == u"--verbosity":
             log.setverbosity(int(arg))
         elif opt == u"--write-sig-to" or opt == u"--write-signature-to":
             sig_fileobj = get_fileobj(arg, u"wb")
         elif opt == u"--ignore-errors":
-            globals.ignore_errors = 1
+            config.ignore_errors = 1
         else:
             command_line_error(u"Unknown option %s" % opt)
 

=== modified file 'compilec.py'
--- compilec.py	2019-12-28 21:26:47 +0000
+++ compilec.py	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'debian/rules'
--- debian/rules	2020-01-06 17:38:21 +0000
+++ debian/rules	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 #!/usr/bin/make -f
-# -*- Mode:Makefile; indent-tabs-mode:t; tab-width:4 -*-
+# -*- Mode:Makefile; indent-tabs-mode:t; tab-width:4; encoding:utf8 -*-
 
 # Old versions of dpkg-parsechangelog don't support -SVersion
 UPSTREAM_VERSION=$(shell dpkg-parsechangelog | grep ^Version: | cut -d' ' -f2 | cut -d- -f1)

=== modified file 'dist/makedist'
--- dist/makedist	2020-03-07 15:55:26 +0000
+++ dist/makedist	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -29,11 +29,10 @@
 import shutil
 import time
 import sys
-from subprocess import Popen, PIPE, STDOUT
+from subprocess import Popen, PIPE
 
 sys.path.insert(0, os.path.abspath(u"./"))
 
-from duplicity import util
 from duplicity import __version__
 
 bzr = Popen([u"bzr", u"revno"], stdout=PIPE, universal_newlines=True)

=== modified file 'docs/conf.py'
--- docs/conf.py	2020-02-06 16:04:00 +0000
+++ docs/conf.py	2020-03-22 12:35:54 +0000
@@ -1,3 +1,5 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
+#
 # Configuration file for the Sphinx documentation builder.
 #
 # This file only contains a selection of the most common options. For a full
@@ -9,7 +11,6 @@
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-import os
 import sys
 sys.path.insert(0, '/Users/ken/workspace/duplicity-src8/testing')
 sys.path.insert(0, '/Users/ken/workspace/duplicity-src8/duplicity')
@@ -20,7 +21,7 @@
 # -- Project information -----------------------------------------------------
 
 project = 'duplicity'
-copyright = '2020, Kenneth Loafman'
+copyright = '2020, Kenneth Loafman'  # pylint: disable=redefined-builtin
 author = 'Kenneth Loafman'
 
 

=== renamed file 'docs/duplicity.globals.rst' => 'docs/duplicity.config.rst'
--- docs/duplicity.globals.rst	2020-02-01 21:33:23 +0000
+++ docs/duplicity.config.rst	2020-03-22 12:35:54 +0000
@@ -1,7 +1,7 @@
-duplicity.globals module
+duplicity.config module
 ========================
 
-.. automodule:: duplicity.globals
+.. automodule:: duplicity.config
    :members:
    :undoc-members:
    :show-inheritance:

=== modified file 'docs/duplicity.rst'
--- docs/duplicity.rst	2020-02-01 21:33:23 +0000
+++ docs/duplicity.rst	2020-03-22 12:35:54 +0000
@@ -17,6 +17,7 @@
    duplicity.backend
    duplicity.cached_ops
    duplicity.commandline
+   duplicity.config
    duplicity.diffdir
    duplicity.dup_collections
    duplicity.dup_main
@@ -26,7 +27,6 @@
    duplicity.errors
    duplicity.file_naming
    duplicity.filechunkio
-   duplicity.globals
    duplicity.globmatch
    duplicity.gpg
    duplicity.gpginterface

=== modified file 'duplicity/__init__.py'
--- duplicity/__init__.py	2020-03-19 19:07:30 +0000
+++ duplicity/__init__.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -27,4 +27,4 @@
 if sys.version_info.major >= 3:
     gettext.install(u'duplicity', names=[u'ngettext'])
 else:
-    gettext.install(u'duplicity', names=[u'ngettext'], unicode=True)
+    gettext.install(u'duplicity', names=[u'ngettext'], unicode=True)  # pylint: disable=unexpected-keyword-arg

=== modified file 'duplicity/asyncscheduler.py'
--- duplicity/asyncscheduler.py	2020-01-10 16:35:50 +0000
+++ duplicity/asyncscheduler.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'duplicity/backend.py'
--- duplicity/backend.py	2020-01-21 13:24:32 +0000
+++ duplicity/backend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -37,17 +37,15 @@
 import re
 import getpass
 import re
-import types
 import urllib.request  # pylint: disable=import-error
 import urllib.parse  # pylint: disable=import-error
 import urllib.error  # pylint: disable=import-error
 
 from duplicity import dup_temp
 from duplicity import file_naming
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity import path
-from duplicity import progress
 from duplicity import util
 
 from duplicity.util import exception_traceback
@@ -220,7 +218,7 @@
 
     Raise InvalidBackendURL if the URL is not a valid URL.
     """
-    if globals.use_gio:
+    if config.use_gio:
         url_string = u'gio+' + url_string
     obj = get_backend_object(url_string)
     if obj:
@@ -370,8 +368,8 @@
     def outer_retry(fn):
         def inner_retry(self, *args):
             global _last_exception
-            errors_fatal, errors_default = globals.are_errors_fatal.get(operation, (True, None))
-            for n in range(1, globals.num_retries + 1):
+            errors_fatal, errors_default = config.are_errors_fatal.get(operation, (True, None))
+            for n in range(1, config.num_retries + 1):
                 try:
                     return fn(self, *args)
                 except FatalBackendException as e:
@@ -391,7 +389,7 @@
                         # retry on anything else
                         log.Debug(_(u"Backtrace of previous error: %s")
                                   % exception_traceback())
-                        at_end = n == globals.num_retries
+                        at_end = n == config.num_retries
                         code = _get_code_from_exception(self.backend, operation, e)
                         if code == log.ErrorCode.backend_not_found:
                             # If we tried to do something, but the file just isn't there,
@@ -413,9 +411,9 @@
                                      % (n, e.__class__.__name__, util.uexc(e)))
                         if not at_end:
                             if isinstance(e, TemporaryLoadException):
-                                time.sleep(3 * globals.backend_retry_delay)  # wait longer before trying again
+                                time.sleep(3 * config.backend_retry_delay)  # wait longer before trying again
                             else:
-                                time.sleep(globals.backend_retry_delay)  # wait a bit before trying again
+                                time.sleep(config.backend_retry_delay)  # wait a bit before trying again
                             if hasattr(self.backend, u'_retry_cleanup'):
                                 self.backend._retry_cleanup()
 

=== modified file 'duplicity/backends/__init__.py'
--- duplicity/backends/__init__.py	2018-07-23 14:55:39 +0000
+++ duplicity/backends/__init__.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'duplicity/backends/_boto_multi.py'
--- duplicity/backends/_boto_multi.py	2020-03-03 12:46:33 +0000
+++ duplicity/backends/_boto_multi.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -34,10 +34,10 @@
 import time
 import traceback
 
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity import progress
-from duplicity.errors import *  # @UnusedWildImport
+from duplicity.errors import *  # pylint: disable=unused-wildcard-import
 from duplicity.filechunkio import FileChunkIO
 
 from ._boto_single import BotoBackend as BotoSingleBackend
@@ -101,7 +101,7 @@
         self._setup_pool()
 
     def _setup_pool(self):
-        number_of_procs = globals.s3_multipart_max_procs
+        number_of_procs = config.s3_multipart_max_procs
         if not number_of_procs:
             number_of_procs = psutil.cpu_count(logical=False)
 
@@ -124,16 +124,16 @@
     def upload(self, filename, key, headers=None):
         import boto  # pylint: disable=import-error
 
-        chunk_size = globals.s3_multipart_chunk_size
+        chunk_size = config.s3_multipart_chunk_size
 
         # Check minimum chunk size for S3
-        if chunk_size < globals.s3_multipart_minimum_chunk_size:
+        if chunk_size < config.s3_multipart_minimum_chunk_size:
             log.Warn(u"Minimum chunk size is %d, but %d specified." % (
-                globals.s3_multipart_minimum_chunk_size, chunk_size))
-            chunk_size = globals.s3_multipart_minimum_chunk_size
+                config.s3_multipart_minimum_chunk_size, chunk_size))
+            chunk_size = config.s3_multipart_minimum_chunk_size
 
         # Decide in how many chunks to upload
-        bytes = os.path.getsize(filename)
+        bytes = os.path.getsize(filename)  # pylint: disable=redefined-builtin
         if bytes < chunk_size:
             chunks = 1
         else:
@@ -143,12 +143,12 @@
 
         log.Debug(u"Uploading %d bytes in %d chunks" % (bytes, chunks))
 
-        mp = self.bucket.initiate_multipart_upload(key.key, headers, encrypt_key=globals.s3_use_sse)
+        mp = self.bucket.initiate_multipart_upload(key.key, headers, encrypt_key=config.s3_use_sse)
 
         # Initiate a queue to share progress data between the pool
         # workers and a consumer thread, that will collect and report
         queue = None
-        if globals.progress:
+        if config.progress:
             manager = multiprocessing.Manager()
             queue = manager.Queue()
             consumer = ConsumerThread(queue, bytes)
@@ -157,14 +157,14 @@
         for n in range(chunks):
             storage_uri = boto.storage_uri(self.boto_uri_str)
             params = [self.scheme, self.parsed_url, storage_uri, self.bucket_name,
-                      mp.id, filename, n, chunk_size, globals.num_retries,
+                      mp.id, filename, n, chunk_size, config.num_retries,
                       queue]
             tasks.append(self._pool.apply_async(multipart_upload_worker, params))
 
         log.Debug(u"Waiting for the pool to finish processing %s tasks" % len(tasks))
         while tasks:
             try:
-                tasks[0].wait(timeout=globals.s3_multipart_max_timeout)
+                tasks[0].wait(timeout=config.s3_multipart_max_timeout)
                 if tasks[0].ready():
                     if tasks[0].successful():
                         del tasks[0]
@@ -183,7 +183,7 @@
         log.Debug(u"Done waiting for the pool to finish processing")
 
         # Terminate the consumer thread, if any
-        if globals.progress:
+        if config.progress:
             consumer.finish = True
             consumer.join()
 
@@ -195,7 +195,7 @@
 
 
 def multipart_upload_worker(scheme, parsed_url, storage_uri, bucket_name, multipart_id,
-                            filename, offset, bytes, num_retries, queue):
+                            filename, offset, bytes, num_retries, queue):  # pylint: disable=redefined-builtin
     u"""
     Worker method for uploading a file chunk to S3 using multipart upload.
     Note that the file chunk is read into memory, so it's important to keep

=== modified file 'duplicity/backends/_boto_single.py'
--- duplicity/backends/_boto_single.py	2019-11-09 22:22:05 +0000
+++ duplicity/backends/_boto_single.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -25,7 +25,7 @@
 import time
 
 import duplicity.backend
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity.errors import FatalBackendException, BackendException
 from duplicity import progress
@@ -72,7 +72,7 @@
             cfs_supported = False
             calling_format = None
 
-        if globals.s3_use_new_style:
+        if config.s3_use_new_style:
             if cfs_supported:
                 calling_format = SubdomainCallingFormat()
             else:
@@ -95,11 +95,11 @@
 
     if not parsed_url.hostname:
         # Use the default host.
-        conn = storage_uri.connect(is_secure=(not globals.s3_unencrypted_connection))
+        conn = storage_uri.connect(is_secure=(not config.s3_unencrypted_connection))
     else:
         assert scheme == u's3'
         conn = storage_uri.connect(host=parsed_url.hostname, port=parsed_url.port,
-                                   is_secure=(not globals.s3_unencrypted_connection))
+                                   is_secure=(not config.s3_unencrypted_connection))
 
     if hasattr(conn, u'calling_format'):
         if calling_format is None:
@@ -166,7 +166,7 @@
         # boto uses scheme://bucket[/name] and specifies hostname on connect()
         self.boto_uri_str = u'://'.join((parsed_url.scheme[:2],
                                          parsed_url.path.lstrip(u'/')))
-        if globals.s3_european_buckets:
+        if config.s3_european_buckets:
             self.my_location = Location.EU
         else:
             self.my_location = u''
@@ -206,8 +206,8 @@
     def _put(self, source_path, remote_filename):
         remote_filename = util.fsdecode(remote_filename)
 
-        if globals.s3_european_buckets:
-            if not globals.s3_use_new_style:
+        if config.s3_european_buckets:
+            if not config.s3_use_new_style:
                 raise FatalBackendException(u"European bucket creation was requested, but not new-style "
                                             u"bucket addressing (--s3-use-new-style)",
                                             code=log.ErrorCode.s3_bucket_not_style)
@@ -224,25 +224,25 @@
 
         key = self.bucket.new_key(self.key_prefix + remote_filename)
 
-        if globals.s3_use_rrs:
+        if config.s3_use_rrs:
             storage_class = u'REDUCED_REDUNDANCY'
-        elif globals.s3_use_ia:
+        elif config.s3_use_ia:
             storage_class = u'STANDARD_IA'
-        elif globals.s3_use_onezone_ia:
+        elif config.s3_use_onezone_ia:
             storage_class = u'ONEZONE_IA'
-        elif globals.s3_use_glacier and u"manifest" not in remote_filename:
+        elif config.s3_use_glacier and u"manifest" not in remote_filename:
             storage_class = u'GLACIER'
         else:
             storage_class = u'STANDARD'
         log.Info(u"Uploading %s/%s to %s Storage" % (self.straight_url, remote_filename, storage_class))
-        if globals.s3_use_sse:
+        if config.s3_use_sse:
             headers = {
                 u'Content-Type': u'application/octet-stream',
                 u'x-amz-storage-class': storage_class,
                 u'x-amz-server-side-encryption': u'AES256'
             }
-        elif globals.s3_use_sse_kms:
-            if globals.s3_kms_key_id is None:
+        elif config.s3_use_sse_kms:
+            if config.s3_kms_key_id is None:
                 raise FatalBackendException(u"S3 USE SSE KMS was requested, but key id not provided "
                                             u"require (--s3-kms-key-id)",
                                             code=log.ErrorCode.s3_kms_no_id)
@@ -250,10 +250,10 @@
                 u'Content-Type': u'application/octet-stream',
                 u'x-amz-storage-class': storage_class,
                 u'x-amz-server-side-encryption': u'aws:kms',
-                u'x-amz-server-side-encryption-aws-kms-key-id': globals.s3_kms_key_id
+                u'x-amz-server-side-encryption-aws-kms-key-id': config.s3_kms_key_id
             }
-            if globals.s3_kms_grant is not None:
-                headers[u'x-amz-grant-full-control'] = globals.s3_kms_grant
+            if config.s3_kms_grant is not None:
+                headers[u'x-amz-grant-full-control'] = config.s3_kms_grant
         else:
             headers = {
                 u'Content-Type': u'application/octet-stream',
@@ -317,7 +317,7 @@
     def upload(self, filename, key, headers):
         key.set_contents_from_filename(filename, headers,
                                        cb=progress.report_transfer,
-                                       num_cb=(max(2, 8 * globals.volsize / (1024 * 1024)))
+                                       num_cb=(max(2, 8 * config.volsize / (1024 * 1024)))
                                        )  # Max num of callbacks = 8 times x megabyte
         key.close()
 

=== modified file 'duplicity/backends/_cf_cloudfiles.py'
--- duplicity/backends/_cf_cloudfiles.py	2018-11-29 19:00:15 +0000
+++ duplicity/backends/_cf_cloudfiles.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2009 Eric EJ Johnson <ej.johnson@xxxxxxxxxxxxx>
 #
@@ -70,7 +70,7 @@
                            log.ErrorCode.connection_failed)
         self.container = conn.create_container(container)
 
-    def _error_code(self, operation, e):
+    def _error_code(self, operation, e):  # pylint: disable=unused-argument
         if isinstance(e, NoSuchObject):
             return log.ErrorCode.backend_not_found
         elif isinstance(e, self.resp_exc):

=== modified file 'duplicity/backends/_cf_pyrax.py'
--- duplicity/backends/_cf_pyrax.py	2018-11-29 19:00:15 +0000
+++ duplicity/backends/_cf_pyrax.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2013 J.P. Krauss <jkrauss@xxxxxxxxxxxxx>
 #
@@ -91,7 +91,7 @@
                                u"Please check your credentials and permissions.",
                                log.ErrorCode.backend_permission_denied)
 
-    def _error_code(self, operation, e):
+    def _error_code(self, operation, e):  # pylint: disable=unused-argument
         if isinstance(e, self.nso_exc):
             return log.ErrorCode.backend_not_found
         elif isinstance(e, self.client_exc):

=== modified file 'duplicity/backends/adbackend.py'
--- duplicity/backends/adbackend.py	2019-08-08 19:31:58 +0000
+++ duplicity/backends/adbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2016 Stefan Breunig <stefan-duplicity@xxxxxxxxxxx>
 # Based on the backend onedrivebackend.py
@@ -31,7 +31,7 @@
 
 import duplicity.backend
 from duplicity.errors import BackendException
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 
 
@@ -67,7 +67,7 @@
         self.backup_target_id = None
         self.backup_target = parsed_url.path.lstrip(u'/')
 
-        if globals.volsize > (10 * 1024 * 1024 * 1024):
+        if config.volsize > (10 * 1024 * 1024 * 1024):
             # https://forums.developer.amazon.com/questions/22713/file-size-limits.html
             # https://forums.developer.amazon.com/questions/22038/support-for-chunked-transfer-encoding.html
             log.FatalError(
@@ -320,8 +320,8 @@
             log.Info(u'%s upload failed with timeout status code=%d. Speculatively '
                      u'waiting for %d seconds to see if Amazon Drive finished the '
                      u'upload anyway' % (remote_filename, response.status_code,
-                                         globals.timeout))
-            tries = globals.timeout / 15
+                                         config.timeout))
+            tries = config.timeout / 15
             while tries >= 0:
                 tries -= 1
                 time.sleep(15)

=== modified file 'duplicity/backends/azurebackend.py'
--- duplicity/backends/azurebackend.py	2019-08-21 12:49:40 +0000
+++ duplicity/backends/azurebackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2013 Matthieu Huin <mhu@xxxxxxxxxxxx>
 # Copyright 2015 Scott McKenzie <noizyland@xxxxxxxxx>
@@ -23,7 +23,7 @@
 import os
 
 import duplicity.backend
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity.errors import BackendException
 from duplicity.util import fsdecode
@@ -86,23 +86,23 @@
             raise BackendException(
                 u'Neither AZURE_ACCOUNT_KEY nor AZURE_SHARED_ACCESS_SIGNATURE environment variable not set.')
 
-        if globals.azure_max_single_put_size:
+        if config.azure_max_single_put_size:
             # check if we use azure-storage>=0.30.0
             try:
                 _ = self.blob_service.MAX_SINGLE_PUT_SIZE
-                self.blob_service.MAX_SINGLE_PUT_SIZE = globals.azure_max_single_put_size
+                self.blob_service.MAX_SINGLE_PUT_SIZE = config.azure_max_single_put_size
             # fallback for azure-storage<0.30.0
             except AttributeError:
-                self.blob_service._BLOB_MAX_DATA_SIZE = globals.azure_max_single_put_size
+                self.blob_service._BLOB_MAX_DATA_SIZE = config.azure_max_single_put_size
 
-        if globals.azure_max_block_size:
+        if config.azure_max_block_size:
             # check if we use azure-storage>=0.30.0
             try:
                 _ = self.blob_service.MAX_BLOCK_SIZE
-                self.blob_service.MAX_BLOCK_SIZE = globals.azure_max_block_size
+                self.blob_service.MAX_BLOCK_SIZE = config.azure_max_block_size
             # fallback for azure-storage<0.30.0
             except AttributeError:
-                self.blob_service._BLOB_MAX_CHUNK_DATA_SIZE = globals.azure_max_block_size
+                self.blob_service._BLOB_MAX_CHUNK_DATA_SIZE = config.azure_max_block_size
 
     def _create_container(self):
         try:
@@ -118,8 +118,8 @@
     def _put(self, source_path, remote_filename):
         remote_filename = fsdecode(remote_filename)
         kwargs = {}
-        if globals.azure_max_connections:
-            kwargs[u'max_connections'] = globals.azure_max_connections
+        if config.azure_max_connections:
+            kwargs[u'max_connections'] = config.azure_max_connections
 
         # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-a-blob-into-a-container
         try:
@@ -130,9 +130,9 @@
         self._set_tier(remote_filename)
 
     def _set_tier(self, remote_filename):
-        if globals.azure_blob_tier is not None:
+        if config.azure_blob_tier is not None:
             try:
-                self.blob_service.set_standard_blob_tier(self.container, remote_filename, globals.azure_blob_tier)
+                self.blob_service.set_standard_blob_tier(self.container, remote_filename, config.azure_blob_tier)
             except AttributeError:  # might not be available in old API
                 pass
 
@@ -165,7 +165,7 @@
             info = {u'size': int(prop[u'content-length'])}
         return info
 
-    def _error_code(self, operation, e):
+    def _error_code(self, operation, e):  # pylint: disable=unused-argument
         if isinstance(e, self.AzureMissingResourceError):
             return log.ErrorCode.backend_not_found
 

=== modified file 'duplicity/backends/b2backend.py'
--- duplicity/backends/b2backend.py	2019-11-16 17:15:49 +0000
+++ duplicity/backends/b2backend.py	2020-03-22 12:35:54 +0000
@@ -1,3 +1,4 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright (c) 2015 Matthew Bentley
 #
@@ -22,19 +23,17 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 # THE SOFTWARE.
 
-from builtins import object
 from future import standard_library
 standard_library.install_aliases()
+from builtins import object
 
-import os
-import hashlib
 from urllib.parse import quote_plus  # pylint: disable=import-error
 
-import duplicity.backend
-from duplicity.errors import BackendException, FatalBackendException
 from duplicity import log
 from duplicity import progress
 from duplicity import util
+from duplicity.errors import BackendException, FatalBackendException
+import duplicity.backend
 
 
 class B2ProgressListener(object):

=== modified file 'duplicity/backends/cfbackend.py'
--- duplicity/backends/cfbackend.py	2018-07-23 14:55:39 +0000
+++ duplicity/backends/cfbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2013 Kenneth Loafman
 #
@@ -19,10 +19,10 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 import duplicity.backend
-from duplicity import globals
+from duplicity import config
 
-if (globals.cf_backend and
-        globals.cf_backend.lower().strip() == u'pyrax'):
+if (config.cf_backend and
+        config.cf_backend.lower().strip() == u'pyrax'):
     from ._cf_pyrax import PyraxBackend as CFBackend
 else:
     from ._cf_cloudfiles import CloudFilesBackend as CFBackend

=== modified file 'duplicity/backends/dpbxbackend.py'
--- duplicity/backends/dpbxbackend.py	2019-08-08 19:31:58 +0000
+++ duplicity/backends/dpbxbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2013 jno <jno@xxxxxxxxx>
 # Copyright 2016 Dmitry Nezhevenko <dion@xxxxxxxxxxx>
@@ -41,11 +41,10 @@
 import urllib.parse  # pylint: disable=import-error
 import urllib.error  # pylint: disable=import-error
 
-from duplicity import log, globals
+from duplicity import log, config
 from duplicity import progress
 from duplicity.errors import BackendException
-from duplicity.globals import num_retries
-from requests.exceptions import ConnectionError
+from requests.exceptions import ConnectionError  # pylint: disable=redefined-builtin
 import duplicity.backend
 
 # This is chunk size for upload using Dpbx chumked API v2. It doesn't
@@ -71,7 +70,7 @@
     f.close()
 
 
-def command(login_required=True):
+def command(login_required=True):  # pylint: disable=unused-argument
     u"""a decorator for handling authentication and exceptions"""
     def decorate(f):
         def wrapper(self, *args):
@@ -194,7 +193,7 @@
         log.Info(u"dpbx: Successfully authenticated as %s" %
                  self.api_account.name.display_name)
 
-    def _error_code(self, operation, e):
+    def _error_code(self, operation, e):  # pylint: disable=unused-argument
         if isinstance(e, ApiError):
             err = e.error
 
@@ -269,7 +268,7 @@
 
             requested_offset = None
             current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
-            retry_number = globals.num_retries
+            retry_number = config.num_retries
             is_eof = False
 
             # We're doing our own error handling and retrying logic because
@@ -291,7 +290,7 @@
                     # reset temporary status variables
                     requested_offset = None
                     current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
-                    retry_number = globals.num_retries
+                    retry_number = config.num_retries
 
                     if not is_eof:
                         assert len(buf) != 0

=== modified file 'duplicity/backends/gdocsbackend.py'
--- duplicity/backends/gdocsbackend.py	2020-01-16 13:38:32 +0000
+++ duplicity/backends/gdocsbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2011 Carlos Abalde <carlos.abalde@xxxxxxxxx>
 #
@@ -155,7 +155,7 @@
                 u'http://www.google.com/support/accounts/bin/static.py?page=guide.cs&guide=1056283&topic=1056286 '
                 u'and create your application-specific password to run duplicity backups.')
 
-    def _fetch_entries(self, folder_id, type, title=None):
+    def _fetch_entries(self, folder_id, type, title=None):  # pylint: disable=redefined-builtin
         # Build URI.
         uri = u'/feeds/default/private/full/%s/contents' % folder_id
         if type == u'folder':

=== modified file 'duplicity/backends/giobackend.py'
--- duplicity/backends/giobackend.py	2020-01-06 17:13:39 +0000
+++ duplicity/backends/giobackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2009 Michael Terry <mike@xxxxxxxxxxx>
 #
@@ -50,8 +50,8 @@
        URLs look like schema://user@server/path.
     """
     def __init__(self, parsed_url):
-        from gi.repository import Gio  # @UnresolvedImport  # pylint: disable=import-error
-        from gi.repository import GLib  # @UnresolvedImport  # pylint: disable=import-error
+        from gi.repository import Gio   # pylint: disable=import-error
+        from gi.repository import GLib   # pylint: disable=import-error
 
         class DupMountOperation(Gio.MountOperation):
             u"""A simple MountOperation that grabs the password from the environment
@@ -99,8 +99,8 @@
                 raise
 
     def __done_with_mount(self, fileobj, result, loop):
-        from gi.repository import Gio  # @UnresolvedImport  # pylint: disable=import-error
-        from gi.repository import GLib  # @UnresolvedImport  # pylint: disable=import-error
+        from gi.repository import Gio   # pylint: disable=import-error
+        from gi.repository import GLib   # pylint: disable=import-error
         try:
             fileobj.mount_enclosing_volume_finish(result)
         except GLib.GError as e:
@@ -114,7 +114,7 @@
         pass
 
     def __copy_file(self, source, target):
-        from gi.repository import Gio  # @UnresolvedImport  # pylint: disable=import-error
+        from gi.repository import Gio   # pylint: disable=import-error
         # Don't pass NOFOLLOW_SYMLINKS here. Some backends (e.g. google-drive:)
         # use symlinks internally for all files. In the normal course of
         # events, we never deal with symlinks anyway, just tarballs.
@@ -123,8 +123,8 @@
                     None, self.__copy_progress, None)
 
     def _error_code(self, operation, e):
-        from gi.repository import Gio  # @UnresolvedImport  # pylint: disable=import-error
-        from gi.repository import GLib  # @UnresolvedImport  # pylint: disable=import-error
+        from gi.repository import Gio   # pylint: disable=import-error
+        from gi.repository import GLib   # pylint: disable=import-error
         if isinstance(e, GLib.GError):
             if e.code == Gio.IOErrorEnum.FAILED and operation == u'delete':
                 # Sometimes delete will return a generic failure on a file not
@@ -138,19 +138,19 @@
                 return log.ErrorCode.backend_no_space
 
     def _put(self, source_path, remote_filename):
-        from gi.repository import Gio  # @UnresolvedImport  # pylint: disable=import-error
+        from gi.repository import Gio   # pylint: disable=import-error
         source_file = Gio.File.new_for_path(source_path.name)
         target_file = self.remote_file.get_child_for_display_name(util.fsdecode(remote_filename))
         self.__copy_file(source_file, target_file)
 
     def _get(self, filename, local_path):
-        from gi.repository import Gio  # @UnresolvedImport  # pylint: disable=import-error
+        from gi.repository import Gio   # pylint: disable=import-error
         source_file = self.remote_file.get_child_for_display_name(util.fsdecode(filename))
         target_file = Gio.File.new_for_path(local_path.name)
         self.__copy_file(source_file, target_file)
 
     def _list(self):
-        from gi.repository import Gio  # @UnresolvedImport  # pylint: disable=import-error
+        from gi.repository import Gio   # pylint: disable=import-error
         files = []
         # We grab display name, rather than file name because some backends
         # (e.g. google-drive:) use filesystem-specific IDs as file names and
@@ -170,7 +170,7 @@
         target_file.delete(None)
 
     def _query(self, filename):
-        from gi.repository import Gio  # @UnresolvedImport  # pylint: disable=import-error
+        from gi.repository import Gio   # pylint: disable=import-error
         target_file = self.remote_file.get_child_for_display_name(util.fsdecode(filename))
         info = target_file.query_info(Gio.FILE_ATTRIBUTE_STANDARD_SIZE,
                                       Gio.FileQueryInfoFlags.NONE, None)

=== modified file 'duplicity/backends/hsibackend.py'
--- duplicity/backends/hsibackend.py	2020-01-02 12:05:22 +0000
+++ duplicity/backends/hsibackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -50,7 +50,6 @@
         self.subprocess_popen(commandline)
 
     def _list(self):
-        import sys
         commandline = u'%s "ls -l %s"' % (hsi_command, self.remote_dir)
         l = self.subprocess_popen(commandline)[2]
         l = l.split(os.linesep.encode())[3:]

=== modified file 'duplicity/backends/hubicbackend.py'
--- duplicity/backends/hubicbackend.py	2018-11-29 19:00:15 +0000
+++ duplicity/backends/hubicbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2013 J.P. Krauss <jkrauss@xxxxxxxxxxxxx>
 #

=== modified file 'duplicity/backends/imapbackend.py'
--- duplicity/backends/imapbackend.py	2019-05-25 19:43:53 +0000
+++ duplicity/backends/imapbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -20,20 +20,21 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-import sys
 from future import standard_library
 standard_library.install_aliases()
 from builtins import input
+
+import email
+import email.encoders
+import email.mime.multipart
+import getpass
 import imaplib
+import os
 import re
-import os
+import socket
+import sys
 import time
-import socket
-import io
-import getpass
-import email
-import email.encoders
-import email.mime.multipart
+
 from email.parser import Parser
 try:
     from email.policy import default  # pylint: disable=import-error
@@ -45,10 +46,10 @@
     import ssl
     socket.sslerror = ssl.SSLError
 
+from duplicity import config
+from duplicity import log
+from duplicity.errors import *  # pylint: disable=unused-wildcard-import
 import duplicity.backend
-from duplicity import globals
-from duplicity import log
-from duplicity.errors import *  # @UnusedWildImport
 
 
 class ImapBackend(duplicity.backend.Backend):
@@ -104,13 +105,13 @@
         self.remote_dir = re.sub(r'^/', r'', parsed_url.path, 1)
 
         #  Login
-        if (not(globals.imap_full_address)):
+        if (not(config.imap_full_address)):
             self.conn.login(self.username, self.password)
-            self.conn.select(globals.imap_mailbox)
+            self.conn.select(config.imap_mailbox)
             log.Info(u"IMAP connected")
         else:
             self.conn.login(self.username + u"@" + parsed_url.hostname, self.password)
-            self.conn.select(globals.imap_mailbox)
+            self.conn.select(config.imap_mailbox)
             log.Info(u"IMAP connected")
 
     def prepareBody(self, f, rname):
@@ -133,7 +134,7 @@
 
     def _put(self, source_path, remote_filename):
         f = source_path.open(u"rb")
-        allowedTimeout = globals.timeout
+        allowedTimeout = config.timeout
         if (allowedTimeout == 0):
             # Allow a total timeout of 1 day
             allowedTimeout = 2880
@@ -143,8 +144,8 @@
                 body = self.prepareBody(f, remote_filename)
                 # If we don't select the IMAP folder before
                 # append, the message goes into the INBOX.
-                self.conn.select(globals.imap_mailbox)
-                self.conn.append(globals.imap_mailbox, None, None, body.encode())
+                self.conn.select(config.imap_mailbox)
+                self.conn.append(config.imap_mailbox, None, None, body.encode())
                 break
             except (imaplib.IMAP4.abort, socket.error, socket.sslerror):
                 allowedTimeout -= 1
@@ -162,26 +163,26 @@
         log.Info(u"IMAP mail with '%s' subject stored" % remote_filename)
 
     def _get(self, remote_filename, local_path):
-        allowedTimeout = globals.timeout
+        allowedTimeout = config.timeout
         if (allowedTimeout == 0):
             # Allow a total timeout of 1 day
             allowedTimeout = 2880
         while allowedTimeout > 0:
             try:
-                self.conn.select(globals.imap_mailbox)
-                (result, list) = self.conn.search(None, u'Subject', remote_filename)
+                self.conn.select(config.imap_mailbox)
+                (result, flist) = self.conn.search(None, u'Subject', remote_filename)
                 if result != u"OK":
-                    raise Exception(list[0])
+                    raise Exception(flist[0])
 
                 # check if there is any result
-                if list[0] == u'':
+                if flist[0] == u'':
                     raise Exception(u"no mail with subject %s")
 
-                (result, list) = self.conn.fetch(list[0], u"(RFC822)")
+                (result, flist) = self.conn.fetch(flist[0], u"(RFC822)")
 
                 if result != u"OK":
-                    raise Exception(list[0])
-                rawbody = list[0][1]
+                    raise Exception(flist[0])
+                rawbody = flist[0][1]
 
                 p = Parser()
 
@@ -212,26 +213,26 @@
 
     def _list(self):
         ret = []
-        (result, list) = self.conn.select(globals.imap_mailbox)
+        (result, flist) = self.conn.select(config.imap_mailbox)
         if result != u"OK":
-            raise BackendException(list[0])
+            raise BackendException(flist[0])
 
         # Going to find all the archives which have remote_dir in the From
         # address
 
         # Search returns an error if you haven't selected an IMAP folder.
-        (result, list) = self.conn.search(None, u'FROM', self.remote_dir)
+        (result, flist) = self.conn.search(None, u'FROM', self.remote_dir)
         if result != u"OK":
-            raise Exception(list[0])
-        if list[0] == b'':
+            raise Exception(flist[0])
+        if flist[0] == b'':
             return ret
-        nums = list[0].strip().split(b" ")
-        set = b"%s:%s" % (nums[0], nums[-1])
-        (result, list) = self.conn.fetch(set, u"(BODY[HEADER])")
+        nums = flist[0].strip().split(b" ")
+        msg_set = b"%s:%s" % (nums[0], nums[-1])
+        (result, flist) = self.conn.fetch(msg_set, u"(BODY[HEADER])")
         if result != u"OK":
-            raise Exception(list[0])
+            raise Exception(flist[0])
 
-        for msg in list:
+        for msg in flist:
             if (len(msg) == 1):
                 continue
             if sys.version_info.major >= 3:
@@ -245,33 +246,33 @@
             if (not (header_from is None)):
                 if (re.compile(u"^" + self.remote_dir + u"$").match(header_from)):
                     ret.append(subj)
-                    log.Info(u"IMAP LIST: %s %s" % (subj, header_from))
+                    log.Info(u"IMAP LIST: %s %s" % (subj, header_from))
         return ret
 
     def imapf(self, fun, *args):
-        (ret, list) = fun(*args)
+        (ret, flist) = fun(*args)
         if ret != u"OK":
-            raise Exception(list[0])
-        return list
+            raise Exception(flist[0])
+        return flist
 
     def delete_single_mail(self, i):
         self.imapf(self.conn.store, i, u"+FLAGS", u'\\DELETED')
 
     def expunge(self):
-        list = self.imapf(self.conn.expunge)
+        self.imapf(self.conn.expunge)
 
     def _delete_list(self, filename_list):
         for filename in filename_list:
-            list = self.imapf(self.conn.search, None, u"(SUBJECT %s)" % filename)
-            list = list[0].split()
-            if len(list) > 0 and list[0] != u"":
-                self.delete_single_mail(list[0])
+            flist = self.imapf(self.conn.search, None, u"(SUBJECT %s)" % filename)
+            flist = flist[0].split()
+            if len(flist) > 0 and flist[0] != u"":
+                self.delete_single_mail(flist[0])
                 log.Notice(u"marked %s to be deleted" % filename)
         self.expunge()
         log.Notice(u"IMAP expunged %s files" % len(filename_list))
 
     def _close(self):
-        self.conn.select(globals.imap_mailbox)
+        self.conn.select(config.imap_mailbox)
         self.conn.close()
         self.conn.logout()
 

=== modified file 'duplicity/backends/jottacloudbackend.py'
--- duplicity/backends/jottacloudbackend.py	2019-11-21 15:34:26 +0000
+++ duplicity/backends/jottacloudbackend.py	2020-03-22 12:35:54 +0000
@@ -22,14 +22,13 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 # stdlib
+import logging
 import posixpath
-import locale
-import logging
 
-# import duplicity stuff # version 0.6
-import duplicity.backend
+# import duplicity stuff
 from duplicity import log
 from duplicity.errors import BackendException
+import duplicity.backend
 
 
 def get_jotta_device(jfs):

=== modified file 'duplicity/backends/lftpbackend.py'
--- duplicity/backends/lftpbackend.py	2020-01-02 12:05:22 +0000
+++ duplicity/backends/lftpbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -39,7 +39,7 @@
     from pipes import quote as cmd_quote
 
 import duplicity.backend
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity import tempdir
 from duplicity import util
@@ -90,14 +90,14 @@
             self.password = self.get_password()
             self.authflag = u"-u '%s,%s'" % (self.username, self.password)
 
-        if globals.ftp_connection == u'regular':
+        if config.ftp_connection == u'regular':
             self.conn_opt = u'off'
         else:
             self.conn_opt = u'on'
 
         # check for cacert file if https
-        self.cacert_file = globals.ssl_cacert_file
-        if self.scheme == u'https' and not globals.ssl_no_check_certificate:
+        self.cacert_file = config.ssl_cacert_file
+        if self.scheme == u'https' and not config.ssl_no_check_certificate:
             cacert_candidates = [u"~/.duplicity/cacert.pem",
                                  u"~/duplicity_cacert.pem",
                                  u"/etc/duplicity/cacert.pem"]
@@ -113,11 +113,11 @@
         self.tempfd, self.tempname = tempdir.default().mkstemp()
         self.tempfile = os.fdopen(self.tempfd, u"w")
         self.tempfile.write(u"set ssl:verify-certificate " +
-                            (u"false" if globals.ssl_no_check_certificate else u"true") + u"\n")
+                            (u"false" if config.ssl_no_check_certificate else u"true") + u"\n")
         if self.cacert_file:
             self.tempfile.write(u"set ssl:ca-file " + cmd_quote(self.cacert_file) + u"\n")
-        if globals.ssl_cacert_path:
-            self.tempfile.write(u"set ssl:ca-path " + cmd_quote(globals.ssl_cacert_path) + u"\n")
+        if config.ssl_cacert_path:
+            self.tempfile.write(u"set ssl:ca-path " + cmd_quote(config.ssl_cacert_path) + u"\n")
         if self.parsed_url.scheme == u'ftps':
             self.tempfile.write(u"set ftp:ssl-allow true\n")
             self.tempfile.write(u"set ftp:ssl-protect-data true\n")
@@ -129,8 +129,8 @@
         else:
             self.tempfile.write(u"set ftp:ssl-allow false\n")
         self.tempfile.write(u"set http:use-propfind true\n")
-        self.tempfile.write(u"set net:timeout %s\n" % globals.timeout)
-        self.tempfile.write(u"set net:max-retries %s\n" % globals.num_retries)
+        self.tempfile.write(u"set net:timeout %s\n" % config.timeout)
+        self.tempfile.write(u"set net:max-retries %s\n" % config.num_retries)
         self.tempfile.write(u"set ftp:passive-mode %s\n" % self.conn_opt)
         if log.getverbosity() >= log.DEBUG:
             self.tempfile.write(u"debug\n")

=== modified file 'duplicity/backends/localbackend.py'
--- duplicity/backends/localbackend.py	2018-07-23 14:55:39 +0000
+++ duplicity/backends/localbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -22,7 +22,6 @@
 import os
 
 import duplicity.backend
-from duplicity import log
 from duplicity import path
 from duplicity.errors import BackendException
 

=== modified file 'duplicity/backends/mediafirebackend.py'
--- duplicity/backends/mediafirebackend.py	2020-02-13 15:46:40 +0000
+++ duplicity/backends/mediafirebackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2016 Roman Yepishev <rye@xxxxxxxxxxxxxxx>
 #

=== modified file 'duplicity/backends/megabackend.py'
--- duplicity/backends/megabackend.py	2020-01-03 01:16:01 +0000
+++ duplicity/backends/megabackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2017 Tomas Vondra (Launchpad id: tomas-v)
 # Copyright 2017 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -22,11 +22,7 @@
 from __future__ import print_function
 from future import standard_library
 standard_library.install_aliases()
-from builtins import str
-from builtins import range
-from builtins import object
 
-from duplicity import log
 from duplicity import util
 from duplicity.errors import BackendException
 import duplicity.backend

=== modified file 'duplicity/backends/multibackend.py'
--- duplicity/backends/multibackend.py	2020-02-14 19:59:59 +0000
+++ duplicity/backends/multibackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2015 Steve Tynor <steve.tynor@xxxxxxxxx>
 # Copyright 2016 Thomas Harning Jr <harningt@xxxxxxxxx>
@@ -27,7 +27,6 @@
 standard_library.install_aliases()
 import os
 import os.path
-import string
 import sys
 import urllib.request  # pylint: disable=import-error
 import urllib.parse  # pylint: disable=import-error
@@ -36,7 +35,7 @@
 
 import duplicity.backend
 from duplicity.errors import BackendException
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity import util
 
@@ -243,7 +242,7 @@
         while True:
             store = stores[self.__write_cursor]
             try:
-                next = self.__write_cursor + 1
+                next = self.__write_cursor + 1  # pylint: disable=redefined-builtin
                 if (next > len(stores) - 1):
                     next = 0
                 log.Log(_(u"MultiBackend: _put: write to store #%s (%s)")
@@ -288,8 +287,8 @@
         stores = self._eligible_stores(remote_filename)
 
         for s in stores:
-            list = s.list()
-            if remote_filename in list:
+            flist = s.list()
+            if remote_filename in flist:
                 s.get(remote_filename, local_path)
                 return
             log.Log(_(u"MultiBackend: failed to get %s to %s from %s")
@@ -303,7 +302,7 @@
     def _list(self):
         lists = []
         for s in self.__stores:
-            globals.are_errors_fatal[u'list'] = (False, [])
+            config.are_errors_fatal[u'list'] = (False, [])
             l = s.list()
             log.Notice(_(u"MultiBackend: %s: %d files")
                        % (s.backend.parsed_url.url_string, len(l)))
@@ -333,8 +332,8 @@
         # before we try to delete
         # ENHANCEME: maintain a cached list for each store
         for s in stores:
-            list = s.list()
-            if filename in list:
+            flist = s.list()
+            if filename in flist:
                 if hasattr(s, u'_delete_list'):
                     s._do_delete_list([filename, ])
                 elif hasattr(s, u'_delete'):
@@ -350,7 +349,6 @@
             log.Log(_(u"MultiBackend: failed to delete %s. Tried all backing stores and none succeeded")
                     % (filename),
                     log.ERROR)
-#           raise BackendException("failed to delete")
 
 
 duplicity.backend.register_backend(u'multi', MultiBackend)

=== modified file 'duplicity/backends/ncftpbackend.py'
--- duplicity/backends/ncftpbackend.py	2020-01-02 12:05:22 +0000
+++ duplicity/backends/ncftpbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -28,7 +28,7 @@
 import re
 
 import duplicity.backend
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity import tempdir
 from duplicity import util
@@ -82,7 +82,7 @@
 
         self.password = self.get_password()
 
-        if globals.ftp_connection == u'regular':
+        if config.ftp_connection == u'regular':
             self.conn_opt = u'-E'
         else:
             self.conn_opt = u'-F'
@@ -94,7 +94,7 @@
         self.tempfile.write(u"pass %s\n" % self.password)
         self.tempfile.close()
         self.flags = u"-f %s %s -t %s -o useCLNT=0,useHELP_SITE=0 " % \
-            (self.tempname, self.conn_opt, globals.timeout)
+            (self.tempname, self.conn_opt, config.timeout)
         if parsed_url.port is not None and parsed_url.port != 21:
             self.flags += u" -P '%s'" % (parsed_url.port)
 

=== modified file 'duplicity/backends/onedrivebackend.py'
--- duplicity/backends/onedrivebackend.py	2019-12-15 17:09:35 +0000
+++ duplicity/backends/onedrivebackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 # vim:tabstop=4:shiftwidth=4:expandtab
 #
 # Copyright 2014 Google Inc.
@@ -34,7 +34,7 @@
 
 import duplicity.backend
 from duplicity.errors import BackendException
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 
 # For documentation on the API, see
@@ -98,7 +98,7 @@
                 u'You did not specify a path. '
                 u'Please specify a path, e.g. onedrive://duplicity_backups'))
 
-        if globals.volsize > (10 * 1024 * 1024 * 1024):
+        if config.volsize > (10 * 1024 * 1024 * 1024):
             raise BackendException((
                 u'Your --volsize is bigger than 10 GiB, which is the maximum '
                 u'file size on OneDrive.'))

=== modified file 'duplicity/backends/par2backend.py'
--- duplicity/backends/par2backend.py	2019-10-27 23:55:56 +0000
+++ duplicity/backends/par2backend.py	2020-03-22 12:35:54 +0000
@@ -1,3 +1,5 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
+#
 # Copyright 2013 Germar Reitze <germar.reitze@xxxxxxxxx>
 #
 # This file is part of duplicity.
@@ -16,14 +18,12 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-from builtins import filter
-
 import os
 import re
 from duplicity import backend
 from duplicity.errors import BackendException
 from duplicity import log
-from duplicity import globals
+from duplicity import config
 from duplicity import util
 
 
@@ -44,12 +44,12 @@
 
         self.parsed_url = parsed_url
         try:
-            self.redundancy = globals.par2_redundancy
+            self.redundancy = config.par2_redundancy
         except AttributeError:
             self.redundancy = 10
 
         try:
-            self.common_options = globals.par2_options + u" -q -q"
+            self.common_options = config.par2_options + u" -q -q"
         except AttributeError:
             self.common_options = u"-q -q"
 

=== modified file 'duplicity/backends/pcabackend.py'
--- duplicity/backends/pcabackend.py	2020-03-17 11:47:27 +0000
+++ duplicity/backends/pcabackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2013 Matthieu Huin <mhu@xxxxxxxxxxxx>
 # Copyright 2017 Xavier Lucas <xavier.lucas@xxxxxxxxxxxx>
@@ -144,7 +144,7 @@
             log.FatalError(u"Container '%s' exists but its storage policy is '%s' not '%s'."
                            % (self.container, container_metadata[policy_header.lower()], policy))
 
-    def _error_code(self, operation, e):  # pylint: disable: unused-argument
+    def _error_code(self, operation, e):  # pylint: disable=unused-argument
         if isinstance(e, self.resp_exc):
             if e.http_status == 404:
                 return log.ErrorCode.backend_not_found

=== modified file 'duplicity/backends/pydrivebackend.py'
--- duplicity/backends/pydrivebackend.py	2019-11-13 02:18:15 +0000
+++ duplicity/backends/pydrivebackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2015 Yigal Asnis
 #
@@ -18,13 +18,13 @@
 
 from builtins import next
 from builtins import str
-import string
+
 import os
 
-import duplicity.backend
 from duplicity import log
 from duplicity import util
 from duplicity.errors import BackendException
+import duplicity.backend
 
 
 class PyDriveBackend(duplicity.backend.Backend):
@@ -214,7 +214,7 @@
             size = int(drive_file[u'fileSize'])
         return {u'size': size}
 
-    def _error_code(self, operation, error):
+    def _error_code(self, operation, error):  # pylint: disable=unused-argument
         from pydrive.files import ApiRequestError, FileNotUploadedError  # pylint: disable=import-error
         if isinstance(error, FileNotUploadedError):
             return log.ErrorCode.backend_not_found

=== modified file 'duplicity/backends/pyrax_identity/__init__.py'
--- duplicity/backends/pyrax_identity/__init__.py	2014-12-12 14:39:54 +0000
+++ duplicity/backends/pyrax_identity/__init__.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'duplicity/backends/pyrax_identity/hubic.py'
--- duplicity/backends/pyrax_identity/hubic.py	2019-12-28 21:26:47 +0000
+++ duplicity/backends/pyrax_identity/hubic.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,5 @@
-# -*- coding: utf-8 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
+#
 # Copyright (c) 2014 Gu1
 # Licensed under the MIT license
 

=== modified file 'duplicity/backends/rclonebackend.py'
--- duplicity/backends/rclonebackend.py	2020-01-02 12:05:22 +0000
+++ duplicity/backends/rclonebackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2019 Francesco Magno
 # Copyright 2019 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -21,18 +21,14 @@
 
 from future import standard_library
 standard_library.install_aliases()
-from builtins import str
-from builtins import range
-from builtins import object
 
 import os
 import os.path
 
-import duplicity.backend
-from duplicity import path
 from duplicity import log
+from duplicity import util
 from duplicity.errors import BackendException
-from duplicity import util
+import duplicity.backend
 
 
 class RcloneBackend(duplicity.backend.Backend):

=== modified file 'duplicity/backends/rsyncbackend.py'
--- duplicity/backends/rsyncbackend.py	2020-01-02 12:05:22 +0000
+++ duplicity/backends/rsyncbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -27,7 +27,7 @@
 
 import duplicity.backend
 from duplicity.errors import InvalidBackendURL
-from duplicity import globals, tempdir, util
+from duplicity import config, tempdir, util
 
 
 class RsyncBackend(duplicity.backend.Backend):
@@ -88,8 +88,8 @@
         if self.over_rsyncd():
             portOption = port
         else:
-            portOption = u"-e 'ssh %s -oBatchMode=yes %s'" % (port, globals.ssh_options)
-        rsyncOptions = globals.rsync_options
+            portOption = u"-e 'ssh %s -oBatchMode=yes %s'" % (port, config.ssh_options)
+        rsyncOptions = config.rsync_options
         # build cmd
         self.cmd = u"rsync %s %s" % (portOption, rsyncOptions)
 
@@ -121,7 +121,7 @@
         self.subprocess_popen(commandline)
 
     def _list(self):
-        def split(str):
+        def split(str):  # pylint: disable=redefined-builtin
             line = str.split()
             if len(line) > 4 and line[4] != u'.':
                 return line[4]
@@ -140,7 +140,7 @@
             else:
                 dont_delete_list.append(file)
 
-        dir = tempfile.mkdtemp()
+        dir = tempfile.mkdtemp()  # pylint: disable=redefined-builtin
         exclude, exclude_name = tempdir.default().mkstemp_file()
         to_delete = [exclude_name]
         for file in dont_delete_list:

=== modified file 'duplicity/backends/s3_boto3_backend.py'
--- duplicity/backends/s3_boto3_backend.py	2019-12-06 16:53:41 +0000
+++ duplicity/backends/s3_boto3_backend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -21,7 +21,7 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 import duplicity.backend
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity.errors import FatalBackendException, BackendException
 from duplicity import util
@@ -108,31 +108,31 @@
         remote_filename = util.fsdecode(remote_filename)
         key = self.key_prefix + remote_filename
 
-        if globals.s3_use_rrs:
+        if config.s3_use_rrs:
             storage_class = u'REDUCED_REDUNDANCY'
-        elif globals.s3_use_ia:
+        elif config.s3_use_ia:
             storage_class = u'STANDARD_IA'
-        elif globals.s3_use_onezone_ia:
+        elif config.s3_use_onezone_ia:
             storage_class = u'ONEZONE_IA'
-        elif globals.s3_use_glacier and u"manifest" not in remote_filename:
+        elif config.s3_use_glacier and u"manifest" not in remote_filename:
             storage_class = u'GLACIER'
-        elif globals.s3_use_deep_archive and u"manifest" not in remote_filename:
+        elif config.s3_use_deep_archive and u"manifest" not in remote_filename:
             storage_class = u'DEEP_ARCHIVE'
         else:
             storage_class = u'STANDARD'
         extra_args = {u'StorageClass': storage_class}
 
-        if globals.s3_use_sse:
+        if config.s3_use_sse:
             extra_args[u'ServerSideEncryption'] = u'AES256'
-        elif globals.s3_use_sse_kms:
-            if globals.s3_kms_key_id is None:
+        elif config.s3_use_sse_kms:
+            if config.s3_kms_key_id is None:
                 raise FatalBackendException(u"S3 USE SSE KMS was requested, but key id not provided "
                                             u"require (--s3-kms-key-id)",
                                             code=log.ErrorCode.s3_kms_no_id)
             extra_args[u'ServerSideEncryption'] = u'aws:kms'
-            extra_args[u'SSEKMSKeyId'] = globals.s3_kms_key_id
-            if globals.s3_kms_grant:
-                extra_args[u'GrantFullControl'] = globals.s3_kms_grant
+            extra_args[u'SSEKMSKeyId'] = config.s3_kms_key_id
+            if config.s3_kms_grant:
+                extra_args[u'GrantFullControl'] = config.s3_kms_grant
 
         # Should the tracker be scoped to the put or the backend?
         # The put seems right to me, but the results look a little more correct
@@ -169,7 +169,6 @@
 
     def _query(self, remote_filename):
         import botocore  # pylint: disable=import-error
-        from botocore.exceptions import ClientError  # pylint: disable=import-error
 
         remote_filename = util.fsdecode(remote_filename)
         key = self.key_prefix + remote_filename

=== modified file 'duplicity/backends/s3_boto_backend.py'
--- duplicity/backends/s3_boto_backend.py	2020-02-06 15:27:43 +0000
+++ duplicity/backends/s3_boto_backend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -21,9 +21,9 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 import duplicity.backend
-from duplicity import globals
+from duplicity import config
 
-if globals.s3_use_multiprocessing:
+if config.s3_use_multiprocessing:
     from ._boto_multi import BotoBackend
 else:
     from ._boto_single import BotoBackend

=== modified file 'duplicity/backends/ssh_paramiko_backend.py'
--- duplicity/backends/ssh_paramiko_backend.py	2020-03-15 11:52:38 +0000
+++ duplicity/backends/ssh_paramiko_backend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -39,8 +39,7 @@
 from binascii import hexlify
 
 import duplicity.backend
-from duplicity import globals
-from duplicity import util
+from duplicity import config
 from duplicity.errors import BackendException
 
 global paramiko
@@ -204,7 +203,7 @@
             self.config.update({u'port': 22})
         # parse ssh options for alternative ssh private key, identity file
         m = re.search(r"^(?:.+\s+)?(?:-oIdentityFile=|-i\s+)(([\"'])([^\\2]+)\\2|[\S]+).*",
-                      globals.ssh_options)
+                      config.ssh_options)
         if (m is not None):
             keyfilename = m.group(3) if m.group(3) else m.group(1)
             self.config[u'identityfile'] = keyfilename
@@ -227,7 +226,7 @@
             self.config[u'identityfile'] = None
 
         # get password, enable prompt if askpass is set
-        self.use_getpass = globals.ssh_askpass
+        self.use_getpass = config.ssh_askpass
         # set url values for beautiful login prompt
         parsed_url.username = self.config[u'user']
         parsed_url.hostname = self.config[u'hostname']
@@ -246,7 +245,7 @@
                 self.config[u'user'],
                 self.config[u'hostname'],
                 self.config[u'port'], e))
-        self.client.get_transport().set_keepalive((int)(globals.timeout / 2))
+        self.client.get_transport().set_keepalive((int)(config.timeout / 2))
 
         self.scheme = duplicity.backend.strip_prefix(parsed_url.scheme,
                                                      u'paramiko')
@@ -300,7 +299,7 @@
             f = open(source_path.name, u'rb')
             try:
                 chan = self.client.get_transport().open_session()
-                chan.settimeout(globals.timeout)
+                chan.settimeout(config.timeout)
                 # scp in sink mode uses the arg as base directory
                 chan.exec_command(u"scp -t '%s'" % self.remote_dir)
             except Exception as e:
@@ -332,7 +331,7 @@
         if self.use_scp:
             try:
                 chan = self.client.get_transport().open_session()
-                chan.settimeout(globals.timeout)
+                chan.settimeout(config.timeout)
                 chan.exec_command(u"scp -f '%s/%s'" % (self.remote_dir,
                                                        remote_filename))
             except Exception as e:
@@ -398,7 +397,7 @@
         command and returns stdout of command. throws an exception if exit
         code!=0 and not ignored"""
         try:
-            ch_in, ch_out, ch_err = self.client.exec_command(cmd, -1, globals.timeout)
+            ch_in, ch_out, ch_err = self.client.exec_command(cmd, -1, config.timeout)
             output = ch_out.read(-1)
             return output
         except Exception as e:

=== modified file 'duplicity/backends/ssh_pexpect_backend.py'
--- duplicity/backends/ssh_pexpect_backend.py	2020-01-29 19:09:08 +0000
+++ duplicity/backends/ssh_pexpect_backend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -29,15 +29,14 @@
 standard_library.install_aliases()
 from builtins import map
 
+import os
 import re
-import string
-import os
 
-import duplicity.backend
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity import util
 from duplicity.errors import BackendException
+import duplicity.backend
 
 
 class SSHPExpectBackend(duplicity.backend.Backend):
@@ -57,12 +56,12 @@
         self.retry_delay = 10
 
         self.scp_command = u"scp"
-        if globals.scp_command:
-            self.scp_command = globals.scp_command
+        if config.scp_command:
+            self.scp_command = config.scp_command
 
         self.sftp_command = u"sftp"
-        if globals.sftp_command:
-            self.sftp_command = globals.sftp_command
+        if config.sftp_command:
+            self.sftp_command = config.sftp_command
 
         self.scheme = duplicity.backend.strip_prefix(parsed_url.scheme, u'pexpect')
         self.use_scp = (self.scheme == u'scp')
@@ -81,22 +80,22 @@
         self.remote_prefix = self.remote_dir + u'/'
         # maybe use different ssh port
         if parsed_url.port:
-            globals.ssh_options = globals.ssh_options + u" -oPort=%s" % parsed_url.port
+            config.ssh_options = config.ssh_options + u" -oPort=%s" % parsed_url.port
         # set some defaults if user has not specified already.
-        if u"ServerAliveInterval" not in globals.ssh_options:
-            globals.ssh_options += u" -oServerAliveInterval=%d" % ((int)(globals.timeout / 2))
-        if u"ServerAliveCountMax" not in globals.ssh_options:
-            globals.ssh_options += u" -oServerAliveCountMax=2"
+        if u"ServerAliveInterval" not in config.ssh_options:
+            config.ssh_options += u" -oServerAliveInterval=%d" % ((int)(config.timeout / 2))
+        if u"ServerAliveCountMax" not in config.ssh_options:
+            config.ssh_options += u" -oServerAliveCountMax=2"
 
         # set up password
-        self.use_getpass = globals.ssh_askpass
+        self.use_getpass = config.ssh_askpass
         self.password = self.get_password()
 
     def run_scp_command(self, commandline):
         u""" Run an scp command, responding to password prompts """
         log.Info(u"Running '%s'" % commandline)
         child = pexpect.spawn(commandline, timeout=None)
-        if globals.ssh_askpass:
+        if config.ssh_askpass:
             state = u"authorizing"
         else:
             state = u"copying"
@@ -171,7 +170,7 @@
                      u"open(.*): Failure"]
         max_response_len = max([len(p) for p in responses[1:]])
         log.Info(u"Running '%s'" % (commandline))
-        child = pexpect.spawn(commandline, timeout=None, maxread=maxread, encoding=globals.fsencoding)
+        child = pexpect.spawn(commandline, timeout=None, maxread=maxread, encoding=config.fsencoding)
         cmdloc = 0
         passprompt = 0
         while 1:
@@ -239,13 +238,13 @@
                     u"rename \"%s.%s.part\" \"%s%s\"" %
                     (self.remote_prefix, remote_filename, self.remote_prefix, remote_filename)]
         commandline = (u"%s %s %s" % (self.sftp_command,
-                                      globals.ssh_options,
+                                      config.ssh_options,
                                       self.host_string))
         self.run_sftp_command(commandline, commands)
 
     def put_scp(self, source_path, remote_filename):
         commandline = u"%s %s %s %s:%s%s" % \
-            (self.scp_command, globals.ssh_options, source_path.uc_name, self.host_string,
+            (self.scp_command, config.ssh_options, source_path.uc_name, self.host_string,
              self.remote_prefix, remote_filename)
         self.run_scp_command(commandline)
 
@@ -260,13 +259,13 @@
         commands = [u"get \"%s%s\" \"%s\"" %
                     (self.remote_prefix, remote_filename, local_path.uc_name)]
         commandline = (u"%s %s %s" % (self.sftp_command,
-                                      globals.ssh_options,
+                                      config.ssh_options,
                                       self.host_string))
         self.run_sftp_command(commandline, commands)
 
     def get_scp(self, remote_filename, local_path):
         commandline = u"%s %s %s:%s%s %s" % \
-            (self.scp_command, globals.ssh_options, self.host_string, self.remote_prefix,
+            (self.scp_command, config.ssh_options, self.host_string, self.remote_prefix,
              remote_filename, local_path.uc_name)
         self.run_scp_command(commandline)
 
@@ -285,7 +284,7 @@
 
         commands = mkdir_commands + [u"ls -1"]
         commandline = (u"%s %s %s" % (self.sftp_command,
-                                      globals.ssh_options,
+                                      config.ssh_options,
                                       self.host_string))
 
         l = self.run_sftp_command(commandline, commands).split(u'\n')[1:]
@@ -295,7 +294,7 @@
     def _delete(self, filename):
         commands = [u"cd \"%s\"" % (self.remote_dir,)]
         commands.append(u"rm \"%s\"" % util.fsdecode(filename))
-        commandline = (u"%s %s %s" % (self.sftp_command, globals.ssh_options, self.host_string))
+        commandline = (u"%s %s %s" % (self.sftp_command, config.ssh_options, self.host_string))
         self.run_sftp_command(commandline, commands)
 
 

=== modified file 'duplicity/backends/swiftbackend.py'
--- duplicity/backends/swiftbackend.py	2019-10-28 15:43:01 +0000
+++ duplicity/backends/swiftbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2013 Matthieu Huin <mhu@xxxxxxxxxxxx>
 #
@@ -22,7 +22,7 @@
 import os
 
 import duplicity.backend
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity import util
 from duplicity.errors import BackendException
@@ -112,7 +112,7 @@
         else:
             self.prefix = u''
 
-        policy = globals.swift_storage_policy
+        policy = config.swift_storage_policy
         policy_header = u'X-Storage-Policy'
 
         container_metadata = None
@@ -139,7 +139,7 @@
             log.FatalError(u"Container '%s' exists but its storage policy is '%s' not '%s'."
                            % (self.container, container_metadata[policy_header.lower()], policy))
 
-    def _error_code(self, operation, e):
+    def _error_code(self, operation, e):  # pylint: disable=unused-argument
         if isinstance(e, self.resp_exc):
             if e.http_status == 404:
                 return log.ErrorCode.backend_not_found

=== modified file 'duplicity/backends/sxbackend.py'
--- duplicity/backends/sxbackend.py	2020-01-02 12:05:22 +0000
+++ duplicity/backends/sxbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2014 Andrea Grandi <a.grandi@xxxxxxxxx>
 #

=== modified file 'duplicity/backends/tahoebackend.py'
--- duplicity/backends/tahoebackend.py	2020-01-02 12:05:22 +0000
+++ duplicity/backends/tahoebackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2008 Francois Deppierraz
 #
@@ -18,10 +18,9 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-import duplicity.backend
 from duplicity import log
 from duplicity import util
-from duplicity.errors import BackendException
+import duplicity.backend
 
 
 class TAHOEBackend(duplicity.backend.Backend):

=== modified file 'duplicity/backends/webdavbackend.py'
--- duplicity/backends/webdavbackend.py	2019-10-05 18:31:58 +0000
+++ duplicity/backends/webdavbackend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -36,7 +36,7 @@
 import xml.dom.minidom
 
 import duplicity.backend
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity import util
 from duplicity.errors import BackendException, FatalBackendException
@@ -67,7 +67,7 @@
 
         http.client.HTTPSConnection.__init__(self, *args, **kwargs)
 
-        self.cacert_file = globals.ssl_cacert_file
+        self.cacert_file = config.ssl_cacert_file
         self.cacert_candidates = [u"~/.duplicity/cacert.pem",
                                   u"~/duplicity_cacert.pem",
                                   u"/etc/duplicity/cacert.pem"]
@@ -96,11 +96,11 @@
         if u"create_default_context" in dir(ssl):
             context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH,
                                                  cafile=self.cacert_file,
-                                                 capath=globals.ssl_cacert_path)
+                                                 capath=config.ssl_cacert_path)
             self.sock = context.wrap_socket(sock, server_hostname=self.host)
         # the legacy way needing a cert file
         else:
-            if globals.ssl_cacert_path:
+            if config.ssl_cacert_path:
                 raise FatalBackendException(
                     _(u"Option '--ssl-cacert-path' is not supported "
                       u"with python 2.7.8 and below."))
@@ -152,7 +152,7 @@
         self.password = self.get_password()
         self.directory = self.sanitize_path(parsed_url.path)
 
-        log.Info(_(u"Using WebDAV protocol %s") % (globals.webdav_proto,))
+        log.Info(_(u"Using WebDAV protocol %s") % (config.webdav_proto,))
         log.Info(_(u"Using WebDAV host %s port %s") % (parsed_url.hostname,
                                                        parsed_url.port))
         log.Info(_(u"Using WebDAV directory %s") % (self.directory,))
@@ -192,7 +192,7 @@
         if self.parsed_url.scheme in [u'webdav', u'http']:
             self.conn = http.client.HTTPConnection(self.parsed_url.hostname, self.parsed_url.port)
         elif self.parsed_url.scheme in [u'webdavs', u'https']:
-            if globals.ssl_no_check_certificate:
+            if config.ssl_no_check_certificate:
                 self.conn = http.client.HTTPSConnection(self.parsed_url.hostname, self.parsed_url.port)
             else:
                 self.conn = VerifiedHTTPSConnection(self.parsed_url.hostname, self.parsed_url.port)

=== modified file 'duplicity/cached_ops.py'
--- duplicity/cached_ops.py	2018-11-29 19:00:15 +0000
+++ duplicity/cached_ops.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2012 Google Inc.
 #

=== modified file 'duplicity/commandline.py'
--- duplicity/commandline.py	2020-03-06 21:25:13 +0000
+++ duplicity/commandline.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -19,12 +19,11 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-u"""Parse command line, check for consistency, and set globals"""
+u"""Parse command line, check for consistency, and set config"""
 
 from __future__ import print_function
 from future import standard_library
 standard_library.install_aliases()
-from builtins import filter
 from builtins import str
 from builtins import range
 
@@ -43,7 +42,7 @@
 
 from duplicity import backend
 from duplicity import dup_time
-from duplicity import globals
+from duplicity import config
 from duplicity import gpg
 from duplicity import log
 from duplicity import path
@@ -108,8 +107,8 @@
     u"""
     Return expanded version of archdir joined with backname.
     """
-    assert globals.backup_name is not None, \
-        u"expand_archive_dir() called prior to globals.backup_name being set"
+    assert config.backup_name is not None, \
+        u"expand_archive_dir() called prior to config.backup_name being set"
 
     return expand_fn(os.path.join(archdir, backname))
 
@@ -135,18 +134,18 @@
     return burlhash.hexdigest()
 
 
-def check_file(option, opt, value):
+def check_file(option, opt, value):  # pylint: disable=unused-argument
     return expand_fn(value)
 
 
-def check_time(option, opt, value):
+def check_time(option, opt, value):  # pylint: disable=unused-argument
     try:
         return dup_time.genstrtotime(value)
     except dup_time.TimeException as e:
         raise optparse.OptionValueError(str(e))
 
 
-def check_verbosity(option, opt, value):
+def check_verbosity(option, opt, value):  # pylint: disable=unused-argument
     fail = False
 
     value = value.lower()
@@ -218,17 +217,17 @@
     def set_time_sep(sep, opt):
         if sep == u'-':
             raise optparse.OptionValueError(u"Dash ('-') not valid for time-separator.")
-        globals.time_separator = sep
+        config.time_separator = sep
         old_fn_deprecation(opt)
 
-    def add_selection(o, option, additional_arg, p):
+    def add_selection(o, option, additional_arg, p):  # pylint: disable=unused-argument
         if o.type in (u"string", u"file"):
             addarg = util.fsdecode(additional_arg)
         else:
             addarg = additional_arg
         select_opts.append((util.fsdecode(option), addarg))
 
-    def add_filelist(o, s, filename, p):
+    def add_filelist(o, s, filename, p):  # pylint: disable=unused-argument
         select_opts.append((util.fsdecode(s), util.fsdecode(filename)))
         try:
             select_files.append(io.open(filename, u"rt", encoding=u"UTF-8"))
@@ -236,12 +235,12 @@
             log.FatalError(_(u"Error opening file %s") % filename,
                            log.ErrorCode.cant_open_filelist)
 
-    def print_ver(o, s, v, p):
-        print(u"duplicity %s" % (globals.version))
+    def print_ver(o, s, v, p):  # pylint: disable=unused-argument
+        print(u"duplicity %s" % (config.version))
         sys.exit(0)
 
-    def add_rename(o, s, v, p):
-        globals.rename[os.path.normcase(os.path.normpath(v[0]))] = v[1]
+    def add_rename(o, s, v, p):  # pylint: disable=unused-argument
+        config.rename[os.path.normcase(os.path.normpath(v[0]))] = v[1]
 
     parser = optparse.OptionParser(option_class=DupOption, usage=usage())
 
@@ -282,14 +281,14 @@
     # --encrypt-key <gpg_key_id>
     parser.add_option(u"--encrypt-key", type=u"string", metavar=_(u"gpg-key-id"),
                       dest=u"", action=u"callback",
-                      callback=lambda o, s, v, p: globals.gpg_profile.recipients.append(v))  # @UndefinedVariable
+                      callback=lambda o, s, v, p: config.gpg_profile.recipients.append(v))
 
     # secret keyring in which the private encrypt key can be found
     parser.add_option(u"--encrypt-secret-keyring", type=u"string", metavar=_(u"path"))
 
     parser.add_option(u"--encrypt-sign-key", type=u"string", metavar=_(u"gpg-key-id"),
                       dest=u"", action=u"callback",
-                      callback=lambda o, s, v, p: (globals.gpg_profile.recipients.append(v), set_sign_key(v)))
+                      callback=lambda o, s, v, p: (config.gpg_profile.recipients.append(v), set_sign_key(v)))
 
     # TRANSL: Used in usage help to represent a "glob" style pattern for
     # matching one or more files, as described in the documentation.
@@ -382,7 +381,7 @@
     # --hidden-encrypt-key <gpg_key_id>
     parser.add_option(u"--hidden-encrypt-key", type=u"string", metavar=_(u"gpg-key-id"),
                       dest=u"", action=u"callback",
-                      callback=lambda o, s, v, p: globals.gpg_profile.hidden_recipients.append(v))  # @UndefinedVariable
+                      callback=lambda o, s, v, p: config.gpg_profile.hidden_recipients.append(v))
 
     # ignore (some) errors during operations; supposed to make it more
     # likely that you are able to restore data under problematic
@@ -650,30 +649,30 @@
     # parse the options
     (options, args) = parser.parse_args(arglist)
 
-    # Copy all arguments and their values to the globals module.  Don't copy
+    # Copy all arguments and their values to the config module.  Don't copy
     # attributes that are 'hidden' (start with an underscore) or whose name is
     # the empty string (used for arguments that don't directly store a value
     # by using dest="")
     for f in [x for x in dir(options) if x and not x.startswith(u"_")]:
         v = getattr(options, f)
         # Only set if v is not None because None is the default for all the
-        # variables.  If user didn't set it, we'll use defaults in globals.py
+        # variables.  If user didn't set it, we'll use defaults in config.py
         if v is not None:
-            setattr(globals, f, v)
+            setattr(config, f, v)
 
     # convert file_prefix* string
     if sys.version_info.major >= 3:
-        if isinstance(globals.file_prefix, str):
-            globals.file_prefix = bytes(globals.file_prefix, u'utf-8')
-        if isinstance(globals.file_prefix_manifest, str):
-            globals.file_prefix_manifest = bytes(globals.file_prefix_manifest, u'utf-8')
-        if isinstance(globals.file_prefix_archive, str):
-            globals.file_prefix_archive = bytes(globals.file_prefix_archive, u'utf-8')
-        if isinstance(globals.file_prefix_signature, str):
-            globals.file_prefix_signature = bytes(globals.file_prefix_signature, u'utf-8')
+        if isinstance(config.file_prefix, str):
+            config.file_prefix = bytes(config.file_prefix, u'utf-8')
+        if isinstance(config.file_prefix_manifest, str):
+            config.file_prefix_manifest = bytes(config.file_prefix_manifest, u'utf-8')
+        if isinstance(config.file_prefix_archive, str):
+            config.file_prefix_archive = bytes(config.file_prefix_archive, u'utf-8')
+        if isinstance(config.file_prefix_signature, str):
+            config.file_prefix_signature = bytes(config.file_prefix_signature, u'utf-8')
 
     # todo: this should really NOT be done here
-    socket.setdefaulttimeout(globals.timeout)
+    socket.setdefaulttimeout(config.timeout)
 
     # expect no cmd and two positional args
     cmd = u""
@@ -703,7 +702,7 @@
         full_backup = True
         num_expect = 2
     elif cmd == u"incremental":
-        globals.incremental = True
+        config.incremental = True
         num_expect = 2
     elif cmd == u"list-current-files":
         list_current = True
@@ -713,19 +712,19 @@
             arg = args.pop(0)
         except Exception:
             command_line_error(u"Missing time string for remove-older-than")
-        globals.remove_time = dup_time.genstrtotime(arg)
+        config.remove_time = dup_time.genstrtotime(arg)
         num_expect = 1
     elif cmd == u"remove-all-but-n-full" or cmd == u"remove-all-inc-of-but-n-full":
         if cmd == u"remove-all-but-n-full":
-            globals.remove_all_but_n_full_mode = True
+            config.remove_all_but_n_full_mode = True
         if cmd == u"remove-all-inc-of-but-n-full":
-            globals.remove_all_inc_of_but_n_full_mode = True
+            config.remove_all_inc_of_but_n_full_mode = True
         try:
             arg = args.pop(0)
         except Exception:
             command_line_error(u"Missing count for " + cmd)
-        globals.keep_chains = int(arg)
-        if not globals.keep_chains > 0:
+        config.keep_chains = int(arg)
+        if not config.keep_chains > 0:
             command_line_error(cmd + u" count must be > 0")
         num_expect = 1
     elif cmd == u"verify":
@@ -758,19 +757,19 @@
                 command_line_error(u"Two URLs expected for replicate.")
             src_backend_url, backend_url = args[0], args[1]
         else:
-            lpath, backend_url = args_to_path_backend(args[0], args[1])  # @UnusedVariable
+            lpath, backend_url = args_to_path_backend(args[0], args[1])
     else:
         command_line_error(u"Too many arguments")
 
-    if globals.backup_name is None:
-        globals.backup_name = generate_default_backup_name(backend_url)
+    if config.backup_name is None:
+        config.backup_name = generate_default_backup_name(backend_url)
 
     # set and expand archive dir
-    set_archive_dir(expand_archive_dir(globals.archive_dir,
-                                       globals.backup_name))
+    set_archive_dir(expand_archive_dir(config.archive_dir,
+                                       config.backup_name))
 
-    log.Info(_(u"Using archive dir: %s") % (globals.archive_dir_path.uc_name,))
-    log.Info(_(u"Using backup name: %s") % (globals.backup_name,))
+    log.Info(_(u"Using archive dir: %s") % (config.archive_dir_path.uc_name,))
+    log.Info(_(u"Using backup name: %s") % (config.backup_name,))
 
     return args
 
@@ -788,7 +787,7 @@
     be assumed to be for the benefit of translators, since they can get each string
     (paired with its preceding comment, if any) independently of the others."""
 
-    dict = {
+    trans = {
         # TRANSL: Used in usage help to represent a Unix-style path name. Example:
         # rsync://user[:password]@other_host[:port]//absolute_path
         u'absolute_path': _(u"absolute_path"),
@@ -939,7 +938,7 @@
   duplicity remove-all-inc-of-but-n-full %(count)s [%(options)s] %(target_url)s
   duplicity replicate %(source_url)s %(target_url)s
 
-""" % dict
+""" % trans
 
     # TRANSL: Header in usage help
     msg = msg + _(u"Backends and their URL formats:") + u"""
@@ -972,7 +971,7 @@
   webdav://%(user)s[:%(password)s]@%(other_host)s/%(some_dir)s
   webdavs://%(user)s[:%(password)s]@%(other_host)s/%(some_dir)s
 
-""" % dict
+""" % trans
 
     # TRANSL: Header in usage help
     msg = msg + _(u"Commands:") + u"""
@@ -986,7 +985,7 @@
   remove-all-but-n-full <%(count)s> <%(target_url)s>
   remove-all-inc-of-but-n-full <%(count)s> <%(target_url)s>
   verify <%(target_url)s> <%(source_dir)s>
-  replicate <%(source_url)s> <%(target_url)s>""" % dict
+  replicate <%(source_url)s> <%(target_url)s>""" % trans
 
     return msg
 
@@ -1003,24 +1002,24 @@
         log.FatalError(_(u"Specified archive directory '%s' does not exist, "
                          u"or is not a directory") % (archive_dir_path.uc_name,),
                        log.ErrorCode.bad_archive_dir)
-    globals.archive_dir_path = archive_dir_path
+    config.archive_dir_path = archive_dir_path
 
 
 def set_sign_key(sign_key):
-    u"""Set globals.sign_key assuming proper key given"""
+    u"""Set config.sign_key assuming proper key given"""
     if not re.search(u"^(0x)?([0-9A-Fa-f]{8}|[0-9A-Fa-f]{16}|[0-9A-Fa-f]{40})$", sign_key):
         log.FatalError(_(u"Sign key should be an 8, 16 alt. 40 character hex string, like "
                          u"'AA0E73D2'.\nReceived '%s' instead.") % (sign_key,),
                        log.ErrorCode.bad_sign_key)
-    globals.gpg_profile.sign_key = sign_key
+    config.gpg_profile.sign_key = sign_key
 
 
 def set_selection():
     u"""Return selection iter starting at filename with arguments applied"""
     global select_opts, select_files
-    sel = selection.Select(globals.local_path)
+    sel = selection.Select(config.local_path)
     sel.ParseArgs(select_opts, select_files)
-    globals.select = sel.set_iter()
+    config.select = sel.set_iter()
 
 
 def args_to_path_backend(arg1, arg2):
@@ -1056,7 +1055,7 @@
     """
     path, bend = args_to_path_backend(arg1, arg2)
 
-    globals.backend = backend.get_backend(bend)
+    config.backend = backend.get_backend(bend)
 
     if path == arg2:
         return (None, arg2)  # False?
@@ -1065,10 +1064,10 @@
 
 
 def process_local_dir(action, local_pathname):
-    u"""Check local directory, set globals.local_path"""
+    u"""Check local directory, set config.local_path"""
     local_path = path.Path(path.Path(local_pathname).get_canonical())
     if action == u"restore":
-        if (local_path.exists() and not local_path.isemptydir()) and not globals.force:
+        if (local_path.exists() and not local_path.isemptydir()) and not config.force:
             log.FatalError(_(u"Restore destination directory %s already "
                              u"exists.\nWill not overwrite.") % (local_path.uc_name,),
                            log.ErrorCode.restore_dir_exists)
@@ -1084,7 +1083,7 @@
                            % (local_path.uc_name,),
                            log.ErrorCode.backup_dir_doesnt_exist)
 
-    globals.local_path = local_path
+    config.local_path = local_path
 
 
 def check_consistency(action):
@@ -1102,12 +1101,12 @@
     if action in [u"list-current", u"collection-status",
                   u"cleanup", u"remove-old", u"remove-all-but-n-full", u"remove-all-inc-of-but-n-full", u"replicate"]:
         assert_only_one([list_current, collection_status, cleanup, replicate,
-                         globals.remove_time is not None])
+                         config.remove_time is not None])
     elif action == u"restore" or action == u"verify":
         if full_backup:
             command_line_error(u"--full option cannot be used when "
                                u"restoring or verifying")
-        elif globals.incremental:
+        elif config.incremental:
             command_line_error(u"--incremental option cannot be used when "
                                u"restoring or verifying")
         if select_opts and action == u"restore":
@@ -1119,36 +1118,36 @@
         if verify:
             command_line_error(u"--verify option cannot be used "
                                u"when backing up")
-        if globals.restore_dir:
+        if config.restore_dir:
             command_line_error(u"restore option incompatible with %s backup"
                                % (action,))
-        if sum([globals.s3_use_rrs, globals.s3_use_ia, globals.s3_use_onezone_ia]) >= 2:
+        if sum([config.s3_use_rrs, config.s3_use_ia, config.s3_use_onezone_ia]) >= 2:
             command_line_error(u"only one of --s3-use-rrs, --s3-use-ia, and --s3-use-onezone-ia may be used")
 
 
 def ProcessCommandLine(cmdline_list):
-    u"""Process command line, set globals, return action
+    u"""Process command line, set config, return action
 
     action will be "list-current", "collection-status", "cleanup",
     "remove-old", "restore", "verify", "full", or "inc".
 
     """
     # build initial gpg_profile
-    globals.gpg_profile = gpg.GPGProfile()
+    config.gpg_profile = gpg.GPGProfile()
 
     # parse command line
     args = parse_cmdline_options(cmdline_list)
 
     # if we get a different gpg-binary from the commandline then redo gpg_profile
-    if globals.gpg_binary is not None:
-        src = globals.gpg_profile
-        globals.gpg_profile = gpg.GPGProfile(
+    if config.gpg_binary is not None:
+        src = config.gpg_profile
+        config.gpg_profile = gpg.GPGProfile(
             passphrase=src.passphrase,
             sign_key=src.sign_key,
             recipients=src.recipients,
             hidden_recipients=src.hidden_recipients)
     log.Debug(_(u"GPG binary is %s, version %s") %
-              ((globals.gpg_binary or u'gpg'), globals.gpg_profile.gpg_version))
+              ((config.gpg_binary or u'gpg'), config.gpg_profile.gpg_version))
 
     # we can now try to import all the backends
     backend.import_backends()
@@ -1164,24 +1163,24 @@
             action = u"collection-status"
         elif cleanup:
             action = u"cleanup"
-        elif globals.remove_time is not None:
+        elif config.remove_time is not None:
             action = u"remove-old"
-        elif globals.remove_all_but_n_full_mode:
+        elif config.remove_all_but_n_full_mode:
             action = u"remove-all-but-n-full"
-        elif globals.remove_all_inc_of_but_n_full_mode:
+        elif config.remove_all_inc_of_but_n_full_mode:
             action = u"remove-all-inc-of-but-n-full"
         else:
             command_line_error(u"Too few arguments")
-        globals.backend = backend.get_backend(args[0])
-        if not globals.backend:
+        config.backend = backend.get_backend(args[0])
+        if not config.backend:
             log.FatalError(_(u"""Bad URL '%s'.
 Examples of URL strings are "scp://user@xxxxxxxx:1234/path" and
 "file:///usr/local".  See the man page for more information.""") % (args[0],),
                            log.ErrorCode.bad_url)
     elif len(args) == 2:
         if replicate:
-            globals.src_backend = backend.get_backend(args[0])
-            globals.backend = backend.get_backend(args[1])
+            config.src_backend = backend.get_backend(args[0])
+            config.backend = backend.get_backend(args[1])
             action = u"replicate"
         else:
             # Figure out whether backup or restore

=== renamed file 'duplicity/globals.py' => 'duplicity/config.py'
--- duplicity/globals.py	2020-02-12 19:33:59 +0000
+++ duplicity/config.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'duplicity/diffdir.py'
--- duplicity/diffdir.py	2019-12-09 15:47:05 +0000
+++ duplicity/diffdir.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -37,13 +37,13 @@
 from builtins import object
 
 import io
-import types
-import math
+import sys
+
 from duplicity import statistics
 from duplicity import util
-from duplicity import globals
-from duplicity.path import *  # @UnusedWildImport
-from duplicity.lazy import *  # @UnusedWildImport
+from duplicity import config
+from duplicity.path import *  # pylint: disable=unused-wildcard-import,redefined-builtin
+from duplicity.lazy import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 from duplicity import progress
 
 # A StatsObj will be written to this from DirDelta and DirDelta_WriteSig.
@@ -96,13 +96,13 @@
     else:
         sig_iter = sigtar2path_iter(dirsig_fileobj_list)
     delta_iter = get_delta_iter(path_iter, sig_iter)
-    if globals.dry_run or (globals.progress and not progress.tracker.has_collected_evidence()):
+    if config.dry_run or (config.progress and not progress.tracker.has_collected_evidence()):
         return DummyBlockIter(delta_iter)
     else:
         return DeltaTarBlockIter(delta_iter)
 
 
-def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None):
+def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None):  # pylint: disable=unused-argument
     u"""
     Called by get_delta_iter, report error in getting delta
     """
@@ -389,7 +389,7 @@
     else:
         sig_path_iter = sigtar2path_iter(sig_infp_list)
     delta_iter = get_delta_iter(path_iter, sig_path_iter, newsig_outfp)
-    if globals.dry_run or (globals.progress and not progress.tracker.has_collected_evidence()):
+    if config.dry_run or (config.progress and not progress.tracker.has_collected_evidence()):
         return DummyBlockIter(delta_iter)
     else:
         return DeltaTarBlockIter(delta_iter)
@@ -502,15 +502,15 @@
         Make tarblock out of tarinfo and file data
         """
         tarinfo.size = len(file_data)
-        headers = tarinfo.tobuf(errors=u'replace', encoding=globals.fsencoding)
-        blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE)  # @UnusedVariable
+        headers = tarinfo.tobuf(errors=u'replace', encoding=config.fsencoding)
+        blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE)
         if remainder > 0:
             filler_data = b"\0" * (tarfile.BLOCKSIZE - remainder)
         else:
             filler_data = b""
         return TarBlock(index, b"%s%s%s" % (headers, file_data, filler_data))
 
-    def process(self, val):
+    def process(self, val):  # pylint: disable=unused-argument
         u"""
         Turn next value of input_iter into a TarBlock
         """
@@ -589,7 +589,7 @@
         u"""
         Return closing string for tarfile, reset offset
         """
-        blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE)  # @UnusedVariable
+        blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE)
         self.offset = 0
         return b'\0' * (tarfile.RECORDSIZE - remainder)  # remainder can be 0
 
@@ -767,4 +767,4 @@
     else:
         # Split file into about 2000 pieces, rounding to 512
         file_blocksize = int((file_len / (2000 * 512))) * 512
-        return min(file_blocksize, globals.max_blocksize)
+        return min(file_blocksize, config.max_blocksize)

=== modified file 'duplicity/dup_collections.py'
--- duplicity/dup_collections.py	2020-03-03 16:35:31 +0000
+++ duplicity/dup_collections.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -21,15 +21,12 @@
 
 u"""Classes and functions on collections of backup volumes"""
 
-from past.builtins import cmp
-from builtins import filter
 from builtins import str
 from builtins import zip
 from builtins import map
 from builtins import range
 from builtins import object
 
-import types
 import sys
 
 from duplicity import log
@@ -37,7 +34,7 @@
 from duplicity import path
 from duplicity import util
 from duplicity import dup_time
-from duplicity import globals
+from duplicity import config
 from duplicity import manifest
 from duplicity import util
 from duplicity.gpg import GPGError
@@ -153,7 +150,7 @@
         self.remote_manifest_name = remote_filename
 
         if self.action != u"replicate":
-            local_filename_list = globals.archive_dir_path.listdir()
+            local_filename_list = config.archive_dir_path.listdir()
         else:
             local_filename_list = []
         for local_filename in local_filename_list:
@@ -163,7 +160,7 @@
                     pr.start_time == self.start_time and
                     pr.end_time == self.end_time):
                 self.local_manifest_path = \
-                    globals.archive_dir_path.append(local_filename)
+                    config.archive_dir_path.append(local_filename)
 
                 self.set_files_changed()
                 break
@@ -180,7 +177,7 @@
             log.Debug(_(u"BackupSet.delete: missing %s") % [util.fsdecode(f) for f in rfn])
             pass
         if self.action != u"replicate":
-            local_filename_list = globals.archive_dir_path.listdir()
+            local_filename_list = config.archive_dir_path.listdir()
         else:
             local_filename_list = []
         for lfn in local_filename_list:
@@ -189,7 +186,7 @@
                     pr.start_time == self.start_time and
                     pr.end_time == self.end_time):
                 try:
-                    globals.archive_dir_path.append(lfn).delete()
+                    config.archive_dir_path.append(lfn).delete()
                 except Exception:
                     log.Debug(_(u"BackupSet.delete: missing %s") % [util.fsdecode(f) for f in lfn])
                     pass
@@ -415,16 +412,16 @@
         l = []
         for s in self.get_all_sets():
             if s.time:
-                type = u"full"
+                btype = u"full"
                 time = s.time
             else:
-                type = u"inc"
+                btype = u"inc"
                 time = s.end_time
             if s.encrypted:
                 enc = u"enc"
             else:
                 enc = u"noenc"
-            l.append(u"%s%s %s %d %s" % (prefix, type, dup_time.timetostring(time), (len(s)), enc))
+            l.append(u"%s%s %s %d %s" % (prefix, btype, dup_time.timetostring(time), (len(s)), enc))
         return l
 
     def __str__(self):
@@ -443,12 +440,12 @@
 
         for s in self.get_all_sets():
             if s.time:
-                type = _(u"Full")
+                btype = _(u"Full")
                 time = s.time
             else:
-                type = _(u"Incremental")
+                btype = _(u"Incremental")
                 time = s.end_time
-            l.append(set_schema % (type, dup_time.timetopretty(time), len(s)))
+            l.append(set_schema % (btype, dup_time.timetopretty(time), len(s)))
 
         l.append(u"-------------------------")
         return u"\n".join(l)
@@ -484,14 +481,14 @@
         Return new SignatureChain.
 
         local should be true iff the signature chain resides in
-        globals.archive_dir_path and false if the chain is in
-        globals.backend.
+        config.archive_dir_path and false if the chain is in
+        config.backend.
 
-        @param local: True if sig chain in globals.archive_dir_path
+        @param local: True if sig chain in config.archive_dir_path
         @type local: Boolean
 
         @param location: Where the sig chain is located
-        @type location: globals.archive_dir_path or globals.backend
+        @type location: config.archive_dir_path or config.backend
         """
         if local:
             self.archive_dir_path, self.backend = location, None
@@ -855,7 +852,7 @@
             Try adding filename to existing sets, or make new one
             """
             pr = file_naming.parse(filename)
-            for set in sets:
+            for set in sets:  # pylint: disable=redefined-builtin
                 if set.add_filename(filename, pr):
                     log.Debug(_(u"File %s is part of known set") % (util.fsdecode(filename),))
                     break
@@ -873,7 +870,7 @@
 
         chains, orphaned_sets = [], []
 
-        def add_to_chains(set):
+        def add_to_chains(set):  # pylint: disable=redefined-builtin
             u"""
             Try adding set to existing chains, or make new one
             """
@@ -901,7 +898,7 @@
         Sort set list by end time, return (sorted list, incomplete)
         """
         time_set_pairs, incomplete_sets = [], []
-        for set in set_list:
+        for set in set_list:  # pylint: disable=redefined-builtin
             if not set.is_complete():
                 incomplete_sets.append(set)
             elif set.type == u"full":
@@ -1154,7 +1151,7 @@
         if len(self.all_backup_chains) < n:
             return None
 
-        sorted = self.all_backup_chains[:]
+        sorted = self.all_backup_chains[:]  # pylint: disable=redefined-builtin
         sorted.sort(key=lambda x: x.get_first().time)
 
         sorted.reverse()
@@ -1234,7 +1231,7 @@
             backup_type = s[0]
             backup_set = s[1]
             if backup_set.time:
-                type = _(u"Full")
+                type = _(u"Full")  # pylint: disable=redefined-builtin
             else:
                 type = _(u"Incremental")
             l.append(set_schema % (type, dup_time.timetopretty(backup_set.get_time()), backup_type.title()))

=== modified file 'duplicity/dup_main.py'
--- duplicity/dup_main.py	2020-02-12 19:33:59 +0000
+++ duplicity/dup_main.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # duplicity -- Encrypted bandwidth efficient backup
 #
@@ -28,7 +28,6 @@
 from __future__ import print_function
 from future import standard_library
 standard_library.install_aliases()
-from builtins import filter
 from builtins import map
 from builtins import next
 from builtins import object
@@ -36,18 +35,11 @@
 
 import copy
 import fasteners
-import gzip
 import os
 import platform
-import re
 import resource
 import sys
 import time
-import traceback
-import types
-
-from datetime import datetime
-from os import statvfs
 
 from duplicity import __version__
 from duplicity import asyncscheduler
@@ -56,16 +48,14 @@
 from duplicity import dup_collections
 from duplicity import dup_temp
 from duplicity import dup_time
-from duplicity import errors
 from duplicity import file_naming
-from duplicity import globals
+from duplicity import config
 from duplicity import gpg
 from duplicity import log
 from duplicity import manifest
 from duplicity import patchdir
 from duplicity import path
 from duplicity import progress
-from duplicity import robust
 from duplicity import tempdir
 from duplicity import util
 
@@ -115,15 +105,15 @@
     # check if we can reuse an already set (signing_)passphrase
     # if signing key is also an encryption key assume that the passphrase is identical
     if (for_signing and
-            (globals.gpg_profile.sign_key in globals.gpg_profile.recipients or
-             globals.gpg_profile.sign_key in globals.gpg_profile.hidden_recipients) and
+            (config.gpg_profile.sign_key in config.gpg_profile.recipients or
+             config.gpg_profile.sign_key in config.gpg_profile.hidden_recipients) and
              u'PASSPHRASE' in os.environ):  # noqa
         log.Notice(_(u"Reuse configured PASSPHRASE as SIGN_PASSPHRASE"))
         return os.environ[u'PASSPHRASE']
     # if one encryption key is also the signing key assume that the passphrase is identical
     if (not for_signing and
-            (globals.gpg_profile.sign_key in globals.gpg_profile.recipients or
-             globals.gpg_profile.sign_key in globals.gpg_profile.hidden_recipients) and
+            (config.gpg_profile.sign_key in config.gpg_profile.recipients or
+             config.gpg_profile.sign_key in config.gpg_profile.hidden_recipients) and
              u'SIGN_PASSPHRASE' in os.environ):  # noqa
         log.Notice(_(u"Reuse configured SIGN_PASSPHRASE as PASSPHRASE"))
         return os.environ[u'SIGN_PASSPHRASE']
@@ -136,7 +126,7 @@
     #   - gpg-agent supplies all, no user interaction
 
     # no passphrase if --no-encryption or --use-agent
-    if not globals.encryption or globals.use_agent:
+    if not config.encryption or config.use_agent:
         return u""
 
     # these commands don't need a password
@@ -151,15 +141,15 @@
     # for a full backup, we don't need a password if
     # there is no sign_key and there are recipients
     elif (action == u"full" and
-          (globals.gpg_profile.recipients or globals.gpg_profile.hidden_recipients) and not
-          globals.gpg_profile.sign_key):
+          (config.gpg_profile.recipients or config.gpg_profile.hidden_recipients) and not
+          config.gpg_profile.sign_key):
         return u""
 
     # for an inc backup, we don't need a password if
     # there is no sign_key and there are recipients
     elif (action == u"inc" and
-          (globals.gpg_profile.recipients or globals.gpg_profile.hidden_recipients) and not
-          globals.gpg_profile.sign_key):
+          (config.gpg_profile.recipients or config.gpg_profile.hidden_recipients) and not
+          config.gpg_profile.sign_key):
         return u""
 
     # Finally, ask the user for the passphrase
@@ -171,18 +161,18 @@
             # if the user made a typo in the first passphrase
             if use_cache and n == 2:
                 if for_signing:
-                    pass1 = globals.gpg_profile.signing_passphrase
+                    pass1 = config.gpg_profile.signing_passphrase
                 else:
-                    pass1 = globals.gpg_profile.passphrase
+                    pass1 = config.gpg_profile.passphrase
             else:
                 if for_signing:
-                    if use_cache and globals.gpg_profile.signing_passphrase:
-                        pass1 = globals.gpg_profile.signing_passphrase
+                    if use_cache and config.gpg_profile.signing_passphrase:
+                        pass1 = config.gpg_profile.signing_passphrase
                     else:
                         pass1 = getpass_safe(_(u"GnuPG passphrase for signing key:") + u" ")
                 else:
-                    if use_cache and globals.gpg_profile.passphrase:
-                        pass1 = globals.gpg_profile.passphrase
+                    if use_cache and config.gpg_profile.passphrase:
+                        pass1 = config.gpg_profile.passphrase
                     else:
                         pass1 = getpass_safe(_(u"GnuPG passphrase:") + u" ")
 
@@ -199,8 +189,8 @@
                 use_cache = False
                 continue
 
-            if not pass1 and not (globals.gpg_profile.recipients or
-                                  globals.gpg_profile.hidden_recipients) and not for_signing:
+            if not pass1 and not (config.gpg_profile.recipients or
+                                  config.gpg_profile.hidden_recipients) and not for_signing:
                 log.Log(_(u"Cannot use empty passphrase with symmetric encryption!  Please try again."),
                         log.WARNING, force_print=True)
                 use_cache = False
@@ -242,8 +232,8 @@
     @rtype: int
     @return: constant 0 (zero)
     """
-    last_index = globals.restart.last_index
-    last_block = globals.restart.last_block
+    last_index = config.restart.last_index
+    last_block = config.restart.last_block
     try:
         # Just spin our wheels
         iter_result = next(tarblock_iter)
@@ -326,7 +316,7 @@
         instead of copying.
         """
         putsize = tdp.getsize()
-        if globals.skip_volume != vol_num:  # for testing purposes only
+        if config.skip_volume != vol_num:  # for testing purposes only
             backend.put(tdp, dest_filename)
         validate_block(putsize, dest_filename)
         if tdp.stat:
@@ -345,45 +335,45 @@
         from encrypted to non in the middle of a backup chain), so we check
         that the vol1 filename on the server matches the settings of this run.
         """
-        if ((globals.gpg_profile.recipients or globals.gpg_profile.hidden_recipients) and
-                not globals.gpg_profile.sign_key):
+        if ((config.gpg_profile.recipients or config.gpg_profile.hidden_recipients) and
+                not config.gpg_profile.sign_key):
             # When using gpg encryption without a signing key, we skip this validation
             # step to ensure that we can still backup without needing the secret key
             # on the machine.
             return
 
         vol1_filename = file_naming.get(backup_type, 1,
-                                        encrypted=globals.encryption,
-                                        gzipped=globals.compression)
+                                        encrypted=config.encryption,
+                                        gzipped=config.compression)
         if vol1_filename != backup_set.volume_name_dict[1]:
             log.FatalError(_(u"Restarting backup, but current encryption "
                              u"settings do not match original settings"),
                            log.ErrorCode.enryption_mismatch)
 
         # Settings are same, let's check passphrase itself if we are encrypted
-        if globals.encryption:
-            fileobj = restore_get_enc_fileobj(globals.backend, vol1_filename,
+        if config.encryption:
+            fileobj = restore_get_enc_fileobj(config.backend, vol1_filename,
                                               manifest.volume_info_dict[1])
             fileobj.close()
 
-    if not globals.restart:
+    if not config.restart:
         # normal backup start
         vol_num = 0
         mf = manifest.Manifest(fh=man_outfp)
         mf.set_dirinfo()
     else:
         # restart from last known position
-        mf = globals.restart.last_backup.get_local_manifest()
-        globals.restart.checkManifest(mf)
-        globals.restart.setLastSaved(mf)
-        validate_encryption_settings(globals.restart.last_backup, mf)
+        mf = config.restart.last_backup.get_local_manifest()
+        config.restart.checkManifest(mf)
+        config.restart.setLastSaved(mf)
+        validate_encryption_settings(config.restart.last_backup, mf)
         mf.fh = man_outfp
-        last_block = globals.restart.last_block
+        last_block = config.restart.last_block
         log.Notice(_(u"Restarting after volume %s, file %s, block %s") %
-                   (globals.restart.start_vol,
-                    util.uindex(globals.restart.last_index),
-                    globals.restart.last_block))
-        vol_num = globals.restart.start_vol
+                   (config.restart.start_vol,
+                    util.uindex(config.restart.last_index),
+                    config.restart.last_block))
+        vol_num = config.restart.start_vol
         restart_position_iterator(tarblock_iter)
 
     at_end = 0
@@ -391,7 +381,7 @@
 
     # If --progress option is given, initiate a background thread that will
     # periodically report progress to the Log.
-    if globals.progress:
+    if config.progress:
         progress.tracker.set_start_volume(vol_num + 1)
         progress.progress_thread.start()
 
@@ -403,9 +393,9 @@
     # is an assert put in place to avoid someone accidentally
     # enabling concurrency above 1, before adequate work has been
     # done on the backends to make them support concurrency.
-    assert globals.async_concurrency <= 1
+    assert config.async_concurrency <= 1
 
-    io_scheduler = asyncscheduler.AsyncScheduler(globals.async_concurrency)
+    io_scheduler = asyncscheduler.AsyncScheduler(config.async_concurrency)
     async_waiters = []
 
     while not at_end:
@@ -415,18 +405,18 @@
         # Create volume
         vol_num += 1
         dest_filename = file_naming.get(backup_type, vol_num,
-                                        encrypted=globals.encryption,
-                                        gzipped=globals.compression)
+                                        encrypted=config.encryption,
+                                        gzipped=config.compression)
         tdp = dup_temp.new_tempduppath(file_naming.parse(dest_filename))
 
         # write volume
-        if globals.encryption:
-            at_end = gpg.GPGWriteFile(tarblock_iter, tdp.name, globals.gpg_profile,
-                                      globals.volsize)
-        elif globals.compression:
-            at_end = gpg.GzipWriteFile(tarblock_iter, tdp.name, globals.volsize)
+        if config.encryption:
+            at_end = gpg.GPGWriteFile(tarblock_iter, tdp.name, config.gpg_profile,
+                                      config.volsize)
+        elif config.compression:
+            at_end = gpg.GzipWriteFile(tarblock_iter, tdp.name, config.volsize)
         else:
-            at_end = gpg.PlainWriteFile(tarblock_iter, tdp.name, globals.volsize)
+            at_end = gpg.PlainWriteFile(tarblock_iter, tdp.name, config.volsize)
         tdp.setdata()
 
         # Add volume information to manifest
@@ -452,11 +442,11 @@
         log.Progress(_(u'Processed volume %d') % vol_num, diffdir.stats.SourceFileSize)
         # Snapshot (serialize) progress now as a Volume has been completed.
         # This is always the last restore point when it comes to restart a failed backup
-        if globals.progress:
+        if config.progress:
             progress.tracker.snapshot_progress(vol_num)
 
         # for testing purposes only - assert on inc or full
-        assert globals.fail_on_volume != vol_num, u"Forced assertion for testing at volume %d" % vol_num
+        assert config.fail_on_volume != vol_num, u"Forced assertion for testing at volume %d" % vol_num
 
     # Collect byte count from all asynchronous jobs; also implicitly waits
     # for them all to complete.
@@ -474,7 +464,7 @@
     u"""
     Return a fileobj opened for writing, save results as manifest
 
-    Save manifest in globals.archive_dir_path gzipped.
+    Save manifest in config.archive_dir_path gzipped.
     Save them on the backend encrypted as needed.
 
     @type man_type: string
@@ -492,9 +482,9 @@
                                         manifest=True)
     remote_man_filename = file_naming.get(backup_type,
                                           manifest=True,
-                                          encrypted=globals.encryption)
+                                          encrypted=config.encryption)
 
-    fh = dup_temp.get_fileobj_duppath(globals.archive_dir_path,
+    fh = dup_temp.get_fileobj_duppath(config.archive_dir_path,
                                       part_man_filename,
                                       perm_man_filename,
                                       remote_man_filename)
@@ -505,7 +495,7 @@
     u"""
     Return a fileobj opened for writing, save results as signature
 
-    Save signatures in globals.archive_dir gzipped.
+    Save signatures in config.archive_dir gzipped.
     Save them on the backend encrypted as needed.
 
     @type sig_type: string
@@ -521,10 +511,10 @@
                                         partial=True)
     perm_sig_filename = file_naming.get(sig_type,
                                         gzipped=True)
-    remote_sig_filename = file_naming.get(sig_type, encrypted=globals.encryption,
-                                          gzipped=globals.compression)
+    remote_sig_filename = file_naming.get(sig_type, encrypted=config.encryption,
+                                          gzipped=config.compression)
 
-    fh = dup_temp.get_fileobj_duppath(globals.archive_dir_path,
+    fh = dup_temp.get_fileobj_duppath(config.archive_dir_path,
                                       part_sig_filename,
                                       perm_sig_filename,
                                       remote_sig_filename,
@@ -542,30 +532,30 @@
     @rtype: void
     @return: void
     """
-    if globals.progress:
+    if config.progress:
         progress.tracker = progress.ProgressTracker()
         # Fake a backup to compute total of moving bytes
-        tarblock_iter = diffdir.DirFull(globals.select)
+        tarblock_iter = diffdir.DirFull(config.select)
         dummy_backup(tarblock_iter)
         # Store computed stats to compute progress later
         progress.tracker.set_evidence(diffdir.stats, True)
-        # Reinit the globals.select iterator, so
+        # Reinit the config.select iterator, so
         # the core of duplicity can rescan the paths
         commandline.set_selection()
         progress.progress_thread = progress.LogProgressThread()
 
-    if globals.dry_run:
-        tarblock_iter = diffdir.DirFull(globals.select)
+    if config.dry_run:
+        tarblock_iter = diffdir.DirFull(config.select)
         bytes_written = dummy_backup(tarblock_iter)
         col_stats.set_values(sig_chain_warning=None)
     else:
         sig_outfp = get_sig_fileobj(u"full-sig")
         man_outfp = get_man_fileobj(u"full")
-        tarblock_iter = diffdir.DirFull_WriteSig(globals.select,
+        tarblock_iter = diffdir.DirFull_WriteSig(config.select,
                                                  sig_outfp)
         bytes_written = write_multivol(u"full", tarblock_iter,
                                        man_outfp, sig_outfp,
-                                       globals.backend)
+                                       config.backend)
 
         # close sig file, send to remote, and rename to final
         sig_outfp.close()
@@ -577,7 +567,7 @@
         man_outfp.to_remote()
         man_outfp.to_final()
 
-        if globals.progress:
+        if config.progress:
             # Terminate the background thread now, if any
             progress.progress_thread.finished = True
             progress.progress_thread.join()
@@ -598,7 +588,7 @@
     @param col_stats: collection status
     """
     if not col_stats.matched_chain_pair:
-        if globals.incremental:
+        if config.incremental:
             log.FatalError(_(u"Fatal Error: Unable to start incremental backup.  "
                              u"Old signatures not found and incremental specified"),
                            log.ErrorCode.inc_without_sigs)
@@ -608,14 +598,14 @@
     return col_stats.matched_chain_pair[0]
 
 
-def print_statistics(stats, bytes_written):
+def print_statistics(stats, bytes_written):  # pylint: disable=unused-argument
     u"""
-    If globals.print_statistics, print stats after adding bytes_written
+    If config.print_statistics, print stats after adding bytes_written
 
     @rtype: void
     @return: void
     """
-    if globals.print_statistics:
+    if config.print_statistics:
         diffdir.stats.TotalDestinationSizeChange = bytes_written
         logstring = diffdir.stats.get_stats_logstring(_(u"Backup Statistics"))
         log.Log(logstring, log.NOTICE, force_print=True)
@@ -628,7 +618,7 @@
     @rtype: void
     @return: void
     """
-    if not globals.restart:
+    if not config.restart:
         dup_time.setprevtime(sig_chain.end_time)
         if dup_time.curtime == dup_time.prevtime:
             time.sleep(2)
@@ -636,32 +626,32 @@
             assert dup_time.curtime != dup_time.prevtime, \
                 u"time not moving forward at appropriate pace - system clock issues?"
 
-    if globals.progress:
+    if config.progress:
         progress.tracker = progress.ProgressTracker()
         # Fake a backup to compute total of moving bytes
-        tarblock_iter = diffdir.DirDelta(globals.select,
+        tarblock_iter = diffdir.DirDelta(config.select,
                                          sig_chain.get_fileobjs())
         dummy_backup(tarblock_iter)
         # Store computed stats to compute progress later
         progress.tracker.set_evidence(diffdir.stats, False)
-        # Reinit the globals.select iterator, so
+        # Reinit the config.select iterator, so
         # the core of duplicity can rescan the paths
         commandline.set_selection()
         progress.progress_thread = progress.LogProgressThread()
 
-    if globals.dry_run:
-        tarblock_iter = diffdir.DirDelta(globals.select,
+    if config.dry_run:
+        tarblock_iter = diffdir.DirDelta(config.select,
                                          sig_chain.get_fileobjs())
         bytes_written = dummy_backup(tarblock_iter)
     else:
         new_sig_outfp = get_sig_fileobj(u"new-sig")
         new_man_outfp = get_man_fileobj(u"inc")
-        tarblock_iter = diffdir.DirDelta_WriteSig(globals.select,
+        tarblock_iter = diffdir.DirDelta_WriteSig(config.select,
                                                   sig_chain.get_fileobjs(),
                                                   new_sig_outfp)
         bytes_written = write_multivol(u"inc", tarblock_iter,
                                        new_man_outfp, new_sig_outfp,
-                                       globals.backend)
+                                       config.backend)
 
         # close sig file and rename to final
         new_sig_outfp.close()
@@ -673,7 +663,7 @@
         new_man_outfp.to_remote()
         new_man_outfp.to_final()
 
-        if globals.progress:
+        if config.progress:
             # Terminate the background thread now, if any
             progress.progress_thread.finished = True
             progress.progress_thread.join()
@@ -694,7 +684,7 @@
     @rtype: void
     @return: void
     """
-    time = globals.restore_time or dup_time.curtime
+    time = config.restore_time or dup_time.curtime
     sig_chain = col_stats.get_signature_chain_at_time(time)
     path_iter = diffdir.get_combined_path_iter(sig_chain.get_fileobjs(time))
     for path in path_iter:
@@ -710,7 +700,7 @@
 
 def restore(col_stats):
     u"""
-    Restore archive in globals.backend to globals.local_path
+    Restore archive in config.backend to config.local_path
 
     @type col_stats: CollectionStatus object
     @param col_stats: collection status
@@ -718,13 +708,13 @@
     @rtype: void
     @return: void
     """
-    if globals.dry_run:
+    if config.dry_run:
         return
-    if not patchdir.Write_ROPaths(globals.local_path,
+    if not patchdir.Write_ROPaths(config.local_path,
                                   restore_get_patched_rop_iter(col_stats)):
-        if globals.restore_dir:
+        if config.restore_dir:
             log.FatalError(_(u"%s not found in archive - no files restored.")
-                           % (util.fsdecode(globals.restore_dir)),
+                           % (util.fsdecode(config.restore_dir)),
                            log.ErrorCode.restore_dir_not_found)
         else:
             log.FatalError(_(u"No files found in archive - nothing restored."),
@@ -738,11 +728,11 @@
     @type col_stats: CollectionStatus object
     @param col_stats: collection status
     """
-    if globals.restore_dir:
-        index = tuple(globals.restore_dir.split(b"/"))
+    if config.restore_dir:
+        index = tuple(config.restore_dir.split(b"/"))
     else:
         index = ()
-    time = globals.restore_time or dup_time.curtime
+    time = config.restore_time or dup_time.curtime
     backup_chain = col_stats.get_backup_chain_at_time(time)
     assert backup_chain, col_stats.all_backup_chains
     backup_setlist = backup_chain.get_sets_at_time(time)
@@ -763,14 +753,14 @@
             log.Progress(_(u'Processed volume %d of %d') % (cur_vol[0], num_vols),
                          cur_vol[0], num_vols)
 
-    if hasattr(globals.backend, u'pre_process_download'):
+    if hasattr(config.backend, u'pre_process_download'):
         file_names = []
         for backup_set in backup_setlist:
             manifest = backup_set.get_manifest()
             volumes = manifest.get_containing_volumes(index)
             for vol_num in volumes:
                 file_names.append(backup_set.volume_name_dict[vol_num])
-        globals.backend.pre_process_download(file_names)
+        config.backend.pre_process_download(file_names)
 
     fileobj_iters = list(map(get_fileobj_iter, backup_setlist))
     tarfiles = list(map(patchdir.TarFile_FromFileobjs, fileobj_iters))
@@ -782,7 +772,7 @@
     Return plaintext fileobj from encrypted filename on backend
 
     If volume_info is set, the hash of the file will be checked,
-    assuming some hash is available.  Also, if globals.sign_key is
+    assuming some hash is available.  Also, if config.sign_key is
     set, a fatal error will be raised if file not signed by sign_key.
 
     """
@@ -802,7 +792,7 @@
                        log.ErrorCode.mismatched_hash)
 
     fileobj = tdp.filtered_open_with_delete(u"rb")
-    if parseresults.encrypted and globals.gpg_profile.sign_key:
+    if parseresults.encrypted and config.gpg_profile.sign_key:
         restore_add_sig_check(fileobj)
     return fileobj
 
@@ -837,7 +827,7 @@
         u"""Thunk run when closing volume file"""
         actual_sig = fileobj.fileobj.get_signature()
         actual_sig = u"None" if actual_sig is None else actual_sig
-        sign_key = globals.gpg_profile.sign_key
+        sign_key = config.gpg_profile.sign_key
         sign_key = u"None" if sign_key is None else sign_key
         ofs = -min(len(actual_sig), len(sign_key))
         if actual_sig[ofs:] != sign_key[ofs:]:
@@ -860,7 +850,7 @@
     """
     global exit_val
     collated = diffdir.collate2iters(restore_get_patched_rop_iter(col_stats),
-                                     globals.select)
+                                     config.select)
     diff_count = 0
     total_count = 0
     for backup_ropath, current_path in collated:
@@ -868,7 +858,7 @@
             backup_ropath = path.ROPath(current_path.index)
         if not current_path:
             current_path = path.ROPath(backup_ropath.index)
-        if not backup_ropath.compare_verbose(current_path, globals.compare_data):
+        if not backup_ropath.compare_verbose(current_path, config.compare_data):
             diff_count += 1
         total_count += 1
     # Unfortunately, ngettext doesn't handle multiple number variables, so we
@@ -899,15 +889,15 @@
         return
 
     filestr = u"\n".join(map(util.fsdecode, extraneous))
-    if globals.force:
+    if config.force:
         log.Notice(ngettext(u"Deleting this file from backend:",
                             u"Deleting these files from backend:",
                             len(extraneous)) + u"\n" + filestr)
-        if not globals.dry_run:
+        if not config.dry_run:
             col_stats.backend.delete(ext_remote)
             for fn in ext_local:
                 try:
-                    globals.archive_dir_path.append(fn).delete()
+                    config.archive_dir_path.append(fn).delete()
                 except Exception:
                     pass
     else:
@@ -927,16 +917,16 @@
     @rtype: void
     @return: void
     """
-    assert globals.keep_chains is not None
+    assert config.keep_chains is not None
 
-    globals.remove_time = col_stats.get_nth_last_full_backup_time(globals.keep_chains)
+    config.remove_time = col_stats.get_nth_last_full_backup_time(config.keep_chains)
 
     remove_old(col_stats)
 
 
 def remove_old(col_stats):
     u"""
-    Remove backup files older than globals.remove_time from backend
+    Remove backup files older than config.remove_time from backend
 
     @type col_stats: CollectionStatus object
     @param col_stats: collection status
@@ -944,7 +934,7 @@
     @rtype: void
     @return: void
     """
-    assert globals.remove_time is not None
+    assert config.remove_time is not None
 
     def set_times_str(setlist):
         u"""Return string listing times of sets in setlist"""
@@ -954,7 +944,7 @@
         u"""Return string listing times of chains in chainlist"""
         return u"\n".join([dup_time.timetopretty(s.end_time) for s in chainlist])
 
-    req_list = col_stats.get_older_than_required(globals.remove_time)
+    req_list = col_stats.get_older_than_required(config.remove_time)
     if req_list:
         log.Warn(u"%s\n%s\n%s" %
                  (_(u"There are backup set(s) at time(s):"),
@@ -962,14 +952,14 @@
                   _(u"Which can't be deleted because newer sets depend on them.")))
 
     if (col_stats.matched_chain_pair and
-            col_stats.matched_chain_pair[1].end_time < globals.remove_time):
+            col_stats.matched_chain_pair[1].end_time < config.remove_time):
         log.Warn(_(u"Current active backup chain is older than specified time.  "
                    u"However, it will not be deleted.  To remove all your backups, "
                    u"manually purge the repository."))
 
-    chainlist = col_stats.get_chains_older_than(globals.remove_time)
+    chainlist = col_stats.get_chains_older_than(config.remove_time)
 
-    if globals.remove_all_inc_of_but_n_full_mode:
+    if config.remove_all_inc_of_but_n_full_mode:
         # ignore chains without incremental backups:
         chainlist = list(x for x in chainlist if
                          (isinstance(x, dup_collections.SignatureChain) and x.inclist) or
@@ -978,18 +968,18 @@
     if not chainlist:
         log.Notice(_(u"No old backup sets found, nothing deleted."))
         return
-    if globals.force:
+    if config.force:
         log.Notice(ngettext(u"Deleting backup chain at time:",
                             u"Deleting backup chains at times:",
                             len(chainlist)) +
                    u"\n" + chain_times_str(chainlist))
         # Add signature files too, since they won't be needed anymore
-        chainlist += col_stats.get_signature_chains_older_than(globals.remove_time)
+        chainlist += col_stats.get_signature_chains_older_than(config.remove_time)
         chainlist.reverse()  # save oldest for last
         for chain in chainlist:
             # if remove_all_inc_of_but_n_full_mode mode, remove only
             # incrementals one and not full
-            if globals.remove_all_inc_of_but_n_full_mode:
+            if config.remove_all_inc_of_but_n_full_mode:
                 if isinstance(chain, dup_collections.SignatureChain):
                     chain_desc = _(u"Deleting any incremental signature chain rooted at %s")
                 else:
@@ -1000,8 +990,8 @@
                 else:
                     chain_desc = _(u"Deleting complete backup chain %s")
             log.Notice(chain_desc % dup_time.timetopretty(chain.end_time))
-            if not globals.dry_run:
-                chain.delete(keep_full=globals.remove_all_inc_of_but_n_full_mode)
+            if not config.dry_run:
+                chain.delete(keep_full=config.remove_all_inc_of_but_n_full_mode)
         col_stats.set_values(sig_chain_warning=None)
     else:
         log.Notice(ngettext(u"Found old backup chain at the following time:",
@@ -1019,12 +1009,12 @@
     @return: void
     """
     action = u"replicate"
-    time = globals.restore_time or dup_time.curtime
-    src_stats = dup_collections.CollectionsStatus(globals.src_backend, None, action).set_values(sig_chain_warning=None)
-    tgt_stats = dup_collections.CollectionsStatus(globals.backend, None, action).set_values(sig_chain_warning=None)
+    time = config.restore_time or dup_time.curtime
+    src_stats = dup_collections.CollectionsStatus(config.src_backend, None, action).set_values(sig_chain_warning=None)
+    tgt_stats = dup_collections.CollectionsStatus(config.backend, None, action).set_values(sig_chain_warning=None)
 
-    src_list = globals.src_backend.list()
-    tgt_list = globals.backend.list()
+    src_list = config.src_backend.list()
+    tgt_list = config.backend.list()
 
     src_chainlist = src_stats.get_signature_chains(local=False, filelist=src_list)[0]
     tgt_chainlist = tgt_stats.get_signature_chains(local=False, filelist=tgt_list)[0]
@@ -1054,14 +1044,14 @@
                 dup_time.setprevtime(src_sig.start_time)
             dup_time.setcurtime(src_sig.time or src_sig.end_time)
             log.Notice(_(u"Replicating %s.") % (src_sig_filename,))
-            fileobj = globals.src_backend.get_fileobj_read(src_sig_filename)
-            filename = file_naming.get(src_sig.type, encrypted=globals.encryption, gzipped=globals.compression)
+            fileobj = config.src_backend.get_fileobj_read(src_sig_filename)
+            filename = file_naming.get(src_sig.type, encrypted=config.encryption, gzipped=config.compression)
             tdp = dup_temp.new_tempduppath(file_naming.parse(filename))
             tmpobj = tdp.filtered_open(mode=u'wb')
             util.copyfileobj(fileobj, tmpobj)  # decrypt, compress, (re)-encrypt
             fileobj.close()
             tmpobj.close()
-            globals.backend.put(tdp, filename)
+            config.backend.put(tdp, filename)
             tdp.delete()
 
     src_chainlist = src_stats.get_backup_chains(filename_list=src_list)[0]
@@ -1093,14 +1083,14 @@
             mf = manifest.Manifest(fh=mf_tdp.filtered_open(mode=u'wb'))
             for i, filename in list(src_set.volume_name_dict.items()):
                 log.Notice(_(u"Replicating %s.") % (filename,))
-                fileobj = restore_get_enc_fileobj(globals.src_backend, filename, rmf.volume_info_dict[i])
-                filename = file_naming.get(src_set.type, i, encrypted=globals.encryption, gzipped=globals.compression)
+                fileobj = restore_get_enc_fileobj(config.src_backend, filename, rmf.volume_info_dict[i])
+                filename = file_naming.get(src_set.type, i, encrypted=config.encryption, gzipped=config.compression)
                 tdp = dup_temp.new_tempduppath(file_naming.parse(filename))
                 tmpobj = tdp.filtered_open(mode=u'wb')
                 util.copyfileobj(fileobj, tmpobj)  # decrypt, compress, (re)-encrypt
                 fileobj.close()
                 tmpobj.close()
-                globals.backend.put(tdp, filename)
+                config.backend.put(tdp, filename)
 
                 vi = copy.copy(rmf.volume_info_dict[i])
                 vi.set_hash(u"SHA1", gpg.get_hash(u"SHA1", tdp))
@@ -1113,18 +1103,18 @@
             mf_fileobj = mf_tdp.filtered_open_with_delete(mode=u'rb')
             mf_final_filename = file_naming.get(src_set.type,
                                                 manifest=True,
-                                                encrypted=globals.encryption,
-                                                gzipped=globals.compression)
+                                                encrypted=config.encryption,
+                                                gzipped=config.compression)
             mf_final_tdp = dup_temp.new_tempduppath(file_naming.parse(mf_final_filename))
             mf_final_fileobj = mf_final_tdp.filtered_open(mode=u'wb')
             util.copyfileobj(mf_fileobj, mf_final_fileobj)  # compress, encrypt
             mf_fileobj.close()
             mf_final_fileobj.close()
-            globals.backend.put(mf_final_tdp, mf_final_filename)
+            config.backend.put(mf_final_tdp, mf_final_filename)
             mf_final_tdp.delete()
 
-    globals.src_backend.close()
-    globals.backend.close()
+    config.src_backend.close()
+    config.backend.close()
 
 
 def sync_archive(col_stats):
@@ -1146,13 +1136,13 @@
 
         Otherwise, only the metadata for the target chain needs sync.
         """
-        if globals.metadata_sync_mode == u"full":
+        if config.metadata_sync_mode == u"full":
             return True
-        assert globals.metadata_sync_mode == u"partial"
+        assert config.metadata_sync_mode == u"partial"
         parsed = file_naming.parse(filename)
         try:
             target_chain = col_stats.get_backup_chain_at_time(
-                globals.restore_time or dup_time.curtime)
+                config.restore_time or dup_time.curtime)
         except dup_collections.CollectionsError:
             # With zero or multiple chains at this time, do a full sync
             return True
@@ -1226,7 +1216,7 @@
         return (pr, loc_name, fn)
 
     def remove_local(fn):
-        del_name = globals.archive_dir_path.append(fn).name
+        del_name = config.archive_dir_path.append(fn).name
 
         log.Notice(_(u"Deleting local %s (not authoritative at backend).") %
                    util.fsdecode(del_name))
@@ -1285,7 +1275,7 @@
 
         pr, loc_name, rem_name = resolve_basename(fn)
 
-        fileobj = globals.backend.get_fileobj_read(fn)
+        fileobj = config.backend.get_fileobj_read(fn)
         src_iter = SrcIter(fileobj)
         tdp = dup_temp.new_tempduppath(file_naming.parse(loc_name))
         if pr.manifest:
@@ -1293,14 +1283,14 @@
         else:
             gpg.GzipWriteFile(src_iter, tdp.name, size=sys.maxsize)
         tdp.setdata()
-        tdp.move(globals.archive_dir_path.append(loc_name))
+        tdp.move(config.archive_dir_path.append(loc_name))
 
     # get remote metafile list
-    remlist = globals.backend.list()
+    remlist = config.backend.list()
     remote_metafiles, ignored, rem_needpass = get_metafiles(remlist)
 
     # get local metafile list
-    loclist = globals.archive_dir_path.listdir()
+    loclist = config.archive_dir_path.listdir()
     local_metafiles, local_partials, loc_needpass = get_metafiles(loclist)
 
     # we have the list of metafiles on both sides. remote is always
@@ -1333,15 +1323,15 @@
     else:
         local_missing.sort()
         local_spurious.sort()
-        if not globals.dry_run:
+        if not config.dry_run:
             log.Notice(_(u"Synchronizing remote metadata to local cache..."))
             if local_missing and (rem_needpass or loc_needpass):
                 # password for the --encrypt-key
-                globals.gpg_profile.passphrase = get_passphrase(1, u"sync")
+                config.gpg_profile.passphrase = get_passphrase(1, u"sync")
             for fn in local_spurious:
                 remove_local(fn)
-            if hasattr(globals.backend, u'pre_process_download'):
-                globals.backend.pre_process_download(local_missing)
+            if hasattr(config.backend, u'pre_process_download'):
+                config.backend.pre_process_download(local_missing)
             for fn in local_missing:
                 copy_to_local(fn)
             col_stats.set_values()
@@ -1367,7 +1357,7 @@
     assert col_stats.all_backup_chains
     last_backup_set = col_stats.all_backup_chains[-1].get_last()
     # check remote manifest only if we can decrypt it (see #1729796)
-    check_remote = not globals.encryption or globals.gpg_profile.passphrase
+    check_remote = not config.encryption or config.gpg_profile.passphrase
     last_backup_set.check_manifests(check_remote=check_remote)
 
 
@@ -1399,8 +1389,8 @@
         # Calculate space we need for at least 2 volumes of full or inc
         # plus about 30% of one volume for the signature files.
         freespace = stats.f_frsize * stats.f_bavail
-        needspace = (((globals.async_concurrency + 1) * globals.volsize) +
-                     int(0.30 * globals.volsize))
+        needspace = (((config.async_concurrency + 1) * config.volsize) +
+                     int(0.30 * config.volsize))
         if freespace < needspace:
             log.FatalError(_(u"Temp space has %d available, backup needs approx %d.") %
                            (freespace, needspace), log.ErrorCode.not_enough_freespace)
@@ -1438,7 +1428,7 @@
 class Restart(object):
     u"""
     Class to aid in restart of inc or full backup.
-    Instance in globals.restart if restart in progress.
+    Instance in config.restart if restart in progress.
     """
 
     def __init__(self, last_backup):
@@ -1524,10 +1514,10 @@
     # determine what action we're performing and process command line
     action = commandline.ProcessCommandLine(sys.argv[1:])
 
-    globals.lockpath = os.path.join(globals.archive_dir_path.name, b"lockfile")
-    globals.lockfile = fasteners.process_lock.InterProcessLock(globals.lockpath)
-    log.Debug(_(u"Acquiring lockfile %s") % globals.lockpath)
-    if not globals.lockfile.acquire(blocking=False):
+    config.lockpath = os.path.join(config.archive_dir_path.name, b"lockfile")
+    config.lockfile = fasteners.process_lock.InterProcessLock(config.lockpath)
+    log.Debug(_(u"Acquiring lockfile %s") % config.lockpath)
+    if not config.lockfile.acquire(blocking=False):
         log.FatalError(
             u"Another duplicity instance is already running with this archive directory\n",
             log.ErrorCode.user_error)
@@ -1543,8 +1533,8 @@
 
 def do_backup(action):
     # set the current time strings again now that we have time separator
-    if globals.current_time:
-        dup_time.setcurtime(globals.current_time)
+    if config.current_time:
+        dup_time.setcurtime(config.current_time)
     else:
         dup_time.setcurtime()
 
@@ -1555,8 +1545,8 @@
     check_resources(action)
 
     # get current collection status
-    col_stats = dup_collections.CollectionsStatus(globals.backend,
-                                                  globals.archive_dir_path,
+    col_stats = dup_collections.CollectionsStatus(config.backend,
+                                                  config.archive_dir_path,
                                                   action).set_values()
 
     # check archive synch with remote, fix if needed
@@ -1574,15 +1564,15 @@
             if last_backup.partial:
                 if action in [u"full", u"inc"]:
                     # set restart parms from last_backup info
-                    globals.restart = Restart(last_backup)
+                    config.restart = Restart(last_backup)
                     # (possibly) reset action
-                    action = globals.restart.type
+                    action = config.restart.type
                     # reset the time strings
                     if action == u"full":
-                        dup_time.setcurtime(globals.restart.time)
+                        dup_time.setcurtime(config.restart.time)
                     else:
-                        dup_time.setcurtime(globals.restart.end_time)
-                        dup_time.setprevtime(globals.restart.start_time)
+                        dup_time.setcurtime(config.restart.end_time)
+                        dup_time.setprevtime(config.restart.start_time)
                     # log it -- main restart heavy lifting is done in write_multivol
                     log.Notice(_(u"Last %s backup left a partial set, restarting." % action))
                     break
@@ -1590,8 +1580,8 @@
                     # remove last partial backup and get new collection status
                     log.Notice(_(u"Cleaning up previous partial %s backup set, restarting." % action))
                     last_backup.delete()
-                    col_stats = dup_collections.CollectionsStatus(globals.backend,
-                                                                  globals.archive_dir_path,
+                    col_stats = dup_collections.CollectionsStatus(config.backend,
+                                                                  config.archive_dir_path,
                                                                   action).set_values()
                     continue
             break
@@ -1603,8 +1593,8 @@
         log.Notice(_(u"Last full backup date:") + u" " + dup_time.timetopretty(last_full_time))
     else:
         log.Notice(_(u"Last full backup date: none"))
-    if not globals.restart and action == u"inc" and globals.full_force_time is not None and \
-       last_full_time < globals.full_force_time:
+    if not config.restart and action == u"inc" and config.full_force_time is not None and \
+       last_full_time < config.full_force_time:
         log.Notice(_(u"Last full backup is too old, forcing full backup"))
         action = u"full"
     log.PrintCollectionStatus(col_stats)
@@ -1612,7 +1602,7 @@
     os.umask(0o77)
 
     # get the passphrase if we need to based on action/options
-    globals.gpg_profile.passphrase = get_passphrase(1, action)
+    config.gpg_profile.passphrase = get_passphrase(1, action)
 
     if action == u"restore":
         restore(col_stats)
@@ -1621,10 +1611,10 @@
     elif action == u"list-current":
         list_current(col_stats)
     elif action == u"collection-status":
-        if not globals.file_changed:
+        if not config.file_changed:
             log.PrintCollectionStatus(col_stats, True)
         else:
-            log.PrintCollectionFileChangedStatus(col_stats, globals.file_changed, True)
+            log.PrintCollectionFileChangedStatus(col_stats, config.file_changed, True)
     elif action == u"cleanup":
         cleanup(col_stats)
     elif action == u"remove-old":
@@ -1640,19 +1630,19 @@
         # the passphrase for full and inc is used by --sign-key
         # the sign key can have a different passphrase than the encrypt
         # key, therefore request a passphrase
-        if globals.gpg_profile.sign_key:
-            globals.gpg_profile.signing_passphrase = get_passphrase(1, action, True)
+        if config.gpg_profile.sign_key:
+            config.gpg_profile.signing_passphrase = get_passphrase(1, action, True)
 
         # if there are no recipients (no --encrypt-key), it must be a
         # symmetric key. Therefore, confirm the passphrase
-        if not (globals.gpg_profile.recipients or globals.gpg_profile.hidden_recipients):
-            globals.gpg_profile.passphrase = get_passphrase(2, action)
+        if not (config.gpg_profile.recipients or config.gpg_profile.hidden_recipients):
+            config.gpg_profile.passphrase = get_passphrase(2, action)
             # a limitation in the GPG implementation does not allow for
             # inputting different passphrases, this affects symmetric+sign.
             # Allow an empty passphrase for the key though to allow a non-empty
             # symmetric key
-            if (globals.gpg_profile.signing_passphrase and
-                    globals.gpg_profile.passphrase != globals.gpg_profile.signing_passphrase):
+            if (config.gpg_profile.signing_passphrase and
+                    config.gpg_profile.passphrase != config.gpg_profile.signing_passphrase):
                 log.FatalError(_(
                     u"When using symmetric encryption, the signing passphrase "
                     u"must equal the encryption passphrase."),
@@ -1666,13 +1656,13 @@
             if not sig_chain:
                 full_backup(col_stats)
             else:
-                if not globals.restart:
+                if not config.restart:
                     # only ask for a passphrase if there was a previous backup
                     if col_stats.all_backup_chains:
-                        globals.gpg_profile.passphrase = get_passphrase(1, action)
+                        config.gpg_profile.passphrase = get_passphrase(1, action)
                         check_last_manifest(col_stats)  # not needed for full backups
                 incremental_backup(sig_chain)
-    globals.backend.close()
+    config.backend.close()
     log.shutdown()
     if exit_val is not None:
         sys.exit(exit_val)

=== modified file 'duplicity/dup_temp.py'
--- duplicity/dup_temp.py	2019-09-22 23:44:56 +0000
+++ duplicity/dup_temp.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -31,11 +31,10 @@
 import shutil
 
 from duplicity import log
-from duplicity import util
 from duplicity import path
 from duplicity import file_naming
 from duplicity import tempdir
-from duplicity import globals
+from duplicity import config
 from duplicity import gpg
 
 
@@ -75,7 +74,7 @@
     return fileobject is closed, rename to final position.  filename
     must be a recognizable duplicity data file.
     """
-    if not globals.restart:
+    if not config.restart:
         td = tempdir.TemporaryDirectory(dirpath.name)
         tdpname = td.mktemp()
         tdp = TempDupPath(tdpname, parseresults=file_naming.parse(partname))
@@ -93,7 +92,7 @@
         tdp.rename(dirpath.append(partname))
         td.forget(tdpname)
 
-    if not globals.restart:
+    if not config.restart:
         fh.addhook(rename_and_forget)
 
     return fh
@@ -171,7 +170,7 @@
         u"""
         We have achieved the first checkpoint, make file visible and permanent.
         """
-        assert not globals.restart
+        assert not config.restart
         self.tdp.rename(self.dirpath.append(self.partname))
         self.fileobj.flush()
         del self.hooklist[0]
@@ -188,10 +187,10 @@
         if pr.compressed:
             gpg.GzipWriteFile(src_iter, tgt.name, size=sys.maxsize)
         elif pr.encrypted:
-            gpg.GPGWriteFile(src_iter, tgt.name, globals.gpg_profile, size=sys.maxsize)
+            gpg.GPGWriteFile(src_iter, tgt.name, config.gpg_profile, size=sys.maxsize)
         else:
             shutil.copyfile(src.name, tgt.name)
-        globals.backend.move(tgt)  # @UndefinedVariable
+        config.backend.move(tgt)
 
     def to_final(self):
         u"""

=== modified file 'duplicity/dup_threading.py'
--- duplicity/dup_threading.py	2018-11-29 19:00:15 +0000
+++ duplicity/dup_threading.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -186,7 +186,7 @@
     # not care about hash lookup overhead since this is intended to be
     # used for significant amounts of work.
 
-    cv = threading.Condition()  # @UndefinedVariable
+    cv = threading.Condition()
     state = {u'done': False,
              u'error': None,
              u'trace': None,
@@ -259,7 +259,7 @@
         """
         self.__value = value
 
-        self.__cv = threading.Condition()  # @UndefinedVariable
+        self.__cv = threading.Condition()
 
     def get(self):
         u"""

=== modified file 'duplicity/dup_time.py'
--- duplicity/dup_time.py	2019-08-08 19:31:58 +0000
+++ duplicity/dup_time.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -30,7 +30,7 @@
 import re
 import calendar
 import sys
-from duplicity import globals
+from duplicity import config
 from duplicity import util
 
 # For type testing against both int and long types that works in python 2/3
@@ -94,13 +94,13 @@
 def timetostring(timeinseconds):
     u"""Return w3 or duplicity datetime compliant listing of timeinseconds"""
 
-    if globals.old_filenames:
+    if config.old_filenames:
         # We need to know if DST applies to append the correct offset. So
         #    1. Save the tuple returned by localtime.
         #    2. Pass the DST flag into gettzd
         lcltime = time.localtime(timeinseconds)
-        return time.strftime(u"%Y-%m-%dT%H" + globals.time_separator +
-                             u"%M" + globals.time_separator + u"%S",
+        return time.strftime(u"%Y-%m-%dT%H" + config.time_separator +
+                             u"%M" + config.time_separator + u"%S",
                              lcltime) + gettzd(lcltime[-1])
     else:
         # DST never applies to UTC
@@ -126,7 +126,7 @@
             # old format for filename time
             year, month, day = list(map(int, date.split(u"-")))
             hour, minute, second = list(map(int,
-                                        daytime.split(globals.time_separator)))
+                                        daytime.split(config.time_separator)))
         assert 1900 < year < 2100, year
         assert 1 <= month <= 12
         assert 1 <= day <= 31
@@ -247,7 +247,7 @@
     hours, minutes = list(map(abs, divmod(offset, 60)))
     assert 0 <= hours <= 23
     assert 0 <= minutes <= 59
-    return u"%s%02d%s%02d" % (prefix, hours, globals.time_separator, minutes)
+    return u"%s%02d%s%02d" % (prefix, hours, config.time_separator, minutes)
 
 
 def tzdtoseconds(tzd):
@@ -256,7 +256,7 @@
         return 0
     assert len(tzd) == 6  # only accept forms like +08:00 for now
     assert (tzd[0] == u"-" or tzd[0] == u"+") and \
-        tzd[3] == globals.time_separator
+        tzd[3] == config.time_separator
     return -60 * (60 * int(tzd[:3]) + int(tzd[4:]))
 
 

=== modified file 'duplicity/errors.py'
--- duplicity/errors.py	2018-09-11 21:35:37 +0000
+++ duplicity/errors.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'duplicity/file_naming.py'
--- duplicity/file_naming.py	2018-11-29 19:00:15 +0000
+++ duplicity/file_naming.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -26,7 +26,7 @@
 from builtins import object
 import re
 from duplicity import dup_time
-from duplicity import globals
+from duplicity import config
 import sys
 
 full_vol_re = None
@@ -61,47 +61,47 @@
     if full_vol_re and not force:
         return
 
-    full_vol_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"duplicity-full"
+    full_vol_re = re.compile(b"^" + config.file_prefix + config.file_prefix_archive + b"duplicity-full"
                              b"\\.(?P<time>.*?)"
                              b"\\.vol(?P<num>[0-9]+)"
                              b"\\.difftar"
                              b"(?P<partial>(\\.part))?"
                              b"($|\\.)")
 
-    full_vol_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"df"
+    full_vol_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_archive + b"df"
                                    b"\\.(?P<time>[0-9a-z]+?)"
                                    b"\\.(?P<num>[0-9a-z]+)"
                                    b"\\.dt"
                                    b"(?P<partial>(\\.p))?"
                                    b"($|\\.)")
 
-    full_manifest_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"duplicity-full"
+    full_manifest_re = re.compile(b"^" + config.file_prefix + config.file_prefix_manifest + b"duplicity-full"
                                   b"\\.(?P<time>.*?)"
                                   b"\\.manifest"
                                   b"(?P<partial>(\\.part))?"
                                   b"($|\\.)")
 
-    full_manifest_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"df"
+    full_manifest_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_manifest + b"df"
                                         b"\\.(?P<time>[0-9a-z]+?)"
                                         b"\\.m"
                                         b"(?P<partial>(\\.p))?"
                                         b"($|\\.)")
 
-    inc_vol_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"duplicity-inc"
+    inc_vol_re = re.compile(b"^" + config.file_prefix + config.file_prefix_archive + b"duplicity-inc"
                             b"\\.(?P<start_time>.*?)"
                             b"\\.to\\.(?P<end_time>.*?)"
                             b"\\.vol(?P<num>[0-9]+)"
                             b"\\.difftar"
                             b"($|\\.)")
 
-    inc_vol_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"di"
+    inc_vol_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_archive + b"di"
                                   b"\\.(?P<start_time>[0-9a-z]+?)"
                                   b"\\.(?P<end_time>[0-9a-z]+?)"
                                   b"\\.(?P<num>[0-9a-z]+)"
                                   b"\\.dt"
                                   b"($|\\.)")
 
-    inc_manifest_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"duplicity-inc"
+    inc_manifest_re = re.compile(b"^" + config.file_prefix + config.file_prefix_manifest + b"duplicity-inc"
                                  b"\\.(?P<start_time>.*?)"
                                  b"\\.to"
                                  b"\\.(?P<end_time>.*?)"
@@ -109,26 +109,26 @@
                                  b"(?P<partial>(\\.part))?"
                                  b"(\\.|$)")
 
-    inc_manifest_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"di"
+    inc_manifest_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_manifest + b"di"
                                        b"\\.(?P<start_time>[0-9a-z]+?)"
                                        b"\\.(?P<end_time>[0-9a-z]+?)"
                                        b"\\.m"
                                        b"(?P<partial>(\\.p))?"
                                        b"(\\.|$)")
 
-    full_sig_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"duplicity-full-signatures"
+    full_sig_re = re.compile(b"^" + config.file_prefix + config.file_prefix_signature + b"duplicity-full-signatures"
                              b"\\.(?P<time>.*?)"
                              b"\\.sigtar"
                              b"(?P<partial>(\\.part))?"
                              b"(\\.|$)")
 
-    full_sig_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"dfs"
+    full_sig_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_signature + b"dfs"
                                    b"\\.(?P<time>[0-9a-z]+?)"
                                    b"\\.st"
                                    b"(?P<partial>(\\.p))?"
                                    b"(\\.|$)")
 
-    new_sig_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"duplicity-new-signatures"
+    new_sig_re = re.compile(b"^" + config.file_prefix + config.file_prefix_signature + b"duplicity-new-signatures"
                             b"\\.(?P<start_time>.*?)"
                             b"\\.to"
                             b"\\.(?P<end_time>.*?)"
@@ -136,7 +136,7 @@
                             b"(?P<partial>(\\.part))?"
                             b"(\\.|$)")
 
-    new_sig_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"dns"
+    new_sig_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_signature + b"dns"
                                   b"\\.(?P<start_time>[0-9a-z]+?)"
                                   b"\\.(?P<end_time>[0-9a-z]+?)"
                                   b"\\.st"
@@ -189,12 +189,12 @@
     if encrypted:
         gzipped = False
     if encrypted:
-        if globals.short_filenames:
+        if config.short_filenames:
             suffix = b'.g'
         else:
             suffix = b".gpg"
     elif gzipped:
-        if globals.short_filenames:
+        if config.short_filenames:
             suffix = b".z"
         else:
             suffix = b'.gz'
@@ -203,7 +203,7 @@
     return suffix
 
 
-def get(type, volume_number=None, manifest=False,
+def get(type, volume_number=None, manifest=False,  # pylint: disable=redefined-builtin
         encrypted=False, gzipped=False, partial=False):
     u"""
     Return duplicity filename of specified type
@@ -217,7 +217,7 @@
         gzipped = False
     suffix = get_suffix(encrypted, gzipped)
     part_string = b""
-    if globals.short_filenames:
+    if config.short_filenames:
         if partial:
             part_string = b".p"
     else:
@@ -228,23 +228,23 @@
         assert not volume_number and not manifest
         assert not (volume_number and part_string)
         if type == u"full-sig":
-            if globals.short_filenames:
-                return (globals.file_prefix + globals.file_prefix_signature +
+            if config.short_filenames:
+                return (config.file_prefix + config.file_prefix_signature +
                         b"dfs.%s.st%s%s" %
                         (to_base36(dup_time.curtime), part_string, suffix))
             else:
-                return (globals.file_prefix + globals.file_prefix_signature +
+                return (config.file_prefix + config.file_prefix_signature +
                         b"duplicity-full-signatures.%s.sigtar%s%s" %
                         (dup_time.curtimestr.encode(), part_string, suffix))
         elif type == u"new-sig":
-            if globals.short_filenames:
-                return (globals.file_prefix + globals.file_prefix_signature +
+            if config.short_filenames:
+                return (config.file_prefix + config.file_prefix_signature +
                         b"dns.%s.%s.st%s%s" %
                         (to_base36(dup_time.prevtime),
                          to_base36(dup_time.curtime),
                          part_string, suffix))
             else:
-                return (globals.file_prefix + globals.file_prefix_signature +
+                return (config.file_prefix + config.file_prefix_signature +
                         b"duplicity-new-signatures.%s.to.%s.sigtar%s%s" %
                         (dup_time.prevtimestr.encode(), dup_time.curtimestr.encode(),
                          part_string, suffix))
@@ -252,30 +252,30 @@
         assert volume_number or manifest
         assert not (volume_number and manifest)
 
-        prefix = globals.file_prefix
+        prefix = config.file_prefix
 
         if volume_number:
-            if globals.short_filenames:
+            if config.short_filenames:
                 vol_string = b"%s.dt" % to_base36(volume_number)
             else:
                 vol_string = b"vol%d.difftar" % volume_number
-            prefix += globals.file_prefix_archive
+            prefix += config.file_prefix_archive
         else:
-            if globals.short_filenames:
+            if config.short_filenames:
                 vol_string = b"m"
             else:
                 vol_string = b"manifest"
-            prefix += globals.file_prefix_manifest
+            prefix += config.file_prefix_manifest
 
         if type == u"full":
-            if globals.short_filenames:
+            if config.short_filenames:
                 return (b"%sdf.%s.%s%s%s" % (prefix, to_base36(dup_time.curtime),
                                              vol_string, part_string, suffix))
             else:
                 return (b"%sduplicity-full.%s.%s%s%s" % (prefix, dup_time.curtimestr.encode(),
                                                          vol_string, part_string, suffix))
         elif type == u"inc":
-            if globals.short_filenames:
+            if config.short_filenames:
                 return (b"%sdi.%s.%s.%s%s%s" % (prefix, to_base36(dup_time.prevtime),
                                                 to_base36(dup_time.curtime),
                                                 vol_string, part_string, suffix))
@@ -324,7 +324,7 @@
         short = True
         m1 = full_vol_re_short.search(filename)
         m2 = full_manifest_re_short.search(filename)
-        if not m1 and not m2 and not globals.short_filenames:
+        if not m1 and not m2 and not config.short_filenames:
             short = False
             m1 = full_vol_re.search(filename)
             m2 = full_manifest_re.search(filename)
@@ -347,7 +347,7 @@
         short = True
         m1 = inc_vol_re_short.search(filename)
         m2 = inc_manifest_re_short.search(filename)
-        if not m1 and not m2 and not globals.short_filenames:
+        if not m1 and not m2 and not config.short_filenames:
             short = False
             m1 = inc_vol_re.search(filename)
             m2 = inc_manifest_re.search(filename)
@@ -370,7 +370,7 @@
         prepare_regex()
         short = True
         m = full_sig_re_short.search(filename)
-        if not m and not globals.short_filenames:
+        if not m and not config.short_filenames:
             short = False
             m = full_sig_re.search(filename)
         if m:
@@ -383,7 +383,7 @@
 
         short = True
         m = new_sig_re_short.search(filename)
-        if not m and not globals.short_filenames:
+        if not m and not config.short_filenames:
             short = False
             m = new_sig_re.search(filename)
         if m:
@@ -399,13 +399,13 @@
         Set encryption and compression flags in ParseResults pr
         """
         if (filename.endswith(b'.z') or
-                not globals.short_filenames and filename.endswith(b'gz')):
+                not config.short_filenames and filename.endswith(b'gz')):
             pr.compressed = 1
         else:
             pr.compressed = None
 
         if (filename.endswith(b'.g') or
-                not globals.short_filenames and filename.endswith(b'.gpg')):
+                not config.short_filenames and filename.endswith(b'.gpg')):
             pr.encrypted = 1
         else:
             pr.encrypted = None
@@ -425,7 +425,7 @@
     u"""
     Hold information taken from a duplicity filename
     """
-    def __init__(self, type, manifest=None, volume_number=None,
+    def __init__(self, type, manifest=None, volume_number=None,  # pylint: disable=redefined-builtin
                  time=None, start_time=None, end_time=None,
                  encrypted=None, compressed=None, partial=False):
 

=== modified file 'duplicity/filechunkio.py'
--- duplicity/filechunkio.py	2018-09-11 21:35:37 +0000
+++ duplicity/filechunkio.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2011 Fabian Topfstedt <topfstedt@xxxxxxxxxxxxxxxxxxx>
 #
@@ -18,8 +18,8 @@
     u"""
     A class that allows you reading only a chunk of a file.
     """
-    def __init__(self, name, mode=u'r', closefd=True, offset=0, bytes=None,
-                 *args, **kwargs):
+    def __init__(self, name, mode=u'r', closefd=True, offset=0, bytes=None,  # pylint: disable=redefined-builtin
+                 *args, **kwargs):  # pylint: disable=redefined-builtin
         u"""
         Open a file chunk. The mode can only be 'r' for reading. Offset
         is the amount of bytes that the chunks starts after the real file's

=== modified file 'duplicity/globmatch.py'
--- duplicity/globmatch.py	2018-11-29 19:00:15 +0000
+++ duplicity/globmatch.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'duplicity/gpg.py'
--- duplicity/gpg.py	2020-02-06 15:27:43 +0000
+++ duplicity/gpg.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -30,16 +30,13 @@
 from builtins import object
 import os
 import sys
-import types
 import tempfile
 import re
 import gzip
 import locale
-import platform
 
-from duplicity import globals
+from duplicity import config
 from duplicity import gpginterface
-from duplicity import log
 from duplicity import tempdir
 from duplicity import util
 
@@ -92,7 +89,7 @@
         else:
             self.hidden_recipients = []
 
-        self.gpg_version = self.get_gpg_version(globals.gpg_binary)
+        self.gpg_version = self.get_gpg_version(config.gpg_binary)
 
     rc = re.compile
     _version_re = rc(b'^gpg.*\\(GnuPG(?:/MacGPG2)?\\) (?P<maj>[0-9]+)\\.(?P<min>[0-9]+)\\.(?P<bug>[0-9]+)(-.+)?$')
@@ -103,8 +100,8 @@
             gnupg.call = binary
 
         # user supplied options
-        if globals.gpg_options:
-            for opt in globals.gpg_options.split():
+        if config.gpg_options:
+            for opt in config.gpg_options.split():
                 gnupg.options.extra_args.append(opt)
 
         # get gpg version
@@ -143,15 +140,15 @@
         # Start GPG process - copied from GnuPGInterface docstring.
         gnupg = gpginterface.GnuPG()
         # overrides default gpg binary 'gpg'
-        if globals.gpg_binary is not None:
-            gnupg.call = globals.gpg_binary
+        if config.gpg_binary is not None:
+            gnupg.call = config.gpg_binary
         gnupg.options.meta_interactive = 0
         gnupg.options.extra_args.append(u'--no-secmem-warning')
         gnupg.options.extra_args.append(u'--ignore-mdc-error')
 
         # Support three versions of gpg present 1.x, 2.0.x, 2.1.x
         if profile.gpg_version[:1] == (1,):
-            if globals.use_agent:
+            if config.use_agent:
                 # gpg1 agent use is optional
                 gnupg.options.extra_args.append(u'--use-agent')
 
@@ -159,7 +156,7 @@
             pass
 
         elif profile.gpg_version[:2] >= (2, 1):
-            if not globals.use_agent:
+            if not config.use_agent:
                 # This forces gpg2 to ignore the agent.
                 # Necessary to enforce truly non-interactive operation.
                 gnupg.options.extra_args.append(u'--pinentry-mode=loopback')
@@ -168,8 +165,8 @@
             raise GPGError(u"Unsupported GNUPG version, %s" % profile.gpg_version)
 
         # user supplied options
-        if globals.gpg_options:
-            for opt in globals.gpg_options.split():
+        if config.gpg_options:
+            for opt in config.gpg_options.split():
                 gnupg.options.extra_args.append(opt)
 
         cmdlist = []
@@ -200,7 +197,7 @@
                 # use integrity protection
                 gnupg.options.extra_args.append(u'--force-mdc')
             # Skip the passphrase if using the agent
-            if globals.use_agent:
+            if config.use_agent:
                 gnupg_fhs = [u'stdin', ]
             else:
                 gnupg_fhs = [u'stdin', u'passphrase']
@@ -208,7 +205,7 @@
                            attach_fhs={u'stdout': encrypt_path.open(u"wb"),
                                        u'stderr': self.stderr_fp,
                                        u'logger': self.logger_fp})
-            if not globals.use_agent:
+            if not config.use_agent:
                 p1.handles[u'passphrase'].write(passphrase)
                 p1.handles[u'passphrase'].close()
             self.gpg_input = p1.handles[u'stdin']
@@ -218,7 +215,7 @@
                 cmdlist.append(profile.encrypt_secring)
             self.status_fp = tempfile.TemporaryFile(dir=tempdir.default().dir())
             # Skip the passphrase if using the agent
-            if globals.use_agent:
+            if config.use_agent:
                 gnupg_fhs = [u'stdout', ]
             else:
                 gnupg_fhs = [u'stdout', u'passphrase']
@@ -227,7 +224,7 @@
                                        u'status': self.status_fp,
                                        u'stderr': self.stderr_fp,
                                        u'logger': self.logger_fp})
-            if not(globals.use_agent):
+            if not(config.use_agent):
                 p1.handles[u'passphrase'].write(passphrase)
                 p1.handles[u'passphrase'].close()
             self.gpg_output = p1.handles[u'stdout']
@@ -365,16 +362,16 @@
     # workaround for circular module imports
     from duplicity import path
 
-    def top_off(bytes, file):
+    def top_off(bytelen, file):
         u"""
-        Add bytes of incompressible data to to_gpg_fp
+        Add bytelen of incompressible data to to_gpg_fp
 
         In this case we take the incompressible data from the
         beginning of filename (it should contain enough because size
         >> largest block size).
         """
         incompressible_fp = open(filename, u"rb")
-        assert util.copyfileobj(incompressible_fp, file.gpg_input, bytes) == bytes
+        assert util.copyfileobj(incompressible_fp, file.gpg_input, bytelen) == bytelen
         incompressible_fp.close()
 
     def get_current_size():
@@ -476,7 +473,7 @@
     return GzipWriteFile(block_iter, filename, size, gzipped)
 
 
-def get_hash(hash, path, hex=1):
+def get_hash(hash, path, hex=1):  # pylint: disable=redefined-builtin
     u"""
     Return hash of path
 

=== modified file 'duplicity/gpginterface.py'
--- duplicity/gpginterface.py	2018-11-29 19:00:15 +0000
+++ duplicity/gpginterface.py	2020-03-22 12:35:54 +0000
@@ -1,3 +1,5 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
+#
 u"""Interface to GNU Privacy Guard (GnuPG)
 
 !!! This was renamed to gpginterface.py.
@@ -234,7 +236,7 @@
 try:
     import threading
 except ImportError:
-    import dummy_threading  # @UnusedImport
+    import dummy_threading as threading
     log.Warn(_(u"Threading not available -- zombie processes may appear"))
 
 __author__ = u"Frank J. Tobin, ftobin@xxxxxxxxxxxxxxx"
@@ -708,7 +710,6 @@
 
 def _run_doctests():
     import doctest
-    from . import gpginterface
     return doctest.testmod(GnuPGInterface)
 
 

=== modified file 'duplicity/lazy.py'
--- duplicity/lazy.py	2019-02-21 21:48:53 +0000
+++ duplicity/lazy.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -20,13 +20,16 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 u"""Define some lazy data structures and functions acting on them"""
+
 from __future__ import print_function
 
 from builtins import map
 from builtins import next
 from builtins import range
 from builtins import object
+
 import os
+
 from duplicity import log
 from duplicity import robust
 from duplicity import util
@@ -36,47 +39,47 @@
     u"""Hold static methods for the manipulation of lazy iterators"""
 
     @staticmethod
-    def filter(predicate, iterator):  # @NoSelf
+    def filter(predicate, iterator):
         u"""Like filter in a lazy functional programming language"""
         for i in iterator:
             if predicate(i):
                 yield i
 
     @staticmethod
-    def map(function, iterator):  # @NoSelf
+    def map(function, iterator):
         u"""Like map in a lazy functional programming language"""
         for i in iterator:
             yield function(i)
 
     @staticmethod
-    def foreach(function, iterator):  # @NoSelf
+    def foreach(function, iterator):
         u"""Run function on each element in iterator"""
         for i in iterator:
             function(i)
 
     @staticmethod
-    def cat(*iters):  # @NoSelf
+    def cat(*iters):
         u"""Lazily concatenate iterators"""
-        for iter in iters:
+        for iter in iters:  # pylint: disable=redefined-builtin
             for i in iter:
                 yield i
 
     @staticmethod
-    def cat2(iter_of_iters):  # @NoSelf
+    def cat2(iter_of_iters):
         u"""Lazily concatenate iterators, iterated by big iterator"""
-        for iter in iter_of_iters:
+        for iter in iter_of_iters:  # pylint: disable=redefined-builtin
             for i in iter:
                 yield i
 
     @staticmethod
-    def empty(iter):  # @NoSelf
+    def empty(iter):  # pylint: disable=redefined-builtin
         u"""True if iterator has length 0"""
-        for i in iter:  # @UnusedVariable
+        for i in iter:
             return None
         return 1
 
     @staticmethod
-    def equal(iter1, iter2, verbose=None, operator=lambda x, y: x == y):  # @NoSelf
+    def equal(iter1, iter2, verbose=None, operator=lambda x, y: x == y):
         u"""True if iterator 1 has same elements as iterator 2
 
         Use equality operator, or == if it is unspecified.
@@ -102,7 +105,7 @@
         return None
 
     @staticmethod
-    def Or(iter):  # @NoSelf
+    def Or(iter):  # pylint: disable=redefined-builtin
         u"""True if any element in iterator is true.  Short circuiting"""
         i = None
         for i in iter:
@@ -111,7 +114,7 @@
         return i
 
     @staticmethod
-    def And(iter):  # @NoSelf
+    def And(iter):  # pylint: disable=redefined-builtin
         u"""True if all elements in iterator are true.  Short circuiting"""
         i = 1
         for i in iter:
@@ -120,7 +123,7 @@
         return i
 
     @staticmethod
-    def len(iter):  # @NoSelf
+    def len(iter):  # pylint: disable=redefined-builtin
         u"""Return length of iterator"""
         i = 0
         while 1:
@@ -131,7 +134,7 @@
             i = i + 1
 
     @staticmethod
-    def foldr(f, default, iter):  # @NoSelf
+    def foldr(f, default, iter):  # pylint: disable=redefined-builtin
         u"""foldr the "fundamental list recursion operator"?"""
         try:
             next_item = next(iter)
@@ -140,7 +143,7 @@
         return f(next_item, Iter.foldr(f, default, iter))
 
     @staticmethod
-    def foldl(f, default, iter):  # @NoSelf
+    def foldl(f, default, iter):  # pylint: disable=redefined-builtin
         u"""the fundamental list iteration operator.."""
         while 1:
             try:
@@ -150,7 +153,7 @@
             default = f(default, next_item)
 
     @staticmethod
-    def multiplex(iter, num_of_forks, final_func=None, closing_func=None):  # @NoSelf
+    def multiplex(iter, num_of_forks, final_func=None, closing_func=None):  # pylint: disable=redefined-builtin
         u"""Split a single iterater into a number of streams
 
         The return val will be a list with length num_of_forks, each
@@ -223,14 +226,14 @@
     to split it into 2.  By profiling, this is a time sensitive class.
 
     """
-    def __init__(self, iter):
+    def __init__(self, iter):  # pylint: disable=redefined-builtin
         self.a_leading_by = 0  # How many places a is ahead of b
         self.buffer = []
         self.iter = iter
 
     def yielda(self):
         u"""Return first iterator"""
-        buf, iter = self.buffer, self.iter
+        buf, iter = self.buffer, self.iter  # pylint: disable=redefined-builtin
         while(1):
             if self.a_leading_by >= 0:
                 # a is in front, add new element
@@ -247,7 +250,7 @@
 
     def yieldb(self):
         u"""Return second iterator"""
-        buf, iter = self.buffer, self.iter
+        buf, iter = self.buffer, self.iter  # pylint: disable=redefined-builtin
         while(1):
             if self.a_leading_by <= 0:
                 # b is in front, add new element
@@ -407,7 +410,7 @@
         assert branch.finished
         pass
 
-    def can_fast_process(self, *args):
+    def can_fast_process(self, *args):  # pylint: disable=unused-argument
         u"""True if object can be processed without new branch (stub)"""
         return None
 

=== modified file 'duplicity/librsync.py'
--- duplicity/librsync.py	2018-11-29 19:00:15 +0000
+++ duplicity/librsync.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -26,13 +26,14 @@
 
 """
 
+from builtins import object
 from builtins import str
-from builtins import object
+
+import array
 import os
 import sys
+
 from . import _librsync
-import types
-import array
 
 if os.environ.get(u'READTHEDOCS') == u'True':
     import mock  # pylint: disable=import-error

=== modified file 'duplicity/log.py'
--- duplicity/log.py	2019-08-08 19:31:58 +0000
+++ duplicity/log.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'duplicity/manifest.py'
--- duplicity/manifest.py	2019-06-29 21:05:41 +0000
+++ duplicity/manifest.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -21,7 +21,6 @@
 
 u"""Create and edit manifest for session contents"""
 
-from builtins import filter
 from builtins import map
 from builtins import range
 from builtins import object
@@ -29,9 +28,9 @@
 import re
 import sys
 
-from duplicity import globals
+from duplicity import config
 from duplicity import log
-from duplicity import globals
+from duplicity import config
 from duplicity import util
 
 
@@ -64,14 +63,14 @@
 
     def set_dirinfo(self):
         u"""
-        Set information about directory from globals,
+        Set information about directory from config,
         and write to manifest file.
 
         @rtype: Manifest
         @return: manifest
         """
-        self.hostname = globals.hostname
-        self.local_dirname = globals.local_path.name  # @UndefinedVariable
+        self.hostname = config.hostname
+        self.local_dirname = config.local_path.name
         if self.fh:
             if self.hostname:
                 self.fh.write(b"Hostname %s\n" % self.hostname.encode())
@@ -89,23 +88,23 @@
         @rtype: string
         @return: None or error message
         """
-        if globals.allow_source_mismatch:
+        if config.allow_source_mismatch:
             return
 
-        if self.hostname and self.hostname != globals.hostname:
+        if self.hostname and self.hostname != config.hostname:
             errmsg = _(u"Fatal Error: Backup source host has changed.\n"
                        u"Current hostname: %s\n"
-                       u"Previous hostname: %s") % (globals.hostname, self.hostname)
+                       u"Previous hostname: %s") % (config.hostname, self.hostname)
             code = log.ErrorCode.hostname_mismatch
-            code_extra = u"%s %s" % (util.escape(globals.hostname), util.escape(self.hostname))
+            code_extra = u"%s %s" % (util.escape(config.hostname), util.escape(self.hostname))
 
-        elif (self.local_dirname and self.local_dirname != globals.local_path.name):  # @UndefinedVariable
+        elif (self.local_dirname and self.local_dirname != config.local_path.name):
             errmsg = _(u"Fatal Error: Backup source directory has changed.\n"
                        u"Current directory: %s\n"
-                       u"Previous directory: %s") % (globals.local_path.name, self.local_dirname)  # @UndefinedVariable
+                       u"Previous directory: %s") % (config.local_path.name, self.local_dirname)
             code = log.ErrorCode.source_dir_mismatch
-            code_extra = u"%s %s" % (util.escape(globals.local_path.name),
-                                     util.escape(self.local_dirname))  # @UndefinedVariable
+            code_extra = u"%s %s" % (util.escape(config.local_path.name),
+                                     util.escape(self.local_dirname))
         else:
             return
 
@@ -223,7 +222,7 @@
 
         # Get file changed list - not needed if --file-changed not present
         filecount = 0
-        if globals.file_changed is not None:
+        if config.file_changed is not None:
             filelist_regexp = re.compile(b"(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S)
             match = filelist_regexp.search(s)
             if match:

=== modified file 'duplicity/patchdir.py'
--- duplicity/patchdir.py	2019-06-29 21:05:41 +0000
+++ duplicity/patchdir.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -20,26 +20,24 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 from builtins import map
-from builtins import filter
 from builtins import next
+from builtins import object
 from builtins import range
-from builtins import object
 
-import re  # @UnusedImport
-import types
-import os
+import re
 import sys
 import tempfile
 
-from duplicity import tarfile  # @UnusedImport
-from duplicity import librsync  # @UnusedImport
-from duplicity import log  # @UnusedImport
 from duplicity import diffdir
+from duplicity import config
+from duplicity import librsync
+from duplicity import log
 from duplicity import selection
+from duplicity import tarfile
 from duplicity import tempdir
-from duplicity import util  # @UnusedImport
-from duplicity.path import *  # @UnusedWildImport
-from duplicity.lazy import *  # @UnusedWildImport
+from duplicity import util
+from duplicity.lazy import *  # pylint: disable=unused-wildcard-import,redefined-builtin
+from duplicity.path import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 
 u"""Functions for patching of directories"""
 
@@ -236,8 +234,7 @@
         u"""Add next chunk to buffer"""
         if self.at_end:
             return None
-        index, difftype, multivol = get_index_from_tarinfo(  # @UnusedVariable
-            self.tarinfo_list[0])
+        index, difftype, multivol = get_index_from_tarinfo(self.tarinfo_list[0])
         if not multivol or index != self.index:
             # we've moved on
             # the following communicates next tarinfo to difftar2path_iter
@@ -295,7 +292,7 @@
         if self.dir_diff_ropath:
             self.dir_diff_ropath.copy_attribs(self.dir_basis_path)
 
-    def can_fast_process(self, index, basis_path, diff_ropath):
+    def can_fast_process(self, index, basis_path, diff_ropath):  # pylint: disable=unused-argument
         u"""No need to recurse if diff_ropath isn't a directory"""
         return not (diff_ropath and diff_ropath.isdir())
 
@@ -608,7 +605,7 @@
                 ropath.copy(new_path)
 
         self.dir_new_path = self.base_path.new_index(index)
-        if self.dir_new_path.exists() and not globals.force:
+        if self.dir_new_path.exists() and not config.force:
             # base may exist, but nothing else
             assert index == (), index
         else:
@@ -620,7 +617,7 @@
         if self.dir_diff_ropath:
             self.dir_diff_ropath.copy_attribs(self.dir_new_path)
 
-    def can_fast_process(self, index, ropath):
+    def can_fast_process(self, index, ropath):  # pylint: disable=unused-argument
         u"""Can fast process (no recursion) if ropath isn't a directory"""
         log.Info(_(u"Writing %s of type %s") %
                  (util.fsdecode(ropath.get_relative_path()), ropath.type),

=== modified file 'duplicity/path.py'
--- duplicity/path.py	2020-02-06 15:27:43 +0000
+++ duplicity/path.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -29,29 +29,28 @@
 from __future__ import print_function
 from future import standard_library
 standard_library.install_aliases()
-from builtins import filter
 from builtins import str
 from builtins import object
 
-import stat
 import errno
+import gzip
+import os
+import re
+import shutil
 import socket
+import stat
 import time
-import re
-import gzip
-import shutil
-import sys
 
-from duplicity import tarfile
+from duplicity import cached_ops
+from duplicity import config
+from duplicity import dup_time
 from duplicity import file_naming
-from duplicity import globals
 from duplicity import gpg
+from duplicity import librsync
+from duplicity import log
+from duplicity import tarfile
 from duplicity import util
-from duplicity import librsync
-from duplicity import log  # @UnusedImport
-from duplicity import dup_time
-from duplicity import cached_ops
-from duplicity.lazy import *  # @UnusedWildImport
+from duplicity.lazy import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 
 _copy_blocksize = 64 * 1024
 _tmp_path_counter = 1
@@ -75,7 +74,7 @@
     have a name.  They are required to be indexed though.
 
     """
-    def __init__(self, index, stat=None):
+    def __init__(self, index, stat=None):  # pylint: disable=unused-argument
         u"""ROPath initializer"""
         self.opened, self.fileobj = None, None
         self.index = index
@@ -99,7 +98,7 @@
         elif stat.S_ISSOCK(st_mode):
             raise PathException(util.fsdecode(self.get_relative_path()) +
                                 u"is a socket, unsupported by tar")
-            self.type = u"sock"
+            self.type = u"sock"  # pylint: disable=unreachable
         elif stat.S_ISCHR(st_mode):
             self.type = u"chr"
         elif stat.S_ISBLK(st_mode):
@@ -196,7 +195,7 @@
     def init_from_tarinfo(self, tarinfo):
         u"""Set data from tarinfo object (part of tarfile module)"""
         # Set the typepp
-        type = tarinfo.type
+        type = tarinfo.type  # pylint: disable=redefined-builtin
         if type == tarfile.REGTYPE or type == tarfile.AREGTYPE:
             self.type = u"reg"
         elif type == tarfile.LNKTYPE:
@@ -228,13 +227,13 @@
         --numeric-owner is set
         """
         try:
-            if globals.numeric_owner:
+            if config.numeric_owner:
                 raise KeyError
             self.stat.st_uid = cached_ops.getpwnam(tarinfo.uname)[2]
         except KeyError:
             self.stat.st_uid = tarinfo.uid
         try:
-            if globals.numeric_owner:
+            if config.numeric_owner:
                 raise KeyError
             self.stat.st_gid = cached_ops.getgrnam(tarinfo.gname)[2]
         except KeyError:
@@ -504,15 +503,15 @@
     regex_chars_to_quote = re.compile(u"[\\\\\\\"\\$`]")
 
     def rename_index(self, index):
-        if not globals.rename or not index:
+        if not config.rename or not index:
             return index  # early exit
         path = os.path.normcase(os.path.join(*index))
         tail = []
-        while path and path not in globals.rename:
+        while path and path not in config.rename:
             path, extra = os.path.split(path)
             tail.insert(0, extra)
         if path:
-            return globals.rename[path].split(os.sep) + tail
+            return config.rename[path].split(os.sep) + tail
         else:
             return index  # no rename found
 
@@ -543,7 +542,7 @@
         try:
             # We may be asked to look at the target of symlinks rather than
             # the link itself.
-            if globals.copy_links:
+            if config.copy_links:
                 self.stat = os.stat(self.name)
             else:
                 self.stat = os.lstat(self.name)
@@ -591,7 +590,7 @@
             result = open(self.name, mode)
         return result
 
-    def makedev(self, type, major, minor):
+    def makedev(self, type, major, minor):  # pylint: disable=redefined-builtin
         u"""Make a device file with specified type, major/minor nums"""
         cmdlist = [u'mknod', self.name, type, str(major), str(minor)]
         if os.spawnvp(os.P_WAIT, u'mknod', cmdlist) != 0:
@@ -604,7 +603,7 @@
         try:
             os.makedirs(self.name)
         except OSError:
-            if (not globals.force):
+            if (not config.force):
                 raise PathException(u"Error creating directory %s" % self.uc_name, 7)
         self.setdata()
 
@@ -786,7 +785,7 @@
         Return fileobj with appropriate encryption/compression
 
         If encryption is specified but no gpg_profile, use
-        globals.default_profile.
+        config.default_profile.
         """
         assert not self.opened and not self.fileobj
         assert not (self.pr.encrypted and self.pr.compressed)
@@ -797,7 +796,7 @@
             return gzip.GzipFile(self.name, mode)
         elif self.pr.encrypted:
             if not gpg_profile:
-                gpg_profile = globals.gpg_profile
+                gpg_profile = config.gpg_profile
             if mode == u"rb":
                 return gpg.GPGFile(False, self, gpg_profile)
             elif mode == u"wb":
@@ -808,14 +807,14 @@
 
 class PathDeleter(ITRBranch):
     u"""Delete a directory.  Called by Path.deltree"""
-    def start_process(self, index, path):
+    def start_process(self, index, path):  # pylint: disable=unused-argument
         self.path = path
 
     def end_process(self):
         self.path.delete()
 
-    def can_fast_process(self, index, path):
+    def can_fast_process(self, index, path):  # pylint: disable=unused-argument
         return not path.isdir()
 
-    def fast_process(self, index, path):
+    def fast_process(self, index, path):  # pylint: disable=unused-argument
         path.delete()

=== modified file 'duplicity/progress.py'
--- duplicity/progress.py	2019-08-08 19:31:58 +0000
+++ duplicity/progress.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -34,17 +34,17 @@
 
 from __future__ import absolute_import
 from __future__ import division
-
 from builtins import object
+
+from datetime import datetime, timedelta
 import collections as sys_collections
 import math
+import pickle
 import threading
 import time
-from datetime import datetime, timedelta
-from duplicity import globals
+
+from duplicity import config
 from duplicity import log
-import pickle
-import os
 
 tracker = None
 progress_thread = None
@@ -63,9 +63,9 @@
         """
         snapshot = Snapshot()
         # If restarting Full, discard marshalled data and start over
-        if globals.restart is not None and globals.restart.start_vol >= 1:
+        if config.restart is not None and config.restart.start_vol >= 1:
             try:
-                progressfd = open(u'%s/progress' % globals.archive_dir_path.name, u'r')
+                progressfd = open(u'%s/progress' % config.archive_dir_path.name, u'r')
                 snapshot = pickle.load(progressfd)
                 progressfd.close()
             except:
@@ -79,7 +79,7 @@
         u"""
         Serializes object to cache
         """
-        progressfd = open(b'%s/progress' % globals.archive_dir_path.name, u'wb+')
+        progressfd = open(b'%s/progress' % config.archive_dir_path.name, u'wb+')
         pickle.dump(self, progressfd)
         progressfd.close()
 
@@ -149,7 +149,7 @@
         u"""
         Aproximative and evolving method of computing the progress of upload
         """
-        if not globals.progress or not self.has_collected_evidence():
+        if not config.progress or not self.has_collected_evidence():
             return
 
         current_time = datetime.now()
@@ -164,7 +164,7 @@
         # Detect (and report) a stallment if no changing data for more than 5 seconds
         if self.stall_last_time is None:
             self.stall_last_time = current_time
-        if (current_time - self.stall_last_time).seconds > max(5, 2 * globals.progress_rate):
+        if (current_time - self.stall_last_time).seconds > max(5, 2 * config.progress_rate):
             log.TransferProgress(100.0 * self.progress_estimation,
                                  self.time_estimation, self.total_bytecount,
                                  (current_time - self.start_time).seconds,
@@ -312,7 +312,7 @@
         return (datetime.now() - self.start_time).seconds
 
 
-def report_transfer(bytecount, totalbytes):
+def report_transfer(bytecount, totalbytes):  # pylint: disable=unused-argument
     u"""
     Method to call tracker.annotate_written_bytes from outside
     the class, and to offer the "function(long, long)" signature
@@ -336,7 +336,7 @@
 
     def run(self):
         global tracker
-        if not globals.dry_run and globals.progress and tracker.has_collected_evidence():
+        if not config.dry_run and config.progress and tracker.has_collected_evidence():
             while not self.finished:
                 tracker.log_upload_progress()
-                time.sleep(globals.progress_rate)
+                time.sleep(config.progress_rate)

=== modified file 'duplicity/robust.py'
--- duplicity/robust.py	2018-11-29 19:00:15 +0000
+++ duplicity/robust.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -20,9 +20,9 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 import errno
+
 from duplicity import librsync
 from duplicity import log
-from duplicity import util
 
 tmp_file_index = 1
 
@@ -60,7 +60,7 @@
 
 def listpath(path):
     u"""Like path.listdir() but return [] if error, and sort results"""
-    def error_handler(exc):
+    def error_handler(exc):  # pylint: disable=unused-argument
         log.Warn(_(u"Error listing directory %s") % path.uc_name)
         return []
     dir_listing = check_common_error(error_handler, path.listdir)

=== modified file 'duplicity/selection.py'
--- duplicity/selection.py	2020-03-06 16:15:34 +0000
+++ duplicity/selection.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -23,20 +23,18 @@
 from builtins import next
 from builtins import str
 from builtins import object
-from builtins import filter, map
 
-import os  # @UnusedImport
-import stat  # @UnusedImport
+import os
+import stat
 import sys
 import re
 
-from duplicity.path import *  # @UnusedWildImport
-from duplicity import log  # @Reimport
-from duplicity import globals  # @Reimport
+from duplicity import config
 from duplicity import diffdir
-from duplicity import util  # @Reimport
-from duplicity.globmatch import GlobbingError, FilePrefixError, \
-    select_fn_from_glob
+from duplicity import log
+from duplicity import util
+from duplicity.globmatch import GlobbingError, FilePrefixError, select_fn_from_glob
+from duplicity.path import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 
 u"""Iterate exactly the requested files in a directory
 
@@ -113,7 +111,7 @@
 
         """
         # Only called by set_iter. Internal.
-        def error_handler(exc, path, filename):
+        def error_handler(exc, path, filename):  # pylint: disable=unused-argument
             fullpath = os.path.join(path.name, filename)
             try:
                 mode = os.stat(fullpath)[stat.ST_MODE]
@@ -140,7 +138,7 @@
             """
             # Only called by Iterate. Internal.
             # todo: get around circular dependency issue by importing here
-            from duplicity import robust  # @Reimport
+            from duplicity import robust
             for filename in robust.listpath(path):
                 new_path = robust.check_common_error(
                     error_handler, Path.append, (path, filename))
@@ -362,7 +360,7 @@
         """
         # Internal. Used by ParseArgs.
         log.Notice(_(u"Reading globbing filelist %s") % list_name)
-        separator = globals.null_separator and u"\0" or u"\n"
+        separator = config.null_separator and u"\0" or u"\n"
         filelist_fp.seek(0)
         for line in filelist_fp.read().split(separator):
             line, include = self.filelist_sanitise_line(line, inc_default)

=== modified file 'duplicity/statistics.py'
--- duplicity/statistics.py	2019-10-04 20:49:18 +0000
+++ duplicity/statistics.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'duplicity/tarfile.py'
--- duplicity/tarfile.py	2018-09-11 21:35:37 +0000
+++ duplicity/tarfile.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2013 Michael Terry <mike@xxxxxxxxxxx>
 #

=== modified file 'duplicity/tempdir.py'
--- duplicity/tempdir.py	2019-10-04 20:49:18 +0000
+++ duplicity/tempdir.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -37,7 +37,7 @@
 import tempfile
 import threading
 
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 from duplicity import util
 
@@ -66,7 +66,7 @@
     _defaultLock.acquire()
     try:
         if _defaultInstance is None or _defaultInstance.dir() is None:
-            _defaultInstance = TemporaryDirectory(temproot=globals.temproot)
+            _defaultInstance = TemporaryDirectory(temproot=config.temproot)
             # set the temp dir to be the default in tempfile module from now on
             tempfile.tempdir = _defaultInstance.dir()
         return _defaultInstance
@@ -142,8 +142,8 @@
             return False
 
         if temproot is None:
-            if globals.temproot:
-                temproot = globals.temproot
+            if config.temproot:
+                temproot = config.temproot
             else:
                 global _initialSystemTempRoot
                 temproot = _initialSystemTempRoot

=== modified file 'duplicity/util.py'
--- duplicity/util.py	2020-01-21 13:24:32 +0000
+++ duplicity/util.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -39,12 +39,12 @@
 import atexit
 
 from duplicity import tarfile
-import duplicity.globals as globals
+import duplicity.config as config
 import duplicity.log as log
 
 try:
     # For paths, just use path.name/uname rather than converting with these
-    from os import fsencode, fsdecode
+    from os import fsencode, fsdecode  # pylint: disable=unused-import
 except ImportError:
     # Most likely Python version < 3.2, so define our own fsencode/fsdecode.
     # These are functions that encode/decode unicode paths to filesystem encoding,
@@ -71,7 +71,7 @@
         # If we are not doing any cleverness with non-unicode filename bytes,
         # decoding using system encoding is good enough. Use "ignore" as
         # Linux paths can contain non-Unicode characters
-        return bytes_filename.decode(globals.fsencoding, u"replace")
+        return bytes_filename.decode(config.fsencoding, u"replace")
 
 
 def exception_traceback(limit=50):
@@ -79,7 +79,7 @@
     @return A string representation in typical Python format of the
             currently active/raised exception.
     """
-    type, value, tb = sys.exc_info()
+    type, value, tb = sys.exc_info()  # pylint: disable=redefined-builtin
 
     lines = traceback.format_tb(tb, limit)
     lines.extend(traceback.format_exception_only(type, value))
@@ -140,7 +140,7 @@
     try:
         return fn()
     except Exception as e:
-        if globals.ignore_errors:
+        if config.ignore_errors:
             log.Warn(_(u"IGNORED_ERROR: Warning: ignoring error as requested: %s: %s")
                      % (e.__class__.__name__, uexc(e)))
             return None
@@ -207,10 +207,10 @@
 
 @atexit.register
 def release_lockfile():
-    if globals.lockfile:
-        log.Debug(_(u"Releasing lockfile %s") % globals.lockpath)
+    if config.lockfile:
+        log.Debug(_(u"Releasing lockfile %s") % config.lockpath)
         try:
-            globals.lockfile.release()
+            config.lockfile.release()
         except Exception:
             pass
 
@@ -253,7 +253,7 @@
     def is_exe(fpath):
         return os.path.isfile(fpath) and os.path.isabs(fpath) and os.access(fpath, os.X_OK)
 
-    fpath, fname = os.path.split(program)  # @UnusedVariable
+    fpath, fname = os.path.split(program)
     if fpath:
         if is_exe(program):
             return program

=== modified file 'po/POTFILES.in'
--- po/POTFILES.in	2020-02-06 15:27:43 +0000
+++ po/POTFILES.in	2020-03-22 12:35:54 +0000
@@ -39,6 +39,7 @@
 duplicity/backends/webdavbackend.py
 duplicity/cached_ops.py
 duplicity/commandline.py
+duplicity/config.py
 duplicity/diffdir.py
 duplicity/dup_collections.py
 duplicity/dup_main.py
@@ -48,7 +49,6 @@
 duplicity/errors.py
 duplicity/file_naming.py
 duplicity/filechunkio.py
-duplicity/globals.py
 duplicity/gpg.py
 duplicity/gpginterface.py
 duplicity/lazy.py

=== modified file 'po/duplicity.pot'
--- po/duplicity.pot	2020-03-07 15:55:26 +0000
+++ po/duplicity.pot	2020-03-22 12:35:54 +0000
@@ -8,7 +8,7 @@
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: Kenneth Loafman <kenneth@xxxxxxxxxxx>\n"
-"POT-Creation-Date: 2020-03-07 09:49-0600\n"
+"POT-Creation-Date: 2020-03-19 14:16-0500\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@xxxxxx>\n"

=== modified file 'pylintrc'
--- pylintrc	2020-03-14 14:56:17 +0000
+++ pylintrc	2020-03-22 12:35:54 +0000
@@ -9,3 +9,10 @@
         no-name-in-module,
         no-member,
         not-callable
+
+enable=unused-argument,
+       unused-wildcard-import,
+       redefined-builtin,
+       bad-indentation,
+       mixed-indentation,
+       unreachable

=== modified file 'setup.py'
--- setup.py	2020-02-06 20:33:05 +0000
+++ setup.py	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -29,7 +29,6 @@
 from setuptools import setup, Extension
 from setuptools.command.test import test
 from setuptools.command.install import install
-from setuptools.command.sdist import sdist
 from distutils.command.build_scripts import build_scripts
 from subprocess import Popen, PIPE
 

=== modified file 'testing/__init__.py'
--- testing/__init__.py	2020-01-17 15:27:48 +0000
+++ testing/__init__.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2012 Canonical Ltd
 #
@@ -28,7 +28,7 @@
 import unittest
 
 from duplicity import backend
-from duplicity import globals
+from duplicity import config
 from duplicity import log
 
 _testing_dir = os.path.dirname(os.path.abspath(__file__))
@@ -67,11 +67,11 @@
     def setUp(self):
         super(DuplicityTestCase, self).setUp()
         self.savedEnviron = {}
-        self.savedGlobals = {}
+        self.savedConfig = {}
 
         log.setup()
         log.setverbosity(log.WARNING)
-        self.set_global(u'print_statistics', 0)
+        self.set_config(u'print_statistics', 0)
         backend.import_backends()
 
         # Have all file references in tests relative to our testing dir
@@ -80,8 +80,8 @@
     def tearDown(self):
         for key in self.savedEnviron:
             self._update_env(key, self.savedEnviron[key])
-        for key in self.savedGlobals:
-            setattr(globals, key, self.savedGlobals[key])
+        for key in self.savedConfig:
+            setattr(config, key, self.savedConfig[key])
         assert not os.system(u"rm -rf testfiles")
         super(DuplicityTestCase, self).tearDown()
 
@@ -101,8 +101,8 @@
             self.savedEnviron[key] = os.environ.get(key)
         self._update_env(key, value)
 
-    def set_global(self, key, value):
-        assert hasattr(globals, key)
-        if key not in self.savedGlobals:
-            self.savedGlobals[key] = getattr(globals, key)
-        setattr(globals, key, value)
+    def set_config(self, key, value):
+        assert hasattr(config, key)
+        if key not in self.savedConfig:
+            self.savedConfig[key] = getattr(config, key)
+        setattr(config, key, value)

=== modified file 'testing/conftest.py'
--- testing/conftest.py	2019-06-29 15:35:07 +0000
+++ testing/conftest.py	2020-03-22 12:35:54 +0000
@@ -1,3 +1,5 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
+#
 from __future__ import print_function
 from future import standard_library
 standard_library.install_aliases()

=== modified file 'testing/docker/duplicity_test/Dockerfile-18.04'
--- testing/docker/duplicity_test/Dockerfile-18.04	2020-01-29 12:45:01 +0000
+++ testing/docker/duplicity_test/Dockerfile-18.04	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2019 Nils Tekampe <nils@xxxxxxxxxxx>,
 # Kenneth Loafman <kenneth@xxxxxxxxxxx> and Aaron Whitehouse <code@xxxxxxxxxxxxxxxxxx>

=== modified file 'testing/docker/duplicity_test/Dockerfile-18.10'
--- testing/docker/duplicity_test/Dockerfile-18.10	2020-01-29 12:45:01 +0000
+++ testing/docker/duplicity_test/Dockerfile-18.10	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2019 Nils Tekampe <nils@xxxxxxxxxxx>,
 # Kenneth Loafman <kenneth@xxxxxxxxxxx> and Aaron Whitehouse <code@xxxxxxxxxxxxxxxxxx>

=== modified file 'testing/docker/duplicity_test/Dockerfile-19.04'
--- testing/docker/duplicity_test/Dockerfile-19.04	2020-01-29 12:45:01 +0000
+++ testing/docker/duplicity_test/Dockerfile-19.04	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2019 Nils Tekampe <nils@xxxxxxxxxxx>,
 # Kenneth Loafman <kenneth@xxxxxxxxxxx> and Aaron Whitehouse <code@xxxxxxxxxxxxxxxxxx>

=== modified file 'testing/docker/duplicity_test/Dockerfile-19.10'
--- testing/docker/duplicity_test/Dockerfile-19.10	2020-01-29 12:45:01 +0000
+++ testing/docker/duplicity_test/Dockerfile-19.10	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2019 Nils Tekampe <nils@xxxxxxxxxxx>,
 # Kenneth Loafman <kenneth@xxxxxxxxxxx> and Aaron Whitehouse <code@xxxxxxxxxxxxxxxxxx>

=== modified file 'testing/docker/duplicity_test/Dockerfile-20.04'
--- testing/docker/duplicity_test/Dockerfile-20.04	2020-01-29 19:45:30 +0000
+++ testing/docker/duplicity_test/Dockerfile-20.04	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2019 Nils Tekampe <nils@xxxxxxxxxxx>,
 # Kenneth Loafman <kenneth@xxxxxxxxxxx> and Aaron Whitehouse <code@xxxxxxxxxxxxxxxxxx>

=== modified file 'testing/docker/ftp_server/Dockerfile'
--- testing/docker/ftp_server/Dockerfile	2019-07-10 20:29:33 +0000
+++ testing/docker/ftp_server/Dockerfile	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2017 Nils Tekampe <nils@xxxxxxxxxxx>
 #

=== modified file 'testing/docker/ssh_server/Dockerfile'
--- testing/docker/ssh_server/Dockerfile	2017-05-21 19:45:56 +0000
+++ testing/docker/ssh_server/Dockerfile	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2017 Nils Tekampe <nils@xxxxxxxxxxx>
 # Thanks to Aleksandar Diklic "https://github.com/rastasheep";

=== modified file 'testing/find_unadorned_strings.py'
--- testing/find_unadorned_strings.py	2019-12-28 21:26:47 +0000
+++ testing/find_unadorned_strings.py	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2018 Aaron Whitehouse <aaron@xxxxxxxxxxxxxxxxxx>
 #

=== modified file 'testing/fix_unadorned_strings.py'
--- testing/fix_unadorned_strings.py	2019-12-28 21:26:47 +0000
+++ testing/fix_unadorned_strings.py	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2018 Aaron Whitehouse <aaron@xxxxxxxxxxxxxxxxxx>
 #

=== modified file 'testing/functional/__init__.py'
--- testing/functional/__init__.py	2019-08-18 19:21:02 +0000
+++ testing/functional/__init__.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2012 Canonical Ltd
 #
@@ -170,7 +170,7 @@
             print(u"...return_val:", return_val, file=sys.stderr)
             raise CmdError(return_val)
 
-    def backup(self, type, input_dir, options=[], **kwargs):
+    def backup(self, type, input_dir, options=[], **kwargs):  # pylint: disable=redefined-builtin
         u"""Run duplicity backup to default directory"""
         options = [type, input_dir, self.backend_url, u"--volsize", u"1"] + options
         before_files = self.get_backend_files()
@@ -182,7 +182,7 @@
         if self.last_backup == int(now):
             time.sleep(1)
 
-        result = self.run_duplicity(options=options, **kwargs)
+        self.run_duplicity(options=options, **kwargs)
         self.last_backup = int(time.time())
 
         after_files = self.get_backend_files()

=== modified file 'testing/functional/test_badupload.py'
--- testing/functional/test_badupload.py	2019-06-29 15:35:07 +0000
+++ testing/functional/test_badupload.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'testing/functional/test_cleanup.py'
--- testing/functional/test_cleanup.py	2019-06-29 15:35:07 +0000
+++ testing/functional/test_cleanup.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'testing/functional/test_final.py'
--- testing/functional/test_final.py	2019-08-18 19:21:02 +0000
+++ testing/functional/test_final.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -71,7 +71,7 @@
                      restore_options=restore_options)
 
         # Test restoring various sub files
-        for filename, time, dir in [(u'symbolic_link', 99999, u'dir1'),
+        for filename, time, dir in [(u'symbolic_link', 99999, u'dir1'),  # pylint: disable=redefined-builtin
                                     (u'directory_to_file', 100100, u'dir1'),
                                     (u'directory_to_file', 200100, u'dir2'),
                                     (u'largefile', 300000, u'dir3')]:

=== modified file 'testing/functional/test_log.py'
--- testing/functional/test_log.py	2020-02-10 19:58:36 +0000
+++ testing/functional/test_log.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2008 Michael Terry <mike@xxxxxxxxxxx>
 #
@@ -65,7 +65,7 @@
             linecount += 1
             if linecount == 1:
                 assert(line == u"ERROR 2\n")
-            elif line != u"\n":
+            elif line != u"\n":
                 assert(line.startswith(r". "))
             else:
                 lastline = True

=== modified file 'testing/functional/test_rdiffdir.py'
--- testing/functional/test_rdiffdir.py	2020-01-17 15:27:48 +0000
+++ testing/functional/test_rdiffdir.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'testing/functional/test_replicate.py'
--- testing/functional/test_replicate.py	2019-08-18 19:21:02 +0000
+++ testing/functional/test_replicate.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -24,11 +24,8 @@
 from future import standard_library
 standard_library.install_aliases()
 
-import os
-import unittest
-
 from duplicity import path
-from . import CmdError, FunctionalTestCase
+from . import FunctionalTestCase
 
 
 class ReplicateTest(FunctionalTestCase):

=== modified file 'testing/functional/test_restart.py'
--- testing/functional/test_restart.py	2020-01-17 15:27:48 +0000
+++ testing/functional/test_restart.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'testing/functional/test_selection.py'
--- testing/functional/test_selection.py	2020-01-17 16:51:15 +0000
+++ testing/functional/test_selection.py	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2014 Aaron Whitehouse <aaron@xxxxxxxxxxxxxxxxxx>
 #

=== modified file 'testing/functional/test_verify.py'
--- testing/functional/test_verify.py	2019-06-29 15:35:07 +0000
+++ testing/functional/test_verify.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'testing/manual/__init__.py'
--- testing/manual/__init__.py	2014-05-06 18:10:52 +0000
+++ testing/manual/__init__.py	2020-03-22 12:35:54 +0000
@@ -0,0 +1,20 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
+#
+# Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
+# Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
+#
+# This file is part of duplicity.
+#
+# Duplicity is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# Duplicity is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with duplicity; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

=== renamed file 'testing/manual/backendtest' => 'testing/manual/backendtest.py'
--- testing/manual/backendtest	2019-12-28 21:26:47 +0000
+++ testing/manual/backendtest.py	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -22,26 +22,31 @@
 
 import os
 import sys
+import traceback
 import unittest
 
-_top_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..')
+_top_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), u'..', u'..')
 sys.path.insert(0, _top_dir)
+
+from duplicity import config
+
 try:
-    from testing.manual import config
-except ImportError:
+    from testing.manual import test_config
+except ImportError:
     # It's OK to not have copied config.py.tmpl over yet, if user is just
     # calling us directly to test a specific backend.  If they aren't, we'll
     # fail later when config.blah is used.
+    traceback.print_exc()
     pass
 from testing.unit.test_backend_instance import BackendInstanceBase
 import duplicity.backend
 
 # undo the overrides support that our testing framework adds
-sys.path = [x for x in sys.path if '/overrides' not in x]
-os.environ['PATH'] = ':'.join([x for x in os.environ['PATH'].split(':')
-                               if '/overrides' not in x])
-os.environ['PYTHONPATH'] = ':'.join([x for x in os.environ['PYTHONPATH'].split(':')
-                                     if '/overrides' not in x])
+sys.path = [x for x in sys.path if u'/overrides' not in x]
+os.environ[u'PATH'] = u':'.join([x for x in os.environ[u'PATH'].split(u':')
+                                 if u'/overrides' not in x])
+os.environ[u'PYTHONPATH'] = u':'.join([x for x in os.environ[u'PYTHONPATH'].split(u':')
+                                       if u'/overrides' not in x])
 
 
 class ManualBackendBase(BackendInstanceBase):
@@ -51,17 +56,17 @@
 
     def setUp(self):
         super(ManualBackendBase, self).setUp()
-        self.set_global('num_retries', 1)
-        self.set_global('ssl_no_check_certificate', True)
+        self.set_config(u'num_retries', 1)
+        self.set_config(u'ssl_no_check_certificate', True)
         self.setBackendInfo()
         if self.password is not None:
-            self.set_environ("FTP_PASSWORD", self.password)
+            self.set_environ(u"FTP_PASSWORD", self.password)
         if self.url_string is not None:
             self.backend = duplicity.backend.get_backend_object(self.url_string)
 
         # Clear out backend first
         if self.backend is not None:
-            if hasattr(self.backend, '_delete_list'):
+            if hasattr(self.backend, u'_delete_list'):
                 self.backend._delete_list(self.backend._list())
             else:
                 for x in self.backend._list():
@@ -73,168 +78,158 @@
 
 class sshParamikoTest(ManualBackendBase):
     def setBackendInfo(self):
-        from duplicity.backends import _ssh_paramiko
-        duplicity.backend._backends['ssh'] = _ssh_paramiko.SSHParamikoBackend
-        self.set_global('use_scp', False)
-        self.url_string = config.ssh_url
-        self.password = config.ssh_password
+        from duplicity.backends import ssh_paramiko_backend
+        duplicity.backend._backends[u'ssh'] = ssh_paramiko_backend.SSHParamikoBackend
+        self.url_string = test_config.ssh_url
+        self.password = test_config.ssh_password
 
 
 class sshParamikoScpTest(ManualBackendBase):
     def setBackendInfo(self):
-        from duplicity.backends import _ssh_paramiko
-        duplicity.backend._backends['ssh'] = _ssh_paramiko.SSHParamikoBackend
-        self.set_global('use_scp', True)
-        self.url_string = config.ssh_url
-        self.password = config.ssh_password
+        from duplicity.backends import ssh_paramiko_backend
+        duplicity.backend._backends[u'scp'] = ssh_paramiko_backend.SSHParamikoBackend
+        self.url_string = test_config.ssh_url
+        self.password = test_config.ssh_password
 
 
 class sshPexpectTest(ManualBackendBase):
     def setBackendInfo(self):
-        from duplicity.backends import _ssh_pexpect
-        duplicity.backend._backends['ssh'] = _ssh_pexpect.SSHPExpectBackend
-        self.set_global('use_scp', False)
-        self.url_string = config.ssh_url
-        self.password = config.ssh_password
+        from duplicity.backends import ssh_pexpect_backend
+        duplicity.backend._backends[u'ssh'] = ssh_pexpect_backend.SSHPExpectBackend
+        self.url_string = test_config.ssh_url
+        self.password = test_config.ssh_password
 
 
 class sshPexpectScpTest(ManualBackendBase):
     def setBackendInfo(self):
-        from duplicity.backends import _ssh_pexpect
-        duplicity.backend._backends['ssh'] = _ssh_pexpect.SSHPExpectBackend
-        self.set_global('use_scp', True)
-        self.url_string = config.ssh_url
-        self.password = config.ssh_password
+        from duplicity.backends import ssh_pexpect_backend
+        duplicity.backend._backends[u'scp'] = ssh_pexpect_backend.SSHPExpectBackend
+        self.url_string = test_config.ssh_url
+        self.password = test_config.ssh_password
 
 
 class ftpTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = config.ftp_url
-        self.password = config.ftp_password
+        self.url_string = test_config.ftp_url
+        self.password = test_config.ftp_password
 
 
 class ftpsTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = config.ftp_url.replace('ftp://', 'ftps://') if config.ftp_url else None
-        self.password = config.ftp_password
+        self.url_string = test_config.ftp_url.replace(u'ftp://', u'ftps://') if test_config.ftp_url else None
+        self.password = test_config.ftp_password
 
 
 class gsTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = config.gs_url
-        self.set_environ("GS_ACCESS_KEY_ID", config.gs_access_key)
-        self.set_environ("GS_SECRET_ACCESS_KEY", config.gs_secret_key)
+        self.url_string = test_config.gs_url
+        self.set_environ(u"GS_ACCESS_KEY_ID", test_config.gs_access_key)
+        self.set_environ(u"GS_SECRET_ACCESS_KEY", test_config.gs_secret_key)
 
 
 class s3SingleTest(ManualBackendBase):
     def setBackendInfo(self):
         from duplicity.backends import _boto_single
-        duplicity.backend._backends['s3+http'] = _boto_single.BotoBackend
-        self.set_global('s3_use_new_style', True)
-        self.set_environ("AWS_ACCESS_KEY_ID", config.s3_access_key)
-        self.set_environ("AWS_SECRET_ACCESS_KEY", config.s3_secret_key)
-        self.url_string = config.s3_url
+        duplicity.backend._backends[u's3+http'] = _boto_single.BotoBackend
+        self.set_config(u's3_use_new_style', True)
+        self.set_environ(u"AWS_ACCESS_KEY_ID", test_config.s3_access_key)
+        self.set_environ(u"AWS_SECRET_ACCESS_KEY", test_config.s3_secret_key)
+        self.url_string = test_config.s3_url
 
 
 class s3MultiTest(ManualBackendBase):
     def setBackendInfo(self):
         from duplicity.backends import _boto_multi
-        duplicity.backend._backends['s3+http'] = _boto_multi.BotoBackend
-        self.set_global('s3_use_new_style', True)
-        self.set_environ("AWS_ACCESS_KEY_ID", config.s3_access_key)
-        self.set_environ("AWS_SECRET_ACCESS_KEY", config.s3_secret_key)
-        self.url_string = config.s3_url
+        duplicity.backend._backends[u's3+http'] = _boto_multi.BotoBackend
+        self.set_config(u's3_use_new_style', True)
+        self.set_environ(u"AWS_ACCESS_KEY_ID", test_config.s3_access_key)
+        self.set_environ(u"AWS_SECRET_ACCESS_KEY", test_config.s3_secret_key)
+        self.url_string = test_config.s3_url
 
 
 class cfCloudfilesTest(ManualBackendBase):
     def setBackendInfo(self):
         from duplicity.backends import _cf_cloudfiles
-        duplicity.backend._backends['cf+http'] = _cf_cloudfiles.CloudFilesBackend
-        self.set_environ("CLOUDFILES_USERNAME", config.cf_username)
-        self.set_environ("CLOUDFILES_APIKEY", config.cf_api_key)
-        self.url_string = config.cf_url
+        duplicity.backend._backends[u'cf+http'] = _cf_cloudfiles.CloudFilesBackend
+        self.set_environ(u"CLOUDFILES_USERNAME", test_config.cf_username)
+        self.set_environ(u"CLOUDFILES_APIKEY", test_config.cf_api_key)
+        self.url_string = test_config.cf_url
 
 
 class cfPyraxTest(ManualBackendBase):
     def setBackendInfo(self):
         from duplicity.backends import _cf_pyrax
-        duplicity.backend._backends['cf+http'] = _cf_pyrax.PyraxBackend
-        self.set_environ("CLOUDFILES_USERNAME", config.cf_username)
-        self.set_environ("CLOUDFILES_APIKEY", config.cf_api_key)
-        self.url_string = config.cf_url
+        duplicity.backend._backends[u'cf+http'] = _cf_pyrax.PyraxBackend
+        self.set_environ(u"CLOUDFILES_USERNAME", test_config.cf_username)
+        self.set_environ(u"CLOUDFILES_APIKEY", test_config.cf_api_key)
+        self.url_string = test_config.cf_url
 
 
 class swiftTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = config.swift_url
-        self.set_environ("SWIFT_USERNAME", config.swift_username)
-        self.set_environ("SWIFT_PASSWORD", config.swift_password)
-        self.set_environ("SWIFT_TENANTNAME", config.swift_tenant)
+        self.url_string = test_config.swift_url
+        self.set_environ(u"SWIFT_USERNAME", test_config.swift_username)
+        self.set_environ(u"SWIFT_PASSWORD", test_config.swift_password)
+        self.set_environ(u"SWIFT_TENANTNAME", test_config.swift_tenant)
         # Assumes you're just using the same storage as your cloudfiles config above
-        self.set_environ("SWIFT_AUTHURL", 'https://identity.api.rackspacecloud.com/v2.0/')
-        self.set_environ("SWIFT_AUTHVERSION", '2')
+        self.set_environ(u"SWIFT_AUTHURL", u'https://identity.api.rackspacecloud.com/v2.0/')
+        self.set_environ(u"SWIFT_AUTHVERSION", u'2')
 
 
 class megaTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = config.mega_url
-        self.password = config.mega_password
+        self.url_string = test_config.mega_url
+        self.password = test_config.mega_password
 
 
 class webdavTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = config.webdav_url
-        self.password = config.webdav_password
+        self.url_string = test_config.webdav_url
+        self.password = test_config.webdav_password
 
 
 class webdavsTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = config.webdavs_url
-        self.password = config.webdavs_password
-        self.set_global('ssl_no_check_certificate', True)
+        self.url_string = test_config.webdavs_url
+        self.password = test_config.webdavs_password
+        self.set_config(u'ssl_no_check_certificate', True)
 
 
 class gdocsTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = config.gdocs_url
-        self.password = config.gdocs_password
+        self.url_string = test_config.gdocs_url
+        self.password = test_config.gdocs_password
 
 
 class dpbxTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = config.dpbx_url
+        self.url_string = test_config.dpbx_url
 
 
 class imapTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = config.imap_url
-        self.set_environ("IMAP_PASSWORD", config.imap_password)
-        self.set_global('imap_mailbox', 'deja-dup-testing')
+        self.url_string = test_config.imap_url
+        self.set_environ(u"IMAP_PASSWORD", test_config.imap_password)
+        self.set_config(u'imap_mailbox', u'deja-dup-testing')
 
 
 class gioSSHTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = 'gio+' + config.ssh_url if config.ssh_url else None
-        self.password = config.ssh_password
+        self.url_string = u'gio+' + test_config.ssh_url if test_config.ssh_url else None
+        self.password = test_config.ssh_password
 
 
 class gioFTPTest(ManualBackendBase):
     def setBackendInfo(self):
-        self.url_string = 'gio+' + config.ftp_url if config.ftp_url else None
-        self.password = config.ftp_password
-
-
-class copyTest(ManualBackendBase):
-    def setBackendInfo(self):
-        self.url_string = config.copy_url
-        self.password = config.copy_password
-
-
-if __name__ == "__main__":
+        self.url_string = u'gio+' + test_config.ftp_url if test_config.ftp_url else None
+        self.password = test_config.ftp_password
+
+
+if __name__ == u"__main__":
     defaultTest = None
     if len(sys. argv) > 1:
         class manualTest(ManualBackendBase):
             def setBackendInfo(self):
                 self.url_string = sys.argv[1]
-        defaultTest = 'manualTest'
+        defaultTest = u'manualTest'
     unittest.main(argv=[sys.argv[0]], defaultTest=defaultTest)

=== renamed file 'testing/manual/roottest' => 'testing/manual/roottest.py'
--- testing/manual/roottest	2014-05-06 18:10:52 +0000
+++ testing/manual/roottest.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -21,17 +21,17 @@
 
 import config
 import sys, unittest
-sys.path.insert(0, "../")
+sys.path.insert(0, u"../")
 
 from duplicity import diffdir
 from duplicity import patchdir
 from duplicity import selection
-from duplicity.path import * #@UnusedWildImport
+from duplicity.path import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 
 config.setup()
 
 class RootTest(unittest.TestCase):
-    """Test doing operations that only root can"""
+    u"""Test doing operations that only root can"""
 
     def setUp(self):
         # must run with euid/egid of root
@@ -39,13 +39,13 @@
         # make sure uid/gid match euid/egid
         os.setuid(os.geteuid())
         os.setgid(os.getegid())
-        assert not os.system("tar xzf manual/rootfiles.tar.gz > /dev/null 2>&1")
+        assert not os.system(u"tar xzf manual/rootfiles.tar.gz > /dev/null 2>&1")
 
     def tearDown(self):
-        assert not os.system("rm -rf testfiles tempdir temp2.tar")
+        assert not os.system(u"rm -rf testfiles tempdir temp2.tar")
 
     def copyfileobj(self, infp, outfp):
-        """Copy in fileobj to out, closing afterwards"""
+        u"""Copy in fileobj to out, closing afterwards"""
         blocksize = 32 * 1024
         while 1:
             buf = infp.read(blocksize)
@@ -55,23 +55,23 @@
         assert not outfp.close()
 
     def deltmp(self):
-        """Delete temporary directories"""
-        assert not os.system("rm -rf testfiles/output")
-        os.mkdir("testfiles/output")
+        u"""Delete temporary directories"""
+        assert not os.system(u"rm -rf testfiles/output")
+        os.mkdir(u"testfiles/output")
 
     def get_sel(self, path):
-        """Get selection iter over the given directory"""
+        u"""Get selection iter over the given directory"""
         return selection.Select(path).set_iter()
 
     def total_sequence(self, filelist):
-        """Test signatures, diffing, and patching on directory list"""
+        u"""Test signatures, diffing, and patching on directory list"""
         assert len(filelist) >= 2
         self.deltmp()
-        assert not os.system("cp -pR %s testfiles/output/sequence" %
+        assert not os.system(u"cp -pR %s testfiles/output/sequence" %
                              (filelist[0],))
-        seq_path = Path("testfiles/output/sequence")
-        sig = Path("testfiles/output/sig.tar")
-        diff = Path("testfiles/output/diff.tar")
+        seq_path = Path(u"testfiles/output/sequence")
+        sig = Path(u"testfiles/output/sig.tar")
+        diff = Path(u"testfiles/output/diff.tar")
         for dirname in filelist[1:]:
             new_path = Path(dirname)
             diffdir.write_block_iter(
@@ -79,51 +79,51 @@
 
             diffdir.write_block_iter(
                 diffdir.DirDelta(selection.Select(new_path).set_iter(),
-                                 sig.open("rb")),
+                                 sig.open(u"rb")),
                 diff)
 
-            patchdir.Patch(seq_path, diff.open("rb"))
+            patchdir.Patch(seq_path, diff.open(u"rb"))
 
             assert seq_path.compare_recursive(new_path, 1)
 
     def test_basic_cycle(self):
-        """Test cycle on dir with devices, changing uid/gid, etc."""
-        self.total_sequence(['testfiles/root1', 'testfiles/root2'])
+        u"""Test cycle on dir with devices, changing uid/gid, etc."""
+        self.total_sequence([u'testfiles/root1', u'testfiles/root2'])
 
     def test_patchdir(self):
-        """Test changing uid/gid, devices"""
+        u"""Test changing uid/gid, devices"""
         self.deltmp()
-        os.system("cp -pR testfiles/root1 testfiles/output/sequence")
-        seq_path = Path("testfiles/output/sequence")
-        new_path = Path("testfiles/root2")
-        sig = Path("testfiles/output/sig.tar")
-        diff = Path("testfiles/output/diff.tar")
+        os.system(u"cp -pR testfiles/root1 testfiles/output/sequence")
+        seq_path = Path(u"testfiles/output/sequence")
+        new_path = Path(u"testfiles/root2")
+        sig = Path(u"testfiles/output/sig.tar")
+        diff = Path(u"testfiles/output/diff.tar")
 
         diffdir.write_block_iter(diffdir.DirSig(self.get_sel(seq_path)), sig)
-        deltablock = diffdir.DirDelta(self.get_sel(new_path), sig.open("rb"))
+        deltablock = diffdir.DirDelta(self.get_sel(new_path), sig.open(u"rb"))
         diffdir.write_block_iter(deltablock, diff)
 
-        patchdir.Patch(seq_path, diff.open("rb"))
+        patchdir.Patch(seq_path, diff.open(u"rb"))
 
         # since we are not running as root, don't even both comparing,
         # just make sure file5 exists and file4 doesn't.
-        file5 = seq_path.append("file5")
+        file5 = seq_path.append(u"file5")
         assert file5.isreg()
-        file4 = seq_path.append("file4")
+        file4 = seq_path.append(u"file4")
         assert file4.type is None
 
     def test_patchdir2(self):
-        """Again test files we don't have access to, this time Tar_WriteSig"""
+        u"""Again test files we don't have access to, this time Tar_WriteSig"""
         self.deltmp()
-        sig_path = Path("testfiles/output/sig.sigtar")
-        tar_path = Path("testfiles/output/tar.tar")
-        basis_path = Path("testfiles/root1")
+        sig_path = Path(u"testfiles/output/sig.sigtar")
+        tar_path = Path(u"testfiles/output/tar.tar")
+        basis_path = Path(u"testfiles/root1")
 
         deltablock = diffdir.DirFull_WriteSig(self.get_sel(basis_path),
-                                              sig_path.open("wb"))
+                                              sig_path.open(u"wb"))
         diffdir.write_block_iter(deltablock, tar_path)
 
 def runtests(): unittest.main()
 
-if __name__ == "__main__":
+if __name__ == u"__main__":
     unittest.main()

=== renamed file 'testing/manual/config.py.tmpl' => 'testing/manual/test_config.py.tmpl'
--- testing/manual/config.py.tmpl	2014-06-16 13:52:10 +0000
+++ testing/manual/test_config.py.tmpl	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -31,6 +31,7 @@
 
 rsync_abspath_url = None
 rsync_relpath_url = None
+
 rsync_module_url = None
 rsync_password = None
 
@@ -66,6 +67,3 @@
 
 gdocs_url = None
 gdocs_password = None
-
-copy_url = None
-copy_password = None

=== modified file 'testing/overrides/__init__.py'
--- testing/overrides/__init__.py	2019-06-29 21:05:41 +0000
+++ testing/overrides/__init__.py	2020-03-22 12:35:54 +0000
@@ -1,3 +1,5 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
+#
 from __future__ import print_function
 from future import standard_library
 standard_library.install_aliases()

=== modified file 'testing/overrides/gettext.py'
--- testing/overrides/gettext.py	2018-11-29 19:00:15 +0000
+++ testing/overrides/gettext.py	2020-03-22 12:35:54 +0000
@@ -23,14 +23,14 @@
 # if we ever get a unicode->ascii translation by accident.
 
 
-def install(*args, **kwargs):
+def install(*args, **kwargs):  # pylint: disable=unused-argument
     ZWSP = u"​"  # ZERO WIDTH SPACE, basically an invisible space separator
     import sys
     if sys.version_info.major >= 3:
         import builtins
         b = builtins
     else:
-        import __builtin__
+        import __builtin__  # pylint: disable=import-error
         b = __builtin__
     b.__dict__[u'_'] = lambda x: x + ZWSP
     b.__dict__[u'ngettext'] = lambda one, more, n: one + ZWSP if n == 1 else more + ZWSP

=== modified file 'testing/test_code.py'
--- testing/test_code.py	2020-03-14 14:56:17 +0000
+++ testing/test_code.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2014 Michael Terry <michael.terry@xxxxxxxxxxxxx>
 #
@@ -34,7 +34,7 @@
     # do not run the tests (e.g. the build servers)
     import pycodestyle
 
-from . import _top_dir, DuplicityTestCase  # @IgnorePep8
+from . import _top_dir, DuplicityTestCase
 from . import find_unadorned_strings
 
 skipCodeTest = pytest.mark.skipif(not os.getenv(u'RUN_CODE_TESTS', None) == u'1',

=== modified file 'testing/unit/__init__.py'
--- testing/unit/__init__.py	2019-06-29 15:35:07 +0000
+++ testing/unit/__init__.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2012 Canonical Ltd
 #

=== modified file 'testing/unit/test_backend.py'
--- testing/unit/test_backend.py	2019-06-29 15:35:07 +0000
+++ testing/unit/test_backend.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -27,10 +27,9 @@
 import unittest
 
 import duplicity.backend
-import duplicity.backends  # @UnusedImport
-from duplicity.errors import *  # @UnusedWildImport
-from duplicity import globals
-from duplicity import path
+import duplicity.backends
+from duplicity.errors import *  # pylint: disable=unused-wildcard-import
+from duplicity import config
 from . import UnitTestCase
 
 
@@ -150,7 +149,7 @@
 
     @mock.patch(u'sys.exit')
     def test_default_error_exit(self, exit_mock):
-        self.set_global(u'num_retries', 1)
+        self.set_config(u'num_retries', 1)
         try:
             del self.mock._error_code
         except:
@@ -163,7 +162,7 @@
 
     @mock.patch(u'sys.exit')
     def test_translates_code(self, exit_mock):
-        self.set_global(u'num_retries', 1)
+        self.set_config(u'num_retries', 1)
         self.mock._error_code.return_value = 12345
         self.mock._put.side_effect = Exception
         self.backend.put(self.local, self.remote)
@@ -171,7 +170,7 @@
 
     @mock.patch(u'sys.exit')
     def test_uses_exception_code(self, exit_mock):
-        self.set_global(u'num_retries', 1)
+        self.set_config(u'num_retries', 1)
         self.mock._error_code.return_value = 12345
         self.mock._put.side_effect = BackendException(u'error', code=54321)
         self.backend.put(self.local, self.remote)
@@ -179,8 +178,8 @@
 
     @mock.patch(u'sys.exit')
     @mock.patch(u'time.sleep')  # so no waiting
-    def test_cleans_up(self, exit_mock, time_mock):
-        self.set_global(u'num_retries', 2)
+    def test_cleans_up(self, exit_mock, time_mock):  # pylint: disable=unused-argument
+        self.set_config(u'num_retries', 2)
         self.mock._retry_cleanup.return_value = None
         self.mock._put.side_effect = Exception
         self.backend.put(self.local, self.remote)
@@ -213,28 +212,28 @@
 
     @mock.patch(u'sys.exit')
     @mock.patch(u'time.sleep')  # so no waiting
-    def test_retries(self, exit_mock, time_mock):
-        self.set_global(u'num_retries', 2)
+    def test_retries(self, exit_mock, time_mock):  # pylint: disable=unused-argument
+        self.set_config(u'num_retries', 2)
 
         self.mock._get.side_effect = Exception
         self.backend.get(self.remote, self.local)
-        self.assertEqual(self.mock._get.call_count, globals.num_retries)
+        self.assertEqual(self.mock._get.call_count, config.num_retries)
 
         self.mock._put.side_effect = Exception
         self.backend.put(self.local, self.remote)
-        self.assertEqual(self.mock._put.call_count, globals.num_retries)
+        self.assertEqual(self.mock._put.call_count, config.num_retries)
 
         self.mock._list.side_effect = Exception
         self.backend.list()
-        self.assertEqual(self.mock._list.call_count, globals.num_retries)
+        self.assertEqual(self.mock._list.call_count, config.num_retries)
 
         self.mock._delete_list.side_effect = Exception
         self.backend.delete([self.remote])
-        self.assertEqual(self.mock._delete_list.call_count, globals.num_retries)
+        self.assertEqual(self.mock._delete_list.call_count, config.num_retries)
 
         self.mock._query_list.side_effect = Exception
         self.backend.query_info([self.remote])
-        self.assertEqual(self.mock._query_list.call_count, globals.num_retries)
+        self.assertEqual(self.mock._query_list.call_count, config.num_retries)
 
         try:
             del self.mock._delete_list
@@ -242,7 +241,7 @@
             return
         self.mock._delete.side_effect = Exception
         self.backend.delete([self.remote])
-        self.assertEqual(self.mock._delete.call_count, globals.num_retries)
+        self.assertEqual(self.mock._delete.call_count, config.num_retries)
 
         try:
             del self.mock._query_list
@@ -250,11 +249,11 @@
             return
         self.mock._query.side_effect = Exception
         self.backend.query_info([self.remote])
-        self.assertEqual(self.mock._query.call_count, globals.num_retries)
+        self.assertEqual(self.mock._query.call_count, config.num_retries)
 
         self.mock._move.side_effect = Exception
         self.backend.move(self.local, self.remote)
-        self.assertEqual(self.mock._move.call_count, globals.num_retries)
+        self.assertEqual(self.mock._move.call_count, config.num_retries)
 
     def test_move(self):
         self.mock._move.return_value = True

=== modified file 'testing/unit/test_backend_instance.py'
--- testing/unit/test_backend_instance.py	2020-02-24 18:48:50 +0000
+++ testing/unit/test_backend_instance.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2014 Canonical Ltd
 #
@@ -24,16 +24,14 @@
 
 import os
 import io
-import sys
 import unittest
 
-import duplicity.backend
+from . import UnitTestCase
 from duplicity import log
-from duplicity import globals
 from duplicity import path
 from duplicity import util
 from duplicity.errors import BackendException
-from . import UnitTestCase
+import duplicity.backend
 
 
 class BackendInstanceBase(UnitTestCase):

=== modified file 'testing/unit/test_collections.py'
--- testing/unit/test_collections.py	2020-02-12 19:33:59 +0000
+++ testing/unit/test_collections.py	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 from __future__ import print_function
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -24,19 +24,17 @@
 from future import standard_library
 standard_library.install_aliases()
 
-import os
-import sys
+import pytest
 import random
-import pytest
 import unittest
 
+from . import UnitTestCase
+from duplicity import backend
+from duplicity import config
 from duplicity import dup_collections
-from duplicity import backend
-from duplicity import globals
+from duplicity import dup_time
+from duplicity import gpg
 from duplicity import path
-from duplicity import gpg
-from duplicity import dup_time
-from . import UnitTestCase
 
 filename_list1 = [b"duplicity-full.2002-08-17T16:17:01-07:00.manifest.gpg",
                   b"duplicity-full.2002-08-17T16:17:01-07:00.vol1.difftar.gpg",
@@ -87,7 +85,7 @@
 
         col_test_dir = path.Path(u"testfiles/collectionstest")
         archive_dir_path = col_test_dir.append(u"archive_dir")
-        self.set_global(u'archive_dir_path', archive_dir_path)
+        self.set_config(u'archive_dir_path', archive_dir_path)
         self.archive_dir_backend = backend.get_backend(u"file://testfiles/collectionstest"
                                                        u"/archive_dir")
 
@@ -98,13 +96,13 @@
 
     def set_gpg_profile(self):
         u"""Set gpg profile to standard "foobar" sym"""
-        self.set_global(u'gpg_profile', gpg.GPGProfile(passphrase=u"foobar"))
+        self.set_config(u'gpg_profile', gpg.GPGProfile(passphrase=u"foobar"))
 
     def test_backup_chains(self):
         u"""Test basic backup chain construction"""
         random.shuffle(filename_list1)
-        cs = dup_collections.CollectionsStatus(None, globals.archive_dir_path, u"full")
-        chains, orphaned, incomplete = cs.get_backup_chains(filename_list1)  # @UnusedVariable
+        cs = dup_collections.CollectionsStatus(None, config.archive_dir_path, u"full")
+        chains, orphaned, incomplete = cs.get_backup_chains(filename_list1)
         if len(chains) != 1 or len(orphaned) != 0:
             print(chains)
             print(orphaned)
@@ -124,26 +122,26 @@
             assert cs.matched_chain_pair[0].end_time == 1029826800
             assert len(cs.all_backup_chains) == 1, cs.all_backup_chains
 
-        cs = dup_collections.CollectionsStatus(self.real_backend, globals.archive_dir_path, u"full").set_values()
+        cs = dup_collections.CollectionsStatus(self.real_backend, config.archive_dir_path, u"full").set_values()
         check_cs(cs)
         assert cs.matched_chain_pair[0].islocal()
 
     def test_sig_chain(self):
         u"""Test a single signature chain"""
-        chain = dup_collections.SignatureChain(1, globals.archive_dir_path)
+        chain = dup_collections.SignatureChain(1, config.archive_dir_path)
         for filename in local_sigchain_filename_list:
             assert chain.add_filename(filename)
         assert not chain.add_filename(b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
 
     def test_sig_chains(self):
         u"""Test making signature chains from filename list"""
-        cs = dup_collections.CollectionsStatus(None, globals.archive_dir_path, u"full")
+        cs = dup_collections.CollectionsStatus(None, config.archive_dir_path, u"full")
         chains, orphaned_paths = cs.get_signature_chains(local=1)
         self.sig_chains_helper(chains, orphaned_paths)
 
     def test_sig_chains2(self):
         u"""Test making signature chains from filename list on backend"""
-        cs = dup_collections.CollectionsStatus(self.archive_dir_backend, globals.archive_dir_path, u"full")
+        cs = dup_collections.CollectionsStatus(self.archive_dir_backend, config.archive_dir_path, u"full")
         chains, orphaned_paths = cs.get_signature_chains(local=None)
         self.sig_chains_helper(chains, orphaned_paths)
 
@@ -159,7 +157,7 @@
     def sigchain_fileobj_get(self, local):
         u"""Return chain, local if local is true with filenames added"""
         if local:
-            chain = dup_collections.SignatureChain(1, globals.archive_dir_path)
+            chain = dup_collections.SignatureChain(1, config.archive_dir_path)
             for filename in local_sigchain_filename_list:
                 assert chain.add_filename(filename)
         else:
@@ -201,7 +199,7 @@
             p = self.output_dir.append(filename)
             p.touch()
 
-        cs = dup_collections.CollectionsStatus(self.output_dir_backend, globals.archive_dir_path, u"full")
+        cs = dup_collections.CollectionsStatus(self.output_dir_backend, config.archive_dir_path, u"full")
         cs.set_values()
         return cs
 
@@ -217,7 +215,7 @@
                       b"duplicity-full.2002-08-15T01:01:01-07:00.vol1.difftar.gpg",
                       b"duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.manifest.gpg",
                       b"duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.vol1.difftar.gpg"]
-        local_received_list, remote_received_list = cs.get_extraneous()  # @UnusedVariable
+        local_received_list, remote_received_list = cs.get_extraneous()
         errors = []
         for filename in remote_received_list:
             if filename not in right_list:

=== modified file 'testing/unit/test_diffdir.py'
--- testing/unit/test_diffdir.py	2019-08-18 19:21:02 +0000
+++ testing/unit/test_diffdir.py	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 from __future__ import print_function
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -21,17 +21,18 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 from __future__ import print_function
-from builtins import object
 from future import standard_library
 standard_library.install_aliases()
+from builtins import object
 
+import os
 import unittest
 
-from duplicity.path import *  # @UnusedWildImport
 from duplicity import diffdir
 from duplicity import selection
+from duplicity import tarfile
 from duplicity import util
-from duplicity import tarfile  # @Reimport
+from duplicity.path import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 from . import UnitTestCase
 
 
@@ -60,8 +61,8 @@
         diffdir.write_block_iter(sigtar, u"testfiles/output/sigtar")
 
         i = 0
-        for tarinfo in tarfile.TarFile(u"testfiles/output/sigtar", u"r"):  # @UnusedVariable
-            i += 1
+        for tarinfo in tarfile.TarFile(u"testfiles/output/sigtar", u"r"):
+            i += 1
         assert i >= 5, u"There should be at least 5 files in sigtar"
 
     def empty_diff_schema(self, dirname):
@@ -100,8 +100,8 @@
         diffdir.write_block_iter(diffdir.DirDelta(select2, sigtar_fp),
                                  u"testfiles/output/difftar")
 
-        size = os.stat(u"testfiles/output/difftar").st_size  # @UnusedVariable
-
+        size = os.stat(u"testfiles/output/difftar").st_size
+
     def test_empty_diff2(self):
         u"""Test producing diff against directories of special files"""
         self.empty_diff_schema(u"testfiles/special_cases/neg_mtime")
@@ -169,8 +168,8 @@
         those produced by DirDelta_WriteSig and other methods.
 
         """
-        deltadir1 = Path(u"testfiles/output/dir.deltatar1")  # @UnusedVariable
-        deltadir2 = Path(u"testfiles/output/dir.deltatar2")  # @UnusedVariable
+        deltadir1 = Path(u"testfiles/output/dir.deltatar1")
+        deltadir2 = Path(u"testfiles/output/dir.deltatar2")
         cur_full_sigs = Path(u"testfiles/output/fullsig.dir1")
 
         cur_dir = Path(u"testfiles/dir1")
@@ -181,7 +180,7 @@
         sigstack = [cur_full_sigs]
         for dirname in [u'dir2', u'dir3', u'dir4']:
             # print "Processing ", dirname
-            old_dir = cur_dir  # @UnusedVariable
+            old_dir = cur_dir
             cur_dir = Path(u"testfiles/" + dirname)
 
             old_full_sigs = cur_full_sigs

=== modified file 'testing/unit/test_dup_temp.py'
--- testing/unit/test_dup_temp.py	2020-02-06 15:27:43 +0000
+++ testing/unit/test_dup_temp.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>

=== modified file 'testing/unit/test_dup_time.py'
--- testing/unit/test_dup_time.py	2019-08-18 19:21:02 +0000
+++ testing/unit/test_dup_time.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -20,15 +20,13 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 from __future__ import print_function
-from past.builtins import cmp
-from builtins import object
 from future import standard_library
 standard_library.install_aliases()
+from builtins import object
 
 import sys
 import unittest
 import time
-import types
 from duplicity import dup_time
 from . import UnitTestCase
 
@@ -53,7 +51,7 @@
 
     def testConversion_separator(self):
         u"""Same as testConversion, but change time Separator"""
-        self.set_global(u'time_separator', u"_")
+        self.set_config(u'time_separator', u"_")
         self.testConversion()
 
     def testCmp(self):
@@ -72,7 +70,7 @@
 
     def testCmp_separator(self):
         u"""Like testCmp but with new separator"""
-        self.set_global(u'time_separator', u"_")
+        self.set_config(u'time_separator', u"_")
         cmp = dup_time.cmp
         assert cmp(1, 2) == -1
         assert cmp(2, 2) == 0
@@ -156,14 +154,14 @@
 
     def setUp(self):
         super(TimeTest1, self).setUp()
-        self.set_global(u'old_filenames', False)
+        self.set_config(u'old_filenames', False)
 
 
 class TimeTest2(TimeTest, UnitTestCase):
 
     def setUp(self):
         super(TimeTest2, self).setUp()
-        self.set_global(u'old_filenames', True)
+        self.set_config(u'old_filenames', True)
 
 if __name__ == u'__main__':
     unittest.main()

=== modified file 'testing/unit/test_file_naming.py'
--- testing/unit/test_file_naming.py	2019-08-18 19:21:02 +0000
+++ testing/unit/test_file_naming.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -29,7 +29,7 @@
 from duplicity import dup_time
 from duplicity import file_naming
 from duplicity import log
-from duplicity import globals
+from duplicity import config
 from duplicity import util
 from . import UnitTestCase
 
@@ -91,18 +91,18 @@
     def test_more(self):
         u"""More file_parsing tests"""
         file_naming.prepare_regex(force=True)
-        pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"dns.h112bi.h14rg0.st.g")
+        pr = file_naming.parse(config.file_prefix + config.file_prefix_signature + b"dns.h112bi.h14rg0.st.g")
         assert pr, pr
         assert pr.type == u"new-sig"
         assert pr.end_time == 1029826800
 
-        if not globals.short_filenames:
-            pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
+        if not config.short_filenames:
+            pr = file_naming.parse(config.file_prefix + config.file_prefix_signature + b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
             assert pr, pr
             assert pr.type == u"new-sig"
             assert pr.end_time == 1029826800
 
-        pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"dfs.h5dixs.st.g")
+        pr = file_naming.parse(config.file_prefix + config.file_prefix_signature + b"dfs.h5dixs.st.g")
         assert pr, pr
         assert pr.type == u"full-sig"
         assert pr.time == 1036954144, repr(pr.time)
@@ -110,20 +110,20 @@
     def test_partial(self):
         u"""Test addition of partial flag"""
         file_naming.prepare_regex(force=True)
-        pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"dns.h112bi.h14rg0.st.p.g")
+        pr = file_naming.parse(config.file_prefix + config.file_prefix_signature + b"dns.h112bi.h14rg0.st.p.g")
         assert pr, pr
         assert pr.partial
         assert pr.type == u"new-sig"
         assert pr.end_time == 1029826800
 
-        if not globals.short_filenames:
-            pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg")
+        if not config.short_filenames:
+            pr = file_naming.parse(config.file_prefix + config.file_prefix_signature + b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg")
             assert pr, pr
             assert pr.partial
             assert pr.type == u"new-sig"
             assert pr.end_time == 1029826800
 
-        pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"dfs.h5dixs.st.p.g")
+        pr = file_naming.parse(config.file_prefix + config.file_prefix_signature + b"dfs.h5dixs.st.p.g")
         assert pr, pr
         assert pr.partial
         assert pr.type == u"full-sig"
@@ -134,24 +134,24 @@
     u"""Test long filename parsing and generation"""
     def setUp(self):
         super(FileNamingLong, self).setUp()
-        self.set_global(u'short_filenames', 0)
+        self.set_config(u'short_filenames', 0)
 
 
 class FileNamingShort(UnitTestCase, FileNamingBase):
     u"""Test short filename parsing and generation"""
     def setUp(self):
         super(FileNamingShort, self).setUp()
-        self.set_global(u'short_filenames', 1)
+        self.set_config(u'short_filenames', 1)
 
 
 class FileNamingPrefixes(UnitTestCase, FileNamingBase):
     u"""Test filename parsing and generation with prefixes"""
     def setUp(self):
         super(FileNamingPrefixes, self).setUp()
-        self.set_global(u'file_prefix', b"global-")
-        self.set_global(u'file_prefix_manifest', b"mani-")
-        self.set_global(u'file_prefix_signature', b"sign-")
-        self.set_global(u'file_prefix_archive', b"arch-")
+        self.set_config(u'file_prefix', b"global-")
+        self.set_config(u'file_prefix_manifest', b"mani-")
+        self.set_config(u'file_prefix_signature', b"sign-")
+        self.set_config(u'file_prefix_archive', b"arch-")
 
 
 if __name__ == u"__main__":

=== modified file 'testing/unit/test_globmatch.py'
--- testing/unit/test_globmatch.py	2020-01-17 16:51:15 +0000
+++ testing/unit/test_globmatch.py	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -26,11 +26,10 @@
 standard_library.install_aliases()
 
 import sys
-import unittest
 
-from duplicity.globmatch import *
-from duplicity.path import *
 from . import UnitTestCase
+from duplicity.globmatch import *  # pylint: disable=unused-wildcard-import,redefined-builtin
+from duplicity.path import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 from mock import patch
 
 

=== modified file 'testing/unit/test_gpg.py'
--- testing/unit/test_gpg.py	2020-01-16 17:02:32 +0000
+++ testing/unit/test_gpg.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -31,7 +31,6 @@
 import platform
 import pytest
 import random
-import sys
 import unittest
 
 from duplicity import gpg
@@ -146,7 +145,7 @@
         size = 400 * 1000
         gwfh = GPGWriteFile_Helper()
         profile = gpg.GPGProfile(passphrase=u"foobar")
-        for i in range(10):  # @UnusedVariable
+        for i in range(10):
             gpg.GPGWriteFile(gwfh, u"testfiles/output/gpgwrite.gpg",
                              profile, size=size)
             # print os.stat("testfiles/output/gpgwrite.gpg").st_size-size
@@ -160,7 +159,7 @@
         u"""Test GzipWriteFile"""
         size = 400 * 1000
         gwfh = GPGWriteFile_Helper()
-        for i in range(10):  # @UnusedVariable
+        for i in range(10):
             gpg.GzipWriteFile(gwfh, u"testfiles/output/gzwrite.gz",
                               size=size)
             # print os.stat("testfiles/output/gzwrite.gz").st_size-size
@@ -215,8 +214,8 @@
         self.unpack_testfiles()
 
     def test_sha(self):
-        hash = gpg.get_hash(u"SHA1", path.Path(u"testfiles/various_file_types/regular_file"))
-        assert hash == u"886d722999862724e1e62d0ac51c468ee336ef8e", hash
+        testhash = gpg.get_hash(u"SHA1", path.Path(u"testfiles/various_file_types/regular_file"))
+        assert testhash == u"886d722999862724e1e62d0ac51c468ee336ef8e", testhash
 
 
 if __name__ == u"__main__":

=== modified file 'testing/unit/test_gpginterface.py'
--- testing/unit/test_gpginterface.py	2019-06-29 15:35:07 +0000
+++ testing/unit/test_gpginterface.py	2020-03-22 12:35:54 +0000
@@ -1,3 +1,5 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
+#
 u"""py-unit tests for GnuPG
 
 COPYRIGHT:
@@ -29,7 +31,6 @@
 import unittest
 
 import tempfile
-import sys
 
 from duplicity import gpginterface
 
@@ -57,8 +58,7 @@
         self.gnupg.options.meta_interactive = 0
         self.gnupg.options.extra_args.append(u'--no-secmem-warning')
 
-    def do_create_fh_operation(self, args, input,
-                               passphrase=None):
+    def do_create_fh_operation(self, args, input, passphrase=None):  # pylint: disable=redefined-builtin
         creations = [u'stdin', u'stdout']
 
         # Make sure we're getting the passphrase to GnuPG
@@ -96,15 +96,15 @@
             u"No way to send the passphrase to GnuPG!"
 
         creations = []
-        # We'll handle the passphrase manually
-        if passphrase is not None:
-            proc.handles.append(u'passphrase')  # @UndefinedVariable
-
         attachments = {u'stdin': stdin, u'stdout': stdout}
 
         proc = self.gnupg.run(args, create_fhs=creations,
                               attach_fhs=attachments)
 
+        # We'll handle the passphrase manually
+        if passphrase is not None:
+            proc.handles.append(u'passphrase')
+
         if passphrase is not None:
             proc.handles[u'passphrase'].write(passphrase)
             proc.handles[u'passphrase'].close()

=== modified file 'testing/unit/test_lazy.py'
--- testing/unit/test_lazy.py	2019-06-29 15:35:07 +0000
+++ testing/unit/test_lazy.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -19,6 +19,8 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
+# pylint: disable=no-value-for-parameter
+
 from __future__ import print_function
 from future import standard_library
 standard_library.install_aliases()
@@ -30,7 +32,7 @@
 import sys
 from functools import reduce
 
-from duplicity.lazy import *  # @UnusedWildImport
+from duplicity.lazy import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 from . import UnitTestCase
 
 
@@ -250,7 +252,7 @@
         u"""Test splitting iterator into three"""
         counter = [0]
 
-        def ff(x):
+        def ff(x):  # pylint: disable=unused-argument
             counter[0] += 1
 
         i_orig = self.one_to_100()
@@ -268,12 +270,12 @@
 
 
 class ITRBadder(ITRBranch):
-    def start_process(self, index):
+    def start_process(self, index):  # pylint: disable=unused-argument
         self.total = 0
 
     def end_process(self):
         if self.base_index:
-            summand = self.base_index[-1]
+            summand = self.base_index[-1]  # pylint: disable=unsubscriptable-object
             # print "Adding ", summand
             self.total += summand
 
@@ -283,7 +285,7 @@
 
 
 class ITRBadder2(ITRBranch):
-    def start_process(self, index):
+    def start_process(self, index):  # pylint: disable=unused-argument
         self.total = 0
 
     def end_process(self):

=== modified file 'testing/unit/test_manifest.py'
--- testing/unit/test_manifest.py	2019-06-29 15:35:07 +0000
+++ testing/unit/test_manifest.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -23,13 +23,10 @@
 from future import standard_library
 standard_library.install_aliases()
 
-from io import StringIO
 import re
-import sys
-import types
 import unittest
 
-from duplicity import globals
+from duplicity import config
 from duplicity import manifest
 from duplicity import path
 
@@ -88,11 +85,11 @@
 
     def setUp(self):
         UnitTestCase.setUp(self)
-        self.old_files_changed = globals.file_changed
-        globals.file_changed = u'testing'
+        self.old_files_changed = config.file_changed
+        config.file_changed = u'testing'
 
     def tearDown(self):
-        globals.file_changed = self.old_files_changed
+        config.file_changed = self.old_files_changed
 
     def test_basic(self):
         vi1 = manifest.VolumeInfo()
@@ -105,7 +102,7 @@
         for vi in [vi1, vi2, vi3]:
             m.add_volume_info(vi)
 
-        self.set_global(u'local_path', path.Path(u"Foobar"))
+        self.set_config(u'local_path', path.Path(u"Foobar"))
         m.set_dirinfo()
         m.set_files_changed_info([])
 
@@ -127,7 +124,7 @@
         for vi in [vi1, vi2, vi3]:
             m.add_volume_info(vi)
 
-        self.set_global(u'local_path', path.Path(u"Foobar"))
+        self.set_config(u'local_path', path.Path(u"Foobar"))
         m.set_dirinfo()
         m.set_files_changed_info([
             (b'one', b'new'),

=== modified file 'testing/unit/test_patchdir.py'
--- testing/unit/test_patchdir.py	2019-08-18 19:21:02 +0000
+++ testing/unit/test_patchdir.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -20,22 +20,23 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 from __future__ import print_function
+from future import standard_library
+standard_library.install_aliases()
 from builtins import map
 from builtins import object
 from builtins import range
-from future import standard_library
-standard_library.install_aliases()
 
 import io
+import os
 import unittest
 
 from duplicity import diffdir
 from duplicity import patchdir
-from duplicity import log  # @UnusedImport
 from duplicity import selection
-from duplicity import tarfile  # @UnusedImport
-from duplicity import librsync  # @UnusedImport
-from duplicity.path import *  # @UnusedWildImport
+from duplicity import tarfile
+from duplicity import librsync
+from duplicity.lazy import *  # pylint: disable=unused-wildcard-import,redefined-builtin
+from duplicity.path import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 from . import UnitTestCase
 
 

=== modified file 'testing/unit/test_path.py'
--- testing/unit/test_path.py	2020-01-18 13:07:05 +0000
+++ testing/unit/test_path.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -23,11 +23,10 @@
 from future import standard_library
 standard_library.install_aliases()
 
-import sys
+import os
 import unittest
 
-from duplicity import log  # @UnusedImport
-from duplicity.path import *  # @UnusedWildImport
+from duplicity.path import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 from . import UnitTestCase
 
 

=== modified file 'testing/unit/test_selection.py'
--- testing/unit/test_selection.py	2019-06-29 15:35:07 +0000
+++ testing/unit/test_selection.py	2020-03-22 12:35:54 +0000
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -25,13 +25,11 @@
 from future import standard_library
 standard_library.install_aliases()
 
-import types
 import io
 import unittest
-import duplicity.path
 
-from duplicity.selection import *  # @UnusedWildImport
-from duplicity.lazy import *  # @UnusedWildImport
+from duplicity.selection import *  # pylint: disable=unused-wildcard-import,redefined-builtin
+from duplicity.lazy import *  # pylint: disable=unused-wildcard-import,redefined-builtin
 from . import UnitTestCase
 from mock import patch
 
@@ -580,7 +578,7 @@
 
     def test_filelist_null_separator(self):
         u"""test_filelist, but with null_separator set"""
-        self.set_global(u"null_separator", 1)
+        self.set_config(u"null_separator", 1)
         self.ParseTest([(u"--include-filelist", u"file")],
                        [(), (u"1",), (u"1", u"1"), (u"1", u"1", u"2"),
                         (u"1", u"1", u"3")],

=== modified file 'testing/unit/test_statistics.py'
--- testing/unit/test_statistics.py	2019-06-29 15:35:07 +0000
+++ testing/unit/test_statistics.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -23,12 +23,11 @@
 from future import standard_library
 standard_library.install_aliases()
 
-import sys
 import unittest
 
-from duplicity.statistics import *  # @UnusedWildImport
+from . import UnitTestCase
 from duplicity import path
-from . import UnitTestCase
+from duplicity.statistics import *  # pylint: disable=redefined-builtin, unused-wildcard-import
 
 
 class StatsObjTest(UnitTestCase):

=== modified file 'testing/unit/test_tarfile.py'
--- testing/unit/test_tarfile.py	2019-06-29 15:35:07 +0000
+++ testing/unit/test_tarfile.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2013 Michael Terry <mike@xxxxxxxxxxx>
 #

=== modified file 'testing/unit/test_tempdir.py'
--- testing/unit/test_tempdir.py	2019-06-29 15:35:07 +0000
+++ testing/unit/test_tempdir.py	2020-03-22 12:35:54 +0000
@@ -1,4 +1,4 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
 # Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
@@ -39,7 +39,7 @@
         self.assertTrue(td.mktemp() != td.mktemp())
 
         # create and remove a temp dir
-        dir = td.mktemp()
+        dir = td.mktemp()  # pylint: disable=redefined-builtin
         os.mkdir(dir)
         os.rmdir(dir)
 

=== modified file 'testing/unit/test_util.py'
--- testing/unit/test_util.py	2019-06-29 15:35:07 +0000
+++ testing/unit/test_util.py	2020-03-22 12:35:54 +0000
@@ -1,3 +1,4 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
 #
 # This file is part of duplicity.
 #

=== modified file 'tox.ini'
--- tox.ini	2020-02-24 18:48:50 +0000
+++ tox.ini	2020-03-22 12:35:54 +0000
@@ -69,6 +69,7 @@
 
 [pytest]
 testpaths = testing/unit testing/functional
+addopts = --failed-first --maxfail=1
 
 
 [pycodestyle]


Follow ups