
duplicity-team team mailing list archive

[Merge] lp:~mterry/duplicity/2.6isms into lp:duplicity

 

Michael Terry has proposed merging lp:~mterry/duplicity/2.6isms into lp:duplicity.

Requested reviews:
  duplicity-team (duplicity-team)

For more details, see:
https://code.launchpad.net/~mterry/duplicity/2.6isms/+merge/216404

Here's a whole stack of minor syntax modernizations that will become necessary in Python 3.  They all work in Python 2.6.
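
To give a feel for the kind of change involved before you dive into the diff below, here is a minimal sketch (illustrative only, not lifted verbatim from the patch) of the old spellings and their Python 2.6/Python 3 equivalents:

    # Old spellings that 2to3 flags        ->  portable replacement
    #   except ExcType, e:                 ->  except ExcType as e:
    #   d.has_key('size')                  ->  'size' in d
    #   0L / long(x) / sys.maxint          ->  0 / int(x) / sys.maxsize
    #   import sibling_module              ->  from . import sibling_module
    #   raise SomeError, msg               ->  raise SomeError(msg)

    d = {'size': -1}

    try:
        value = d['missing']
    except KeyError as e:      # 'as' form works from Python 2.6 on and in Python 3
        value = None

    if 'size' in d:            # membership test replaces dict.has_key()
        total = 0              # plain int literal replaces 0L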

I've added a new test to keep us honest and prevent backsliding on these modernizations.  It runs 2to3 and fails if 2to3 finds anything that needs fixing (with a specific set of exceptions carved out).
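
The test itself isn't reproduced in this message, but roughly it could look like the following sketch (the fixer list, the scanned directory, and the class/method names here are assumptions for illustration, not the branch's actual code):

    import subprocess
    import unittest

    # Fixers deliberately skipped -- the "exceptions carved out".
    # This list is illustrative only.
    SKIPPED_FIXERS = ['print', 'unicode', 'import']

    class Python3CompatTest(unittest.TestCase):
        def test_2to3_finds_nothing(self):
            cmd = ['2to3']
            for fixer in SKIPPED_FIXERS:
                cmd.extend(['-x', fixer])
            cmd.append('duplicity')                       # source tree to scan
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            output = proc.communicate()[0].decode('utf-8', 'replace')
            # 2to3 prints a unified diff for each file it would change, so any
            # '--- ' header means there is still unported code.
            self.assertTrue('--- ' not in output,
                            "2to3 still wants to change something:\n%s" % output)

    if __name__ == '__main__':
        unittest.main()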

This branch has most of the easy 2to3 fixes, the ones with obvious and safe syntax changes.

We could just let 2to3 do these fixes for us, but ideally we use 2to3 as little as possible, since it doesn't always know how to solve a given problem.  I will propose a branch later that actually does use 2to3 to generate Python 3 versions of duplicity if they are requested.  But this is a first step to clean up the code base.
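
Purely as an illustration of that later step (not the actual follow-up branch, and not duplicity's real setup.py), a build-time conversion could lean on distutils' build_py_2to3 command when running under Python 3:

    from distutils.core import setup

    try:
        # Under Python 3, distutils can run 2to3 over the sources at build time.
        from distutils.command.build_py import build_py_2to3 as build_py
    except ImportError:
        # Under Python 2, build normally -- the code already runs there.
        from distutils.command.build_py import build_py

    setup(name='duplicity-sketch',       # hypothetical package metadata
          version='0.0',
          packages=['duplicity'],
          cmdclass={'build_py': build_py})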
-- 
https://code.launchpad.net/~mterry/duplicity/2.6isms/+merge/216404
Your team duplicity-team is requested to review the proposed merge of lp:~mterry/duplicity/2.6isms into lp:duplicity.
=== modified file 'bin/duplicity'
--- bin/duplicity	2014-04-16 02:43:43 +0000
+++ bin/duplicity	2014-04-17 22:26:47 +0000
@@ -1042,7 +1042,7 @@
         log.Notice(_("Deleting local %s (not authoritative at backend).") % util.ufn(del_name))
         try:
             util.ignore_missing(os.unlink, del_name)
-        except Exception, e:
+        except Exception as e:
             log.Warn(_("Unable to delete %s: %s") % (util.ufn(del_name), str(e)))
 
     def copy_to_local(fn):
@@ -1505,18 +1505,18 @@
     # sys.exit() function.  Python handles this by
     # raising the SystemExit exception.  Cleanup code
     # goes here, if needed.
-    except SystemExit, e:
+    except SystemExit as e:
         # No traceback, just get out
         util.release_lockfile()
         sys.exit(e)
 
-    except KeyboardInterrupt, e:
+    except KeyboardInterrupt as e:
         # No traceback, just get out
         log.Info(_("INT intercepted...exiting."))
         util.release_lockfile()
         sys.exit(4)
 
-    except gpg.GPGError, e:
+    except gpg.GPGError as e:
         # For gpg errors, don't show an ugly stack trace by
         # default. But do with sufficient verbosity.
         util.release_lockfile()
@@ -1526,7 +1526,7 @@
                        log.ErrorCode.gpg_failed,
                        e.__class__.__name__)
 
-    except duplicity.errors.UserError, e:
+    except duplicity.errors.UserError as e:
         util.release_lockfile()
         # For user errors, don't show an ugly stack trace by
         # default. But do with sufficient verbosity.
@@ -1536,7 +1536,7 @@
                        log.ErrorCode.user_error,
                        e.__class__.__name__)
 
-    except duplicity.errors.BackendException, e:
+    except duplicity.errors.BackendException as e:
         util.release_lockfile()
         # For backend errors, don't show an ugly stack trace by
         # default. But do with sufficient verbosity.
@@ -1546,7 +1546,7 @@
                        log.ErrorCode.user_error,
                        e.__class__.__name__)
 
-    except Exception, e:
+    except Exception as e:
         util.release_lockfile()
         if "Forced assertion for testing" in str(e):
             log.FatalError(u"%s: %s" % (e.__class__.__name__, unicode(e)),

=== modified file 'bin/rdiffdir'
--- bin/rdiffdir	2014-02-05 02:57:01 +0000
+++ bin/rdiffdir	2014-04-17 22:26:47 +0000
@@ -64,7 +64,7 @@
                                        "include-filelist-stdin", "include-globbing-filelist",
                                        "include-regexp=", "max-blocksize", "null-separator",
                                        "verbosity=", "write-sig-to="])
-    except getopt.error, e:
+    except getopt.error as e:
         command_line_error("Bad command line option: %s" % (str(e),))
 
     for opt, arg in optlist:

=== modified file 'duplicity/backend.py'
--- duplicity/backend.py	2014-04-17 19:02:22 +0000
+++ duplicity/backend.py	2014-04-17 22:26:47 +0000
@@ -306,7 +306,7 @@
             try:
                 kwargs = {"raise_errors" : True}
                 return fn(*args, **kwargs)
-            except Exception, e:
+            except Exception as e:
                 log.Warn(_("Attempt %s failed: %s: %s")
                          % (n, e.__class__.__name__, str(e)))
                 log.Debug(_("Backtrace of previous error: %s")
@@ -332,10 +332,10 @@
                 try:
                     self.retry_count = n
                     return fn(self, *args)
-                except FatalBackendError, e:
+                except FatalBackendError as e:
                     # die on fatal errors
                     raise e
-                except Exception, e:
+                except Exception as e:
                     # retry on anything else
                     log.Warn(_("Attempt %s failed. %s: %s")
                              % (n, e.__class__.__name__, str(e)))
@@ -345,7 +345,7 @@
         # final trial, die on exception
             self.retry_count = n+1
             return fn(self, *args)
-        except Exception, e:
+        except Exception as e:
             log.Debug(_("Backtrace of previous error: %s")
                         % exception_traceback())
             log.FatalError(_("Giving up after %s attempts. %s: %s")

=== modified file 'duplicity/backends/_boto_multi.py'
--- duplicity/backends/_boto_multi.py	2014-04-09 09:22:27 +0000
+++ duplicity/backends/_boto_multi.py	2014-04-17 22:26:47 +0000
@@ -33,8 +33,8 @@
 from duplicity.filechunkio import FileChunkIO
 from duplicity import progress
 
-from _boto_single import BotoBackend as BotoSingleBackend
-from _boto_single import get_connection
+from ._boto_single import BotoBackend as BotoSingleBackend
+from ._boto_single import get_connection
 
 BOTO_MIN_VERSION = "2.1.1"
 
@@ -63,7 +63,7 @@
             try:
                 args = self.queue.get(True, 1)
                 progress.report_transfer(args[0], args[1])
-            except Queue.Empty, e:
+            except Queue.Empty as e:
                 pass
 
 
@@ -210,7 +210,7 @@
             conn = None
             bucket = None
             del conn
-        except Exception, e:
+        except Exception as e:
             traceback.print_exc()
             if num_retries:
                 log.Debug("%s: Upload of chunk %d failed. Retrying %d more times..." % (

=== modified file 'duplicity/backends/_boto_single.py'
--- duplicity/backends/_boto_single.py	2014-04-09 09:55:21 +0000
+++ duplicity/backends/_boto_single.py	2014-04-17 22:26:47 +0000
@@ -202,7 +202,7 @@
             try:
                 try:
                     self.bucket = self.conn.get_bucket(self.bucket_name, validate=True)
-                except Exception, e:
+                except Exception as e:
                     if "NoSuchBucket" in str(e):
                         if globals.s3_european_buckets:
                             self.bucket = self.conn.create_bucket(self.bucket_name,
@@ -211,7 +211,7 @@
                             self.bucket = self.conn.create_bucket(self.bucket_name)
                     else:
                         raise e
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Failed to create bucket (attempt #%d) '%s' failed (reason: %s: %s)"
                          "" % (n, self.bucket_name,
                                e.__class__.__name__,
@@ -252,7 +252,7 @@
                 self.resetConnection()
                 log.Debug("Uploaded %s/%s to %s Storage at roughly %f bytes/second" % (self.straight_url, remote_filename, storage_class, rough_upload_speed))
                 return
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Upload '%s/%s' failed (attempt #%d, reason: %s: %s)"
                          "" % (self.straight_url,
                                remote_filename,
@@ -279,7 +279,7 @@
                 key.get_contents_to_filename(local_path.name)
                 local_path.setdata()
                 return
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Download %s/%s failed (attempt #%d, reason: %s: %s)"
                          "" % (self.straight_url,
                                remote_filename,
@@ -304,7 +304,7 @@
             log.Info("Listing %s" % self.straight_url)
             try:
                 return self._list_filenames_in_bucket()
-            except Exception, e:
+            except Exception as e:
                 log.Warn("List %s failed (attempt #%d, reason: %s: %s)"
                          "" % (self.straight_url,
                                n,
@@ -348,7 +348,7 @@
             if key is None:
                 return {'size': -1}
             return {'size': key.size}
-        except Exception, e:
+        except Exception as e:
             log.Warn("Query %s/%s failed: %s"
                      "" % (self.straight_url,
                            filename,
@@ -368,7 +368,7 @@
 
     def pre_process_download(self, files_to_download, wait=False):
         # Used primarily to move files in Glacier to S3
-        if isinstance(files_to_download, basestring):
+        if isinstance(files_to_download, (bytes, str, unicode)):
             files_to_download = [files_to_download]
 
         for remote_filename in files_to_download:
@@ -397,7 +397,7 @@
                             log.Info("File %s was successfully restored from Glacier" % remote_filename)
                     success = True
                     break
-                except Exception, e:
+                except Exception as e:
                     log.Warn("Restoration from Glacier for file %s/%s failed (attempt #%d, reason: %s: %s)"
                              "" % (self.straight_url,
                                    remote_filename,

=== modified file 'duplicity/backends/_cf_cloudfiles.py'
--- duplicity/backends/_cf_cloudfiles.py	2013-12-27 06:39:00 +0000
+++ duplicity/backends/_cf_cloudfiles.py	2014-04-17 22:26:47 +0000
@@ -44,17 +44,17 @@
         self.resp_exc = ResponseError
         conn_kwargs = {}
 
-        if not os.environ.has_key('CLOUDFILES_USERNAME'):
+        if 'CLOUDFILES_USERNAME' not in os.environ:
             raise BackendException('CLOUDFILES_USERNAME environment variable'
                                    'not set.')
 
-        if not os.environ.has_key('CLOUDFILES_APIKEY'):
+        if 'CLOUDFILES_APIKEY' not in os.environ:
             raise BackendException('CLOUDFILES_APIKEY environment variable not set.')
 
         conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME']
         conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY']
 
-        if os.environ.has_key('CLOUDFILES_AUTHURL'):
+        if 'CLOUDFILES_AUTHURL' in os.environ:
             conn_kwargs['authurl'] = os.environ['CLOUDFILES_AUTHURL']
         else:
             conn_kwargs['authurl'] = consts.default_authurl
@@ -63,7 +63,7 @@
 
         try:
             conn = Connection(**conn_kwargs)
-        except Exception, e:
+        except Exception as e:
             log.FatalError("Connection failed, please check your credentials: %s %s"
                            % (e.__class__.__name__, str(e)),
                            log.ErrorCode.connection_failed)
@@ -79,10 +79,10 @@
                 sobject = self.container.create_object(remote_filename)
                 sobject.load_from_filename(source_path.name)
                 return
-            except self.resp_exc, error:
+            except self.resp_exc as error:
                 log.Warn("Upload of '%s' failed (attempt %d): CloudFiles returned: %s %s"
                          % (remote_filename, n, error.status, error.reason))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Upload of '%s' failed (attempt %s): %s: %s"
                         % (remote_filename, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -102,10 +102,10 @@
                     f.write(chunk)
                 local_path.setdata()
                 return
-            except self.resp_exc, resperr:
+            except self.resp_exc as resperr:
                 log.Warn("Download of '%s' failed (attempt %s): CloudFiles returned: %s %s"
                          % (remote_filename, n, resperr.status, resperr.reason))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Download of '%s' failed (attempt %s): %s: %s"
                          % (remote_filename, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -128,10 +128,10 @@
                     objs = self.container.list_objects(marker=keys[-1])
                     keys += objs
                 return keys
-            except self.resp_exc, resperr:
+            except self.resp_exc as resperr:
                 log.Warn("Listing of '%s' failed (attempt %s): CloudFiles returned: %s %s"
                          % (self.container, n, resperr.status, resperr.reason))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Listing of '%s' failed (attempt %s): %s: %s"
                          % (self.container, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -148,14 +148,14 @@
             try:
                 self.container.delete_object(remote_filename)
                 return
-            except self.resp_exc, resperr:
+            except self.resp_exc as resperr:
                 if n > 1 and resperr.status == 404:
                     # We failed on a timeout, but delete succeeded on the server
                     log.Warn("Delete of '%s' missing after retry - must have succeded earler" % remote_filename )
                     return
                 log.Warn("Delete of '%s' failed (attempt %s): CloudFiles returned: %s %s"
                          % (remote_filename, n, resperr.status, resperr.reason))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Delete of '%s' failed (attempt %s): %s: %s"
                          % (remote_filename, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -179,7 +179,7 @@
             return {'size': sobject.size}
         except NoSuchObject:
             return {'size': -1}
-        except Exception, e:
+        except Exception as e:
             log.Warn("Error querying '%s/%s': %s"
                      "" % (self.container,
                            filename,

=== modified file 'duplicity/backends/_cf_pyrax.py'
--- duplicity/backends/_cf_pyrax.py	2013-12-27 06:39:00 +0000
+++ duplicity/backends/_cf_pyrax.py	2014-04-17 22:26:47 +0000
@@ -45,24 +45,24 @@
 
         conn_kwargs = {}
 
-        if not os.environ.has_key('CLOUDFILES_USERNAME'):
+        if 'CLOUDFILES_USERNAME' not in os.environ:
             raise BackendException('CLOUDFILES_USERNAME environment variable'
                                    'not set.')
 
-        if not os.environ.has_key('CLOUDFILES_APIKEY'):
+        if 'CLOUDFILES_APIKEY' not in os.environ:
             raise BackendException('CLOUDFILES_APIKEY environment variable not set.')
 
         conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME']
         conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY']
 
-        if os.environ.has_key('CLOUDFILES_REGION'):
+        if 'CLOUDFILES_REGION' in os.environ:
             conn_kwargs['region'] = os.environ['CLOUDFILES_REGION']
 
         container = parsed_url.path.lstrip('/')
 
         try:
             pyrax.set_credentials(**conn_kwargs)
-        except Exception, e:
+        except Exception as e:
             log.FatalError("Connection failed, please check your credentials: %s %s"
                            % (e.__class__.__name__, str(e)),
                            log.ErrorCode.connection_failed)
@@ -81,10 +81,10 @@
             try:
                 self.container.upload_file(source_path.name, remote_filename)
                 return
-            except self.client_exc, error:
+            except self.client_exc as error:
                 log.Warn("Upload of '%s' failed (attempt %d): pyrax returned: %s %s"
                          % (remote_filename, n, error.__class__.__name__, error.message))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Upload of '%s' failed (attempt %s): %s: %s"
                         % (remote_filename, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -105,10 +105,10 @@
                 return
             except self.nso_exc:
                 return
-            except self.client_exc, resperr:
+            except self.client_exc as resperr:
                 log.Warn("Download of '%s' failed (attempt %s): pyrax returned: %s %s"
                          % (remote_filename, n, resperr.__class__.__name__, resperr.message))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Download of '%s' failed (attempt %s): %s: %s"
                          % (remote_filename, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -131,10 +131,10 @@
                     objs = self.container.get_object_names(marker = keys[-1])
                     keys += objs
                 return keys
-            except self.client_exc, resperr:
+            except self.client_exc as resperr:
                 log.Warn("Listing of '%s' failed (attempt %s): pyrax returned: %s %s"
                          % (self.container, n, resperr.__class__.__name__, resperr.message))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Listing of '%s' failed (attempt %s): %s: %s"
                          % (self.container, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -151,14 +151,14 @@
             try:
                 self.container.delete_object(remote_filename)
                 return
-            except self.client_exc, resperr:
+            except self.client_exc as resperr:
                 if n > 1 and resperr.status == 404:
                     # We failed on a timeout, but delete succeeded on the server
                     log.Warn("Delete of '%s' missing after retry - must have succeded earler" % remote_filename)
                     return
                 log.Warn("Delete of '%s' failed (attempt %s): pyrax returned: %s %s"
                          % (remote_filename, n, resperr.__class__.__name__, resperr.message))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Delete of '%s' failed (attempt %s): %s: %s"
                          % (remote_filename, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -181,7 +181,7 @@
             return {'size': sobject.total_bytes}
         except self.nso_exc:
             return {'size': -1}
-        except Exception, e:
+        except Exception as e:
             log.Warn("Error querying '%s/%s': %s"
                      "" % (self.container,
                            filename,

=== modified file 'duplicity/backends/_ssh_paramiko.py'
--- duplicity/backends/_ssh_paramiko.py	2013-12-30 16:01:49 +0000
+++ duplicity/backends/_ssh_paramiko.py	2014-04-17 22:26:47 +0000
@@ -134,7 +134,7 @@
         try:
             if os.path.isfile("/etc/ssh/ssh_known_hosts"):
                 self.client.load_system_host_keys("/etc/ssh/ssh_known_hosts")
-        except Exception, e:
+        except Exception as e:
             raise BackendException("could not load /etc/ssh/ssh_known_hosts, maybe corrupt?")
         try:
             # use load_host_keys() to signal it's writable to paramiko
@@ -144,7 +144,7 @@
                 self.client.load_host_keys(file)
             else:
                 self.client._host_keys_filename = file
-        except Exception, e:
+        except Exception as e:
             raise BackendException("could not load ~/.ssh/known_hosts, maybe corrupt?")
 
         """ the next block reorganizes all host parameters into a
@@ -211,7 +211,7 @@
                                 allow_agent=True, 
                                 look_for_keys=True,
                                 key_filename=self.config['identityfile'])
-        except Exception, e:
+        except Exception as e:
             raise BackendException("ssh connection to %s@%s:%d failed: %s" % (
                                     self.config['user'],
                                     self.config['hostname'],
@@ -229,7 +229,7 @@
         else:
             try:
                 self.sftp=self.client.open_sftp()
-            except Exception, e:
+            except Exception as e:
                 raise BackendException("sftp negotiation failed: %s" % e)
 
 
@@ -244,17 +244,17 @@
                         continue
                     try:
                         attrs=self.sftp.stat(d)
-                    except IOError, e:
+                    except IOError as e:
                         if e.errno == errno.ENOENT:
                             try:
                                 self.sftp.mkdir(d)
-                            except Exception, e:
+                            except Exception as e:
                                 raise BackendException("sftp mkdir %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e))
                         else:
                             raise BackendException("sftp stat %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e))
                     try:
                         self.sftp.chdir(d)
-                    except Exception, e:
+                    except Exception as e:
                         raise BackendException("sftp chdir to %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e))
 
     def put(self, source_path, remote_filename = None):
@@ -275,7 +275,7 @@
                         chan=self.client.get_transport().open_session()
                         chan.settimeout(globals.timeout)
                         chan.exec_command("scp -t '%s'" % self.remote_dir) # scp in sink mode uses the arg as base directory
-                    except Exception, e:
+                    except Exception as e:
                         raise BackendException("scp execution failed: %s" % e)
                     # scp protocol: one 0x0 after startup, one after the Create meta, one after saving
                     # if there's a problem: 0x1 or 0x02 and some error text
@@ -298,9 +298,9 @@
                     try:
                         self.sftp.put(source_path.name,remote_filename)
                         return
-                    except Exception, e:
+                    except Exception as e:
                         raise BackendException("sftp put of %s (as %s) failed: %s" % (source_path.name,remote_filename,e))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay))
         raise BackendException("Giving up trying to upload '%s' after %d attempts" % (remote_filename,n))
 
@@ -320,7 +320,7 @@
                         chan=self.client.get_transport().open_session()
                         chan.settimeout(globals.timeout)
                         chan.exec_command("scp -f '%s/%s'" % (self.remote_dir,remote_filename))
-                    except Exception, e:
+                    except Exception as e:
                         raise BackendException("scp execution failed: %s" % e)
 
                     chan.send('\0')     # overall ready indicator
@@ -343,7 +343,7 @@
                             buff=chan.recv(blocksize)
                             f.write(buff)
                             togo-=len(buff)
-                    except Exception, e:
+                    except Exception as e:
                         raise BackendException("scp get %s failed: %s" % (remote_filename,e))
 
                     msg=chan.recv(1)    # check the final status
@@ -357,10 +357,10 @@
                     try:
                         self.sftp.get(remote_filename,local_path.name)
                         return
-                    except Exception, e:
+                    except Exception as e:
                         raise BackendException("sftp get of %s (to %s) failed: %s" % (remote_filename,local_path.name,e))
                 local_path.setdata()
-            except Exception, e:
+            except Exception as e:
                 log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay))
         raise BackendException("Giving up trying to download '%s' after %d attempts" % (remote_filename,n))
 
@@ -379,9 +379,9 @@
                 else:
                     try:
                         return self.sftp.listdir()
-                    except Exception, e:
+                    except Exception as e:
                         raise BackendException("sftp listing of %s failed: %s" % (self.sftp.getcwd(),e))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay))
         raise BackendException("Giving up trying to list '%s' after %d attempts" % (self.remote_dir,n))
 
@@ -397,12 +397,12 @@
                     else:
                         try:
                             self.sftp.remove(fn)
-                        except Exception, e:
+                        except Exception as e:
                             raise BackendException("sftp rm %s failed: %s" % (fn,e))
 
                     # If we get here, we deleted this file successfully. Move on to the next one.
                     break
-                except Exception, e:
+                except Exception as e:
                     if n == globals.num_retries:
                         log.FatalError(str(e), log.ErrorCode.backend_error)
                     else:
@@ -416,7 +416,7 @@
             chan=self.client.get_transport().open_session()
             chan.settimeout(globals.timeout)
             chan.exec_command(cmd)
-        except Exception, e:
+        except Exception as e:
             raise BackendException("%sexecution failed: %s" % (errorprefix,e))
         output=chan.recv(-1)
         res=chan.recv_exit_status()
@@ -434,7 +434,7 @@
         sshconfig = paramiko.SSHConfig()
         try:
             sshconfig.parse(open(file))
-        except Exception, e:
+        except Exception as e:
             raise BackendException("could not load '%s', maybe corrupt?" % (file))
         
         return sshconfig.lookup(host)

=== modified file 'duplicity/backends/botobackend.py'
--- duplicity/backends/botobackend.py	2014-04-16 20:45:09 +0000
+++ duplicity/backends/botobackend.py	2014-04-17 22:26:47 +0000
@@ -22,8 +22,8 @@
 
 import duplicity.backend
 from duplicity import globals
-from _boto_multi import BotoBackend as BotoMultiUploadBackend
-from _boto_single import BotoBackend as BotoSingleUploadBackend
+from ._boto_multi import BotoBackend as BotoMultiUploadBackend
+from ._boto_single import BotoBackend as BotoSingleUploadBackend
 
 if globals.s3_use_multiprocessing:
     duplicity.backend.register_backend("gs", BotoMultiUploadBackend)

=== modified file 'duplicity/backends/cfbackend.py'
--- duplicity/backends/cfbackend.py	2013-11-24 16:49:57 +0000
+++ duplicity/backends/cfbackend.py	2014-04-17 22:26:47 +0000
@@ -22,6 +22,6 @@
 
 if (globals.cf_backend and
     globals.cf_backend.lower().strip() == 'pyrax'):
-    import _cf_pyrax
+    from . import _cf_pyrax
 else:
-    import _cf_cloudfiles
+    from . import _cf_cloudfiles

=== modified file 'duplicity/backends/dpbxbackend.py'
--- duplicity/backends/dpbxbackend.py	2014-03-05 17:05:04 +0000
+++ duplicity/backends/dpbxbackend.py	2014-04-17 22:26:47 +0000
@@ -29,6 +29,7 @@
 import urllib
 import re
 import locale, sys
+from functools import reduce
 
 import traceback, StringIO
 from exceptions import Exception
@@ -80,14 +81,14 @@
 
             try:
                 return f(self, *args)
-            except TypeError, e:
+            except TypeError as e:
                 log_exception(e)
                 log.FatalError('dpbx type error "%s"' % (e,), log.ErrorCode.backend_code_error)
-            except rest.ErrorResponse, e:
+            except rest.ErrorResponse as e:
                 msg = e.user_error_msg or str(e)
                 log.Error('dpbx error: %s' % (msg,), log.ErrorCode.backend_command_error)
                 raise e
-            except Exception, e:
+            except Exception as e:
                 log_exception(e)
                 log.Error('dpbx code error "%s"' % (e,), log.ErrorCode.backend_code_error)
                 raise e
@@ -119,7 +120,7 @@
         
             def write_creds(self, token):
                 open(self.TOKEN_FILE, 'w').close() # create/reset file
-                os.chmod(self.TOKEN_FILE,0600)     # set it -rw------ (NOOP in Windows?)
+                os.chmod(self.TOKEN_FILE, 0o600)     # set it -rw------ (NOOP in Windows?)
                 # now write the content
                 f = open(self.TOKEN_FILE, 'w')
                 f.write("|".join([token.key, token.secret]))
@@ -159,7 +160,7 @@
         if not self.sess.is_linked():
           try: # to login to the box
             self.sess.link()
-          except rest.ErrorResponse, e:
+          except rest.ErrorResponse as e:
             log.FatalError('dpbx Error: %s\n' % str(e), log.ErrorCode.dpbx_nologin)
           if not self.sess.is_linked(): # stil not logged in
             log.FatalError("dpbx Cannot login: check your credentials",log.ErrorCode.dpbx_nologin)

=== modified file 'duplicity/backends/gdocsbackend.py'
--- duplicity/backends/gdocsbackend.py	2014-01-03 10:37:54 +0000
+++ duplicity/backends/gdocsbackend.py	2014-04-17 22:26:47 +0000
@@ -113,7 +113,7 @@
                 self.__handle_error("Failed to initialize upload of file '%s' to remote folder '%s'"
                          % (source_path.get_filename(), self.folder.title.text), raise_errors)
             assert not file.close()
-        except Exception, e:
+        except Exception as e:
             self.__handle_error("Failed to upload file '%s' to remote folder '%s': %s"
                                 % (source_path.get_filename(), self.folder.title.text, str(e)), raise_errors)
 
@@ -132,7 +132,7 @@
             else:
                 self.__handle_error("Failed to find file '%s' in remote folder '%s'"
                                     % (remote_filename, self.folder.title.text), raise_errors)
-        except Exception, e:
+        except Exception as e:
             self.__handle_error("Failed to download file '%s' in remote folder '%s': %s"
                                  % (remote_filename, self.folder.title.text, str(e)), raise_errors)
 
@@ -143,7 +143,7 @@
             entries = self.__fetch_entries(self.folder.resource_id.text,
                                            GDocsBackend.BACKUP_DOCUMENT_TYPE)
             return [entry.title.text for entry in entries]
-        except Exception, e:
+        except Exception as e:
             self.__handle_error("Failed to fetch list of files in remote folder '%s': %s"
                                 % (self.folder.title.text, str(e)), raise_errors)
 
@@ -166,7 +166,7 @@
                 else:
                     log.Warn("Failed to fetch file '%s' in remote folder '%s'"
                              % (filename, self.folder.title.text))
-            except Exception, e:
+            except Exception as e:
                 self.__handle_error("Failed to remove file '%s' in remote folder '%s': %s"
                                     % (filename, self.folder.title.text, str(e)), raise_errors)
 
@@ -184,7 +184,7 @@
                                      service='writely',
                                      captcha_token=captcha_token,
                                      captcha_response=captcha_response)
-        except gdata.client.CaptchaChallenge, challenge:
+        except gdata.client.CaptchaChallenge as challenge:
             print('A captcha challenge in required. Please visit ' + challenge.captcha_url)
             answer = None
             while not answer:
@@ -196,7 +196,7 @@
                                 'access code for using this Duplicity backend. Follow the instrucction in '
                                 'http://www.google.com/support/accounts/bin/static.py?page=guide.cs&guide=1056283&topic=1056286 '
                                 'and create your application-specific password to run duplicity backups.')
-        except Exception, e:
+        except Exception as e:
             self.__handle_error('Error while authenticating client: %s.' % str(e))
 
     def __fetch_entries(self, folder_id, type, title=None):
@@ -238,7 +238,7 @@
 
             # Done!
             return result
-        except Exception, e:
+        except Exception as e:
             self.__handle_error('Error while fetching remote entries: %s.' % str(e))
 
 duplicity.backend.register_backend('gdocs', GDocsBackend)

=== modified file 'duplicity/backends/giobackend.py'
--- duplicity/backends/giobackend.py	2013-12-27 06:39:00 +0000
+++ duplicity/backends/giobackend.py	2014-04-17 22:26:47 +0000
@@ -93,14 +93,14 @@
         # Now make the directory if it doesn't exist
         try:
             self.remote_file.make_directory_with_parents(None)
-        except GLib.GError, e:
+        except GLib.GError as e:
             if e.code != Gio.IOErrorEnum.EXISTS:
                 raise
 
     def done_with_mount(self, fileobj, result, loop):
         try:
             fileobj.mount_enclosing_volume_finish(result)
-        except GLib.GError, e:
+        except GLib.GError as e:
             # check for NOT_SUPPORTED because some schemas (e.g. file://) validly don't
             if e.code != Gio.IOErrorEnum.ALREADY_MOUNTED and e.code != Gio.IOErrorEnum.NOT_SUPPORTED:
                 log.FatalError(_("Connection failed, please check your password: %s")
@@ -132,7 +132,7 @@
             source.copy(target,
                         Gio.FileCopyFlags.OVERWRITE | Gio.FileCopyFlags.NOFOLLOW_SYMLINKS,
                         None, self.copy_progress, None)
-        except Exception, e:
+        except Exception as e:
             self.handle_error(raise_errors, e, op, source.get_parse_name(),
                               target.get_parse_name())
 
@@ -163,7 +163,7 @@
             while info:
                 files.append(info.get_name())
                 info = enum.next_file(None)
-        except Exception, e:
+        except Exception as e:
             self.handle_error(raise_errors, e, 'list',
                               self.remote_file.get_parse_name())
         return files
@@ -176,7 +176,7 @@
             target_file = self.remote_file.get_child(filename)
             try:
                 target_file.delete(None)
-            except Exception, e:
+            except Exception as e:
                 if isinstance(e, GLib.GError):
                     if e.code == Gio.IOErrorEnum.NOT_FOUND:
                         continue
@@ -193,7 +193,7 @@
             info = target_file.query_info(attrs, Gio.FileQueryInfoFlags.NONE,
                                           None)
             return {'size': info.get_size()}
-        except Exception, e:
+        except Exception as e:
             if isinstance(e, GLib.GError):
                 if e.code == Gio.IOErrorEnum.NOT_FOUND:
                     return {'size': -1} # early exit, no need to retry

=== modified file 'duplicity/backends/imapbackend.py'
--- duplicity/backends/imapbackend.py	2013-12-27 06:39:00 +0000
+++ duplicity/backends/imapbackend.py	2014-04-17 22:26:47 +0000
@@ -54,7 +54,7 @@
 
         #  Set the password
         if ( not parsed_url.password ):
-            if os.environ.has_key('IMAP_PASSWORD'):
+            if 'IMAP_PASSWORD' in os.environ:
                 password = os.environ.get('IMAP_PASSWORD')
             else:
                 password = getpass.getpass("Enter account password: ")

=== modified file 'duplicity/backends/localbackend.py'
--- duplicity/backends/localbackend.py	2013-12-27 06:39:00 +0000
+++ duplicity/backends/localbackend.py	2014-04-17 22:26:47 +0000
@@ -74,13 +74,13 @@
                 source_path.rename(target_path)
             except OSError:
                 pass
-            except Exception, e:
+            except Exception as e:
                 self.handle_error(e, 'put', source_path.name, target_path.name)
             else:
                 return
         try:
             target_path.writefileobj(source_path.open("rb"))
-        except Exception, e:
+        except Exception as e:
             self.handle_error(e, 'put', source_path.name, target_path.name)
 
         """If we get here, renaming failed previously"""
@@ -93,7 +93,7 @@
         source_path = self.remote_pathdir.append(filename)
         try:
             local_path.writefileobj(source_path.open("rb"))
-        except Exception, e:
+        except Exception as e:
             self.handle_error(e, 'get', source_path.name, local_path.name)
 
     def _list(self):
@@ -104,7 +104,7 @@
                 pass
         try:
             return self.remote_pathdir.listdir()
-        except Exception, e:
+        except Exception as e:
             self.handle_error(e, 'list', self.remote_pathdir.name)
 
     def delete(self, filename_list):
@@ -113,7 +113,7 @@
         for filename in filename_list:
             try:
                 self.remote_pathdir.append(filename).delete()
-            except Exception, e:
+            except Exception as e:
                 self.handle_error(e, 'delete', self.remote_pathdir.append(filename).name)
 
     def _query_file_info(self, filename):
@@ -125,7 +125,7 @@
             target_file.setdata()
             size = target_file.getsize()
             return {'size': size}
-        except Exception, e:
+        except Exception as e:
             self.handle_error(e, 'query', target_file.name)
             return {'size': None}
 

=== modified file 'duplicity/backends/megabackend.py'
--- duplicity/backends/megabackend.py	2013-12-27 06:39:00 +0000
+++ duplicity/backends/megabackend.py	2014-04-17 22:26:47 +0000
@@ -80,7 +80,7 @@
 
             self.client.upload(source_path.get_canonical(), self.folder, dest_filename=remote_filename)
 
-        except Exception, e:
+        except Exception as e:
             self.__handle_error("Failed to upload file '%s' to remote folder '%s': %s"
                                 % (source_path.get_canonical(), self.__get_node_name(self.folder), str(e)), raise_errors)
 
@@ -100,7 +100,7 @@
             else:
                 self.__handle_error("Failed to find file '%s' in remote folder '%s'"
                                     % (remote_filename, self.__get_node_name(self.folder)), raise_errors)
-        except Exception, e:
+        except Exception as e:
             self.__handle_error("Failed to download file '%s' in remote folder '%s': %s"
                                  % (remote_filename, self.__get_node_name(self.folder), str(e)), raise_errors)
 
@@ -110,7 +110,7 @@
         try:
             entries = self.client.get_files_in_node(self.folder)
             return [ self.client.get_name_from_file({entry:entries[entry]}) for entry in entries]
-        except Exception, e:
+        except Exception as e:
             self.__handle_error("Failed to fetch list of files in remote folder '%s': %s"
                                 % (self.__get_node_name(self.folder), str(e)), raise_errors)
 
@@ -129,7 +129,7 @@
                 else:
                     log.Warn("Failed to fetch file '%s' in remote folder '%s'"
                              % (filename, self.__get_node_name(self.folder)))
-            except Exception, e:
+            except Exception as e:
                 self.__handle_error("Failed to remove file '%s' in remote folder '%s': %s"
                                     % (filename, self.__get_node_name(self.folder), str(e)), raise_errors)
 
@@ -147,7 +147,7 @@
     def __authorize(self, email, password):
         try:
             self.client.login(email, password)
-        except Exception, e:
+        except Exception as e:
             self.__handle_error('Error while authenticating client: %s.' % str(e))
 
     def __filter_entries(self, entries, parent_id=None, title=None, type=None):
@@ -169,7 +169,7 @@
                 result.update({k:v})
 
             return result
-        except Exception, e:
+        except Exception as e:
             self.__handle_error('Error while fetching remote entries: %s.' % str(e))
 
 duplicity.backend.register_backend('mega', MegaBackend)

=== modified file 'duplicity/backends/sshbackend.py'
--- duplicity/backends/sshbackend.py	2012-05-16 11:03:20 +0000
+++ duplicity/backends/sshbackend.py	2014-04-17 22:26:47 +0000
@@ -26,11 +26,11 @@
 
 if (globals.ssh_backend and
     globals.ssh_backend.lower().strip() == 'pexpect'):
-    import _ssh_pexpect
+    from . import _ssh_pexpect
 else:
     # take user by the hand to prevent typo driven bug reports
     if globals.ssh_backend.lower().strip() != 'paramiko':
         log.Warn(_("Warning: Selected ssh backend '%s' is neither 'paramiko nor 'pexpect'. Will use default paramiko instead.") % globals.ssh_backend)
     warn_option("--scp-command", globals.scp_command)
     warn_option("--sftp-command", globals.sftp_command)
-    import _ssh_paramiko
+    from . import _ssh_paramiko

=== modified file 'duplicity/backends/swiftbackend.py'
--- duplicity/backends/swiftbackend.py	2013-12-27 06:39:00 +0000
+++ duplicity/backends/swiftbackend.py	2014-04-17 22:26:47 +0000
@@ -44,20 +44,20 @@
         conn_kwargs = {}
 
         # if the user has already authenticated
-        if os.environ.has_key('SWIFT_PREAUTHURL') and os.environ.has_key('SWIFT_PREAUTHTOKEN'):
+        if 'SWIFT_PREAUTHURL' in os.environ and 'SWIFT_PREAUTHTOKEN' in os.environ:
             conn_kwargs['preauthurl'] = os.environ['SWIFT_PREAUTHURL']
             conn_kwargs['preauthtoken'] = os.environ['SWIFT_PREAUTHTOKEN']           
         
         else:
-            if not os.environ.has_key('SWIFT_USERNAME'):
+            if 'SWIFT_USERNAME' not in os.environ:
                 raise BackendException('SWIFT_USERNAME environment variable '
                                        'not set.')
 
-            if not os.environ.has_key('SWIFT_PASSWORD'):
+            if 'SWIFT_PASSWORD' not in os.environ:
                 raise BackendException('SWIFT_PASSWORD environment variable '
                                        'not set.')
 
-            if not os.environ.has_key('SWIFT_AUTHURL'):
+            if 'SWIFT_AUTHURL' not in os.environ:
                 raise BackendException('SWIFT_AUTHURL environment variable '
                                        'not set.')
 
@@ -65,11 +65,11 @@
             conn_kwargs['key'] = os.environ['SWIFT_PASSWORD']
             conn_kwargs['authurl'] = os.environ['SWIFT_AUTHURL']
 
-        if os.environ.has_key('SWIFT_AUTHVERSION'):
+        if 'SWIFT_AUTHVERSION' in os.environ:
             conn_kwargs['auth_version'] = os.environ['SWIFT_AUTHVERSION']
         else:
             conn_kwargs['auth_version'] = '1'
-        if os.environ.has_key('SWIFT_TENANTNAME'):
+        if 'SWIFT_TENANTNAME' in os.environ:
             conn_kwargs['tenant_name'] = os.environ['SWIFT_TENANTNAME']
             
         self.container = parsed_url.path.lstrip('/')
@@ -77,7 +77,7 @@
         try:
             self.conn = Connection(**conn_kwargs)
             self.conn.put_container(self.container)
-        except Exception, e:
+        except Exception as e:
             log.FatalError("Connection failed: %s %s"
                            % (e.__class__.__name__, str(e)),
                            log.ErrorCode.connection_failed)
@@ -93,10 +93,10 @@
                                      remote_filename, 
                                      file(source_path.name))
                 return
-            except self.resp_exc, error:
+            except self.resp_exc as error:
                 log.Warn("Upload of '%s' failed (attempt %d): Swift server returned: %s %s"
                          % (remote_filename, n, error.http_status, error.message))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Upload of '%s' failed (attempt %s): %s: %s"
                         % (remote_filename, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -117,10 +117,10 @@
                     f.write(chunk)
                 local_path.setdata()
                 return
-            except self.resp_exc, resperr:
+            except self.resp_exc as resperr:
                 log.Warn("Download of '%s' failed (attempt %s): Swift server returned: %s %s"
                          % (remote_filename, n, resperr.http_status, resperr.message))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Download of '%s' failed (attempt %s): %s: %s"
                          % (remote_filename, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -139,10 +139,10 @@
                 # to make multiple requests to get them all.
                 headers, objs = self.conn.get_container(self.container)
                 return [ o['name'] for o in objs ]
-            except self.resp_exc, resperr:
+            except self.resp_exc as resperr:
                 log.Warn("Listing of '%s' failed (attempt %s): Swift server returned: %s %s"
                          % (self.container, n, resperr.http_status, resperr.message))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Listing of '%s' failed (attempt %s): %s: %s"
                          % (self.container, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -159,14 +159,14 @@
             try:
                 self.conn.delete_object(self.container, remote_filename)
                 return
-            except self.resp_exc, resperr:
+            except self.resp_exc as resperr:
                 if n > 1 and resperr.http_status == 404:
                     # We failed on a timeout, but delete succeeded on the server
                     log.Warn("Delete of '%s' missing after retry - must have succeded earlier" % remote_filename )
                     return
                 log.Warn("Delete of '%s' failed (attempt %s): Swift server returned: %s %s"
                          % (remote_filename, n, resperr.http_status, resperr.message))
-            except Exception, e:
+            except Exception as e:
                 log.Warn("Delete of '%s' failed (attempt %s): %s: %s"
                          % (remote_filename, n, e.__class__.__name__, str(e)))
                 log.Debug("Backtrace of previous error: %s"
@@ -186,10 +186,10 @@
     def _query_file_info(self, filename, raise_errors=False):
         try:
             sobject = self.conn.head_object(self.container, filename)
-            return {'size': long(sobject['content-length'])}
+            return {'size': int(sobject['content-length'])}
         except self.resp_exc:
             return {'size': -1}
-        except Exception, e:
+        except Exception as e:
             log.Warn("Error querying '%s/%s': %s"
                      "" % (self.container,
                            filename,

=== modified file 'duplicity/backends/webdavbackend.py'
--- duplicity/backends/webdavbackend.py	2014-04-16 20:45:09 +0000
+++ duplicity/backends/webdavbackend.py	2014-04-17 22:26:47 +0000
@@ -96,7 +96,7 @@
         def request(self, *args, **kwargs):
             try:
                 return httplib.HTTPSConnection.request(self, *args, **kwargs)
-            except ssl.SSLError, e:
+            except ssl.SSLError as e:
                 # encapsulate ssl errors
                 raise BackendException("SSL failed: %s" % str(e),log.ErrorCode.backend_error)
 
@@ -293,7 +293,7 @@
                 if filename:
                     result.append(filename)
             return result
-        except Exception, e:
+        except Exception as e:
             raise e
         finally:
             if response: response.close()
@@ -383,7 +383,7 @@
                 reason = response.reason
                 response.close()
                 raise BackendException("Bad status code %s reason %s." % (status,reason))
-        except Exception, e:
+        except Exception as e:
             raise e
         finally:
             if response: response.close()
@@ -407,7 +407,7 @@
                 reason = response.reason
                 response.close()
                 raise BackendException("Bad status code %s reason %s." % (status,reason))
-        except Exception, e:
+        except Exception as e:
             raise e
         finally:
             if response: response.close()
@@ -429,7 +429,7 @@
                     reason = response.reason
                     response.close()
                     raise BackendException("Bad status code %s reason %s." % (status,reason))
-            except Exception, e:
+            except Exception as e:
                 raise e
             finally:
                 if response: response.close()

=== modified file 'duplicity/cached_ops.py'
--- duplicity/cached_ops.py	2012-11-09 03:21:40 +0000
+++ duplicity/cached_ops.py	2014-04-17 22:26:47 +0000
@@ -34,7 +34,7 @@
     def __call__(self, *args):
         try:
             return self.cache[args]
-        except (KeyError, TypeError), e:
+        except (KeyError, TypeError) as e:
             result = self.f(*args)
             if not isinstance(e, TypeError):
                 # TypeError most likely means that args is not hashable

=== modified file 'duplicity/collections.py'
--- duplicity/collections.py	2014-01-17 16:44:46 +0000
+++ duplicity/collections.py	2014-04-17 22:26:47 +0000
@@ -96,7 +96,7 @@
             self.set_manifest(filename)
         else:
             assert pr.volume_number is not None
-            assert not self.volume_name_dict.has_key(pr.volume_number), \
+            assert pr.volume_number not in self.volume_name_dict, \
                    (self.volume_name_dict, filename)
             self.volume_name_dict[pr.volume_number] = filename
 
@@ -222,7 +222,7 @@
         # public key w/o secret key
         try:
             manifest_buffer = self.backend.get_data(self.remote_manifest_name)
-        except GPGError, message:
+        except GPGError as message:
             #TODO: We check for gpg v1 and v2 messages, should be an error code.
             if ("secret key not available" in message.args[0] or
                 "No secret key" in message.args[0]):
@@ -916,7 +916,7 @@
         # Build dictionary from end_times to lists of corresponding chains
         endtime_chain_dict = {}
         for chain in chain_list:
-            if endtime_chain_dict.has_key(chain.end_time):
+            if chain.end_time in endtime_chain_dict:
                 endtime_chain_dict[chain.end_time].append(chain)
             else:
                 endtime_chain_dict[chain.end_time] = [chain]

=== modified file 'duplicity/commandline.py'
--- duplicity/commandline.py	2014-04-17 17:45:37 +0000
+++ duplicity/commandline.py	2014-04-17 22:26:47 +0000
@@ -109,7 +109,7 @@
 def check_time(option, opt, value):
     try:
         return dup_time.genstrtotime(value)
-    except dup_time.TimeException, e:
+    except dup_time.TimeException as e:
         raise optparse.OptionValueError(str(e))
 
 def check_verbosity(option, opt, value):

=== modified file 'duplicity/diffdir.py'
--- duplicity/diffdir.py	2013-12-27 06:39:00 +0000
+++ duplicity/diffdir.py	2014-04-17 22:26:47 +0000
@@ -389,7 +389,7 @@
     def read(self, length = -1):
         try:
             buf = self.infile.read(length)
-        except IOError, ex:
+        except IOError as ex:
             buf = ""
             log.Warn(_("Error %s getting delta for %s") % (str(ex), util.ufn(self.infile.name)))
         if stats:
@@ -461,7 +461,7 @@
         TarBlockIter initializer
         """
         self.input_iter = input_iter
-        self.offset = 0l                    # total length of data read
+        self.offset = 0                     # total length of data read
         self.process_waiting = False        # process_continued has more blocks
         self.process_next_vol_number = None # next volume number to write in multivol
         self.previous_index = None          # holds index of last block returned
@@ -564,7 +564,7 @@
         Return closing string for tarfile, reset offset
         """
         blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) #@UnusedVariable
-        self.offset = 0l
+        self.offset = 0
         return '\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0
 
     def __iter__(self):
@@ -736,5 +736,5 @@
         return 512 # set minimum of 512 bytes
     else:
         # Split file into about 2000 pieces, rounding to 512
-        file_blocksize = long((file_len / (2000 * 512)) * 512)
+        file_blocksize = int((file_len / (2000 * 512)) * 512)
         return min(file_blocksize, globals.max_blocksize)

=== modified file 'duplicity/dup_temp.py'
--- duplicity/dup_temp.py	2013-12-27 06:39:00 +0000
+++ duplicity/dup_temp.py	2014-04-17 22:26:47 +0000
@@ -179,9 +179,9 @@
         tgt = self.dirpath.append(self.remname)
         src_iter = SrcIter(src)
         if pr.compressed:
-            gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxint)
+            gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxsize)
         elif pr.encrypted:
-            gpg.GPGWriteFile(src_iter, tgt.name, globals.gpg_profile, size = sys.maxint)
+            gpg.GPGWriteFile(src_iter, tgt.name, globals.gpg_profile, size = sys.maxsize)
         else:
             os.system("cp -p \"%s\" \"%s\"" % (src.name, tgt.name))
         globals.backend.move(tgt) #@UndefinedVariable
@@ -195,7 +195,7 @@
         src_iter = SrcIter(src)
         pr = file_naming.parse(self.permname)
         if pr.compressed:
-            gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxint)
+            gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxsize)
             os.unlink(src.name)
         else:
             os.rename(src.name, tgt.name)

=== modified file 'duplicity/dup_threading.py'
--- duplicity/dup_threading.py	2010-07-22 19:15:11 +0000
+++ duplicity/dup_threading.py	2014-04-17 22:26:47 +0000
@@ -192,7 +192,7 @@
             if state['error'] is None:
                 return state['value']
             else:
-                raise state['error'], None, state['trace']
+                raise state['error'].with_traceback(state['trace'])
         finally:
             cv.release()
 
@@ -207,7 +207,7 @@
             cv.release()
 
             return (True, waiter)
-        except Exception, e:
+        except Exception as e:
             cv.acquire()
             state['done'] = True
             state['error'] = e

=== modified file 'duplicity/dup_time.py'
--- duplicity/dup_time.py	2011-11-03 11:27:45 +0000
+++ duplicity/dup_time.py	2014-04-17 22:26:47 +0000
@@ -62,7 +62,7 @@
 def setcurtime(time_in_secs = None):
     """Sets the current time in curtime and curtimestr"""
     global curtime, curtimestr
-    t = time_in_secs or long(time.time())
+    t = time_in_secs or int(time.time())
     assert type(t) in (types.LongType, types.IntType)
     curtime, curtimestr = t, timetostring(t)
 
@@ -137,9 +137,9 @@
         # even when we're not in the same timezone that wrote the
         # string
         if len(timestring) == 16:
-            return long(utc_in_secs)
+            return int(utc_in_secs)
         else:
-            return long(utc_in_secs + tzdtoseconds(timestring[19:]))
+            return int(utc_in_secs + tzdtoseconds(timestring[19:]))
     except (TypeError, ValueError, AssertionError):
         return None
 
@@ -169,7 +169,7 @@
     if seconds == 1:
         partlist.append("1 second")
     elif not partlist or seconds > 1:
-        if isinstance(seconds, int) or isinstance(seconds, long):
+        if isinstance(seconds, (types.LongType, types.IntType)):
             partlist.append("%s seconds" % seconds)
         else:
             partlist.append("%.2f seconds" % seconds)

=== modified file 'duplicity/file_naming.py'
--- duplicity/file_naming.py	2014-01-31 12:41:00 +0000
+++ duplicity/file_naming.py	2014-04-17 22:26:47 +0000
@@ -158,7 +158,7 @@
     """
     Convert string s in base 36 to long int
     """
-    total = 0L
+    total = 0
     for i in range(len(s)):
         total *= 36
         digit_ord = ord(s[i])
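
(Dropping the 0L seed here is safe because Python 2 silently promotes int results past the machine word to long, and Python 3 has a single arbitrary-precision int type. A minimal sketch of the same base-36 decode, with a hypothetical helper name, just to illustrate:

    def base36_to_int(s):
        # No 0L seed or long() call is needed on either Python 2 or Python 3;
        # the result grows to arbitrary precision automatically.
        digits = "0123456789abcdefghijklmnopqrstuvwxyz"
        total = 0
        for ch in s.lower():
            total = total * 36 + digits.index(ch)
        return total

    base36_to_int("zz")   # == 35 * 36 + 35 == 1295
)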

=== modified file 'duplicity/globals.py'
--- duplicity/globals.py	2014-04-09 09:22:27 +0000
+++ duplicity/globals.py	2014-04-17 22:26:47 +0000
@@ -87,7 +87,7 @@
 gpg_options = ''
 
 # Maximum file blocksize
-max_blocksize = 2048L
+max_blocksize = 2048
 
 # If true, filelists and directory statistics will be split on
 # nulls instead of newlines.

=== modified file 'duplicity/gpg.py'
--- duplicity/gpg.py	2013-12-27 06:39:00 +0000
+++ duplicity/gpg.py	2014-04-17 22:26:47 +0000
@@ -215,7 +215,7 @@
                 msg += unicode(line.strip(), locale.getpreferredencoding(), 'replace') + u"\n"
         msg += u"===== End GnuPG log =====\n"
         if not (msg.find(u"invalid packet (ctb=14)") > -1):
-            raise GPGError, msg
+            raise GPGError(msg)
         else:
             return ""
 

=== modified file 'duplicity/gpginterface.py'
--- duplicity/gpginterface.py	2013-12-27 06:39:00 +0000
+++ duplicity/gpginterface.py	2014-04-17 22:26:47 +0000
@@ -353,14 +353,14 @@
         if attach_fhs == None: attach_fhs = {}
 
         for std in _stds:
-            if not attach_fhs.has_key(std) \
+            if std not in attach_fhs \
                and std not in create_fhs:
                 attach_fhs.setdefault(std, getattr(sys, std))
 
         handle_passphrase = 0
 
         if self.passphrase != None \
-           and not attach_fhs.has_key('passphrase') \
+           and 'passphrase' not in attach_fhs \
            and 'passphrase' not in create_fhs:
             handle_passphrase = 1
             create_fhs.append('passphrase')
@@ -384,18 +384,18 @@
         process = Process()
 
         for fh_name in create_fhs + attach_fhs.keys():
-            if not _fd_modes.has_key(fh_name):
-                raise KeyError, \
+            if fh_name not in _fd_modes:
+                raise KeyError(
                       "unrecognized filehandle name '%s'; must be one of %s" \
-                      % (fh_name, _fd_modes.keys())
+                      % (fh_name, _fd_modes.keys()))
 
         for fh_name in create_fhs:
             # make sure the user doesn't specify a filehandle
             # to be created *and* attached
-            if attach_fhs.has_key(fh_name):
-                raise ValueError, \
+            if fh_name in attach_fhs:
+                raise ValueError(
                       "cannot have filehandle '%s' in both create_fhs and attach_fhs" \
-                      % fh_name
+                      % fh_name)
 
             pipe = os.pipe()
             # fix by drt@xxxxxxxxxxxxx noting
@@ -660,7 +660,7 @@
         if self.returned == None:
             self.thread.join()
         if self.returned != 0:
-            raise IOError, "GnuPG exited non-zero, with code %d" % (self.returned >> 8)
+            raise IOError("GnuPG exited non-zero, with code %d" % (self.returned >> 8))
 
 
 def threaded_waitpid(process):

=== modified file 'duplicity/librsync.py'
--- duplicity/librsync.py	2010-11-20 15:39:00 +0000
+++ duplicity/librsync.py	2014-04-17 22:26:47 +0000
@@ -26,7 +26,7 @@
 
 """
 
-import _librsync
+from . import _librsync
 import types, array
 
 blocksize = _librsync.RS_JOB_BLOCKSIZE
@@ -90,7 +90,7 @@
             self._add_to_inbuf()
         try:
             self.eof, len_inbuf_read, cycle_out = self.maker.cycle(self.inbuf)
-        except _librsync.librsyncError, e:
+        except _librsync.librsyncError as e:
             raise librsyncError(str(e))
         self.inbuf = self.inbuf[len_inbuf_read:]
         self.outbuf.fromstring(cycle_out)
@@ -126,7 +126,7 @@
         LikeFile.__init__(self, infile)
         try:
             self.maker = _librsync.new_sigmaker(blocksize)
-        except _librsync.librsyncError, e:
+        except _librsync.librsyncError as e:
             raise librsyncError(str(e))
 
 class DeltaFile(LikeFile):
@@ -148,7 +148,7 @@
             assert not signature.close()
         try:
             self.maker = _librsync.new_deltamaker(sig_string)
-        except _librsync.librsyncError, e:
+        except _librsync.librsyncError as e:
             raise librsyncError(str(e))
 
 
@@ -167,7 +167,7 @@
             raise TypeError("basis_file must be a (true) file")
         try:
             self.maker = _librsync.new_patchmaker(basis_file)
-        except _librsync.librsyncError, e:
+        except _librsync.librsyncError as e:
             raise librsyncError(str(e))
 
 
@@ -182,7 +182,7 @@
         """Return new signature instance"""
         try:
             self.sig_maker = _librsync.new_sigmaker(blocksize)
-        except _librsync.librsyncError, e:
+        except _librsync.librsyncError as e:
             raise librsyncError(str(e))
         self.gotsig = None
         self.buffer = ""
@@ -201,7 +201,7 @@
         """Run self.buffer through sig_maker, add to self.sig_string"""
         try:
             eof, len_buf_read, cycle_out = self.sig_maker.cycle(self.buffer)
-        except _librsync.librsyncError, e:
+        except _librsync.librsyncError as e:
             raise librsyncError(str(e))
         self.buffer = self.buffer[len_buf_read:]
         self.sigstring_list.append(cycle_out)
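
(The import change at the top of librsync.py is needed because implicit relative imports are gone in Python 3; the explicit "from . import" form is understood by Python 2.6 as well, so nothing else in the module has to change. Illustrative only, assuming the module lives inside the duplicity package:

    # Inside duplicity/librsync.py:
    from . import _librsync    # explicit relative import, Python 2.6+ and 3
    # import _librsync         # implicit relative import, Python 2 only
)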

=== modified file 'duplicity/patchdir.py'
--- duplicity/patchdir.py	2013-12-27 06:39:00 +0000
+++ duplicity/patchdir.py	2014-04-17 22:26:47 +0000
@@ -504,7 +504,7 @@
             if final_ropath.exists():
                 # otherwise the final patch was a deletion
                 yield final_ropath
-        except Exception, e:
+        except Exception as e:
             filename = normalized[-1].get_ropath().get_relative_path()
             log.Warn(_("Error '%s' patching %s") % 
                      (str(e), filename),

=== modified file 'duplicity/path.py'
--- duplicity/path.py	2013-12-27 06:39:00 +0000
+++ duplicity/path.py	2014-04-17 22:26:47 +0000
@@ -500,7 +500,7 @@
         """Refresh stat cache"""
         try:
             self.stat = os.lstat(self.name)
-        except OSError, e:
+        except OSError as e:
             err_string = errno.errorcode[e[0]]
             if err_string in ["ENOENT", "ENOTDIR", "ELOOP", "ENOTCONN"]:
                 self.stat, self.type = None, None # file doesn't exist

=== modified file 'duplicity/progress.py'
--- duplicity/progress.py	2013-04-15 12:10:35 +0000
+++ duplicity/progress.py	2014-04-17 22:26:47 +0000
@@ -264,7 +264,7 @@
         projection = 1.0
         if self.progress_estimation > 0:
             projection = (1.0 - self.progress_estimation) / self.progress_estimation
-        self.time_estimation = long(projection * float(self.elapsed_sum.total_seconds()))
+        self.time_estimation = int(projection * float(self.elapsed_sum.total_seconds()))
 
         # Apply values only when monotonic, so the estimates look more consistent to the human eye
         if self.progress_estimation < last_progress_estimation:
@@ -299,7 +299,7 @@
         volume and for the current volume
         """
         changing = max(bytecount - self.last_bytecount, 0)
-        self.total_bytecount += long(changing) # Annotate only changing bytes since last probe
+        self.total_bytecount += int(changing) # Annotate only changing bytes since last probe
         self.last_bytecount = bytecount
         if changing > 0:
             self.stall_last_time = datetime.now()

=== modified file 'duplicity/robust.py'
--- duplicity/robust.py	2013-12-27 06:39:00 +0000
+++ duplicity/robust.py	2014-04-17 22:26:47 +0000
@@ -39,7 +39,7 @@
     #       RPathException, Rdiff.RdiffException,
     #       librsync.librsyncError, C.UnknownFileTypeError), exc:
     #   TracebackArchive.add()
-    except (IOError, EnvironmentError, librsync.librsyncError, path.PathException), exc:
+    except (IOError, EnvironmentError, librsync.librsyncError, path.PathException) as exc:
         if (not isinstance(exc, EnvironmentError) or
             ((exc[0] in errno.errorcode)
              and errno.errorcode[exc[0]] in

=== modified file 'duplicity/selection.py'
--- duplicity/selection.py	2013-12-27 06:39:00 +0000
+++ duplicity/selection.py	2014-04-17 22:26:47 +0000
@@ -256,7 +256,7 @@
                     self.add_selection_func(self.regexp_get_sf(arg, 1))
                 else:
                     assert 0, "Bad selection option %s" % opt
-        except SelectError, e:
+        except SelectError as e:
             self.parse_catch_error(e)
         assert filelists_index == len(filelists)
         self.parse_last_excludes()
@@ -351,7 +351,7 @@
                 continue # skip blanks
             try:
                 tuple = self.filelist_parse_line(line, include)
-            except FilePrefixError, exc:
+            except FilePrefixError as exc:
                 incr_warnings(exc)
                 continue
             tuple_list.append(tuple)

=== modified file 'duplicity/statistics.py'
--- duplicity/statistics.py	2010-07-22 19:15:11 +0000
+++ duplicity/statistics.py	2014-04-17 22:26:47 +0000
@@ -104,7 +104,7 @@
         if not index:
             filename = "."
         else:
-            filename = apply(os.path.join, index)
+            filename = os.path.join(*index)
             if use_repr:
                 # use repr to quote newlines in relative filename, then
                # take off the leading and trailing quote, and quote spaces.
@@ -123,7 +123,7 @@
         for attr, val_string in zip(self.stat_file_attrs,
                                     lineparts[-len(self.stat_file_attrs):]):
             try:
-                val = long(val_string)
+                val = int(val_string)
             except ValueError:
                 try:
                     val = float(val_string)
@@ -230,7 +230,7 @@
                 error(line)
             try:
                 try:
-                    val1 = long(value_string)
+                    val1 = int(value_string)
                 except ValueError:
                     val1 = None
                 val2 = float(value_string)
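
(The first statistics.py hunk replaces apply(), which was removed in Python 3; argument unpacking with * is the direct replacement and has worked since early Python 2. For example, illustrative only:

    import os

    parts = ("usr", "share", "duplicity")
    os.path.join(*parts)            # 'usr/share/duplicity' on POSIX
    # apply(os.path.join, parts)    # same result on Python 2, gone on Python 3
)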

=== modified file 'duplicity/tempdir.py'
--- duplicity/tempdir.py	2013-12-27 06:39:00 +0000
+++ duplicity/tempdir.py	2014-04-17 22:26:47 +0000
@@ -213,7 +213,7 @@
         """
         self.__lock.acquire()
         try:
-            if self.__pending.has_key(fname):
+            if fname in self.__pending:
                 log.Debug(_("Forgetting temporary file %s") % util.ufn(fname))
                 del(self.__pending[fname])
             else:

=== modified file 'duplicity/util.py'
--- duplicity/util.py	2014-01-17 16:44:46 +0000
+++ duplicity/util.py	2014-04-17 22:26:47 +0000
@@ -80,7 +80,7 @@
     """
     try:
         return fn()
-    except Exception, e:
+    except Exception as e:
         if globals.ignore_errors:
             log.Warn(_("IGNORED_ERROR: Warning: ignoring error as requested: %s: %s")
                      % (e.__class__.__name__, str(e)))
@@ -131,7 +131,7 @@
     """
     try:
         fn(filename)
-    except OSError, ex:
+    except OSError as ex:
         if ex.errno == errno.ENOENT:
             pass
         else:

=== modified file 'testing/gnupg/trustdb.gpg'
Binary files testing/gnupg/trustdb.gpg	2011-11-04 12:48:04 +0000 and testing/gnupg/trustdb.gpg	2014-04-17 22:26:47 +0000 differ
=== modified file 'testing/tests/test_badupload.py'
--- testing/tests/test_badupload.py	2014-04-16 02:43:43 +0000
+++ testing/tests/test_badupload.py	2014-04-17 22:26:47 +0000
@@ -36,7 +36,7 @@
         try:
             self.backup("full", "testfiles/dir1", options=["--skip-volume=1"])
             self.fail()
-        except CmdError, e:
+        except CmdError as e:
             self.assertEqual(e.exit_status, 44)
 
 if __name__ == "__main__":

=== modified file 'testing/tests/test_collections.py'
--- testing/tests/test_collections.py	2014-04-16 02:43:43 +0000
+++ testing/tests/test_collections.py	2014-04-17 22:26:47 +0000
@@ -111,8 +111,8 @@
             assert 0
 
         chain = chains[0]
-        assert chain.end_time == 1029654270L
-        assert chain.fullset.time == 1029626221L
+        assert chain.end_time == 1029654270
+        assert chain.fullset.time == 1029626221
 
     def test_collections_status(self):
         """Test CollectionStatus object's set_values()"""
@@ -121,7 +121,7 @@
             assert cs.values_set
 
             assert cs.matched_chain_pair
-            assert cs.matched_chain_pair[0].end_time == 1029826800L
+            assert cs.matched_chain_pair[0].end_time == 1029826800
             assert len(cs.all_backup_chains) == 1, cs.all_backup_chains
 
         cs = collections.CollectionsStatus(self.real_backend, globals.archive_dir).set_values()
@@ -153,7 +153,7 @@
             for op in orphaned_paths: print op
             assert 0
         assert len(chains) == 1, chains
-        assert chains[0].end_time == 1029826800L
+        assert chains[0].end_time == 1029826800
 
     def sigchain_fileobj_get(self, local):
         """Return chain, local if local is true with filenames added"""

=== modified file 'testing/tests/test_filenaming.py'
--- testing/tests/test_filenaming.py	2014-04-16 02:43:43 +0000
+++ testing/tests/test_filenaming.py	2014-04-17 22:26:47 +0000
@@ -88,13 +88,13 @@
         pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dns.h112bi.h14rg0.st.g")
         assert pr, pr
         assert pr.type == "new-sig"
-        assert pr.end_time == 1029826800L
+        assert pr.end_time == 1029826800
 
         if not globals.short_filenames:
             pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
             assert pr, pr
             assert pr.type == "new-sig"
-            assert pr.end_time == 1029826800L
+            assert pr.end_time == 1029826800
 
         pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dfs.h5dixs.st.g")
         assert pr, pr
@@ -108,14 +108,14 @@
         assert pr, pr
         assert pr.partial
         assert pr.type == "new-sig"
-        assert pr.end_time == 1029826800L
+        assert pr.end_time == 1029826800
 
         if not globals.short_filenames:
             pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg")
             assert pr, pr
             assert pr.partial
             assert pr.type == "new-sig"
-            assert pr.end_time == 1029826800L
+            assert pr.end_time == 1029826800
 
         pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dfs.h5dixs.st.p.g")
         assert pr, pr

=== modified file 'testing/tests/test_lazy.py'
--- testing/tests/test_lazy.py	2014-04-16 02:43:43 +0000
+++ testing/tests/test_lazy.py	2014-04-17 22:26:47 +0000
@@ -21,6 +21,7 @@
 
 import helper
 import unittest, pickle, sys
+from functools import reduce
 
 from duplicity.lazy import * #@UnusedWildImport
 
@@ -33,7 +34,7 @@
     empty = lambda s: iter([])
 
     def __init__(self, *args):
-        apply (unittest.TestCase.__init__, (self,) + args)
+        unittest.TestCase.__init__(self, *args)
         self.falseerror = self.falseerror_maker()
         self.trueerror = self.trueerror_maker()
         self.emptygen = self.emptygen_maker()

=== modified file 'testing/tests/test_patchdir.py'
--- testing/tests/test_patchdir.py	2014-04-16 02:43:43 +0000
+++ testing/tests/test_patchdir.py	2014-04-17 22:26:47 +0000
@@ -209,12 +209,12 @@
         self.out = out
 
     def snapshot(self):
-        """Make a snapshot ROPath, permissions 0600"""
+        """Make a snapshot ROPath, permissions 0o600"""
         ss = self.out.append("snapshot")
         fout = ss.open("wb")
         fout.write("hello, world!")
         assert not fout.close()
-        ss.chmod(0600)
+        ss.chmod(0o600)
         ss.difftype = "snapshot"
         return ss
 
@@ -230,24 +230,24 @@
         return deltabuf
 
     def delta1(self):
-        """Make a delta ROPath, permissions 0640"""
+        """Make a delta ROPath, permissions 0o640"""
         delta1 = self.out.append("delta1")
         fout = delta1.open("wb")
         fout.write(self.get_delta("hello, world!",
                                   "aonseuth aosetnuhaonsuhtansoetuhaoe"))
         assert not fout.close()
-        delta1.chmod(0640)
+        delta1.chmod(0o640)
         delta1.difftype = "diff"
         return delta1
 
     def delta2(self):
-        """Make another delta ROPath, permissions 0644"""
+        """Make another delta ROPath, permissions 0o644"""
         delta2 = self.out.append("delta1")
         fout = delta2.open("wb")
         fout.write(self.get_delta("aonseuth aosetnuhaonsuhtansoetuhaoe",
                                   "3499 34957839485792357 458348573"))
         assert not fout.close()
-        delta2.chmod(0644)
+        delta2.chmod(0o644)
         delta2.difftype = "diff"
         return delta2
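
(The permission constants above change spelling because a bare leading-zero octal literal such as 0600 is a syntax error on Python 3; the 0o prefix is accepted from Python 2.6 onward and denotes the same value. An illustrative check, not part of the branch:

    import stat

    assert 0o600 == 384 == stat.S_IRUSR | stat.S_IWUSR   # rw-------
    assert 0o755 == 493                                   # rwxr-xr-x
)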
 

=== added file 'testing/tests/test_python3.py'
--- testing/tests/test_python3.py	1970-01-01 00:00:00 +0000
+++ testing/tests/test_python3.py	2014-04-17 22:26:47 +0000
@@ -0,0 +1,61 @@
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+#
+# Copyright 2014 Michael Terry <michael.terry@xxxxxxxxxxxxx>
+#
+# This file is part of duplicity.
+#
+# Duplicity is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# Duplicity is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with duplicity; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+import helper
+import os
+import subprocess
+import unittest
+
+helper.setup()
+
+
+class Python3ReadinessTest(unittest.TestCase):
+    def test_2to3(self):
+        _top_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+                                "..", "..")
+
+        # As we modernize the source code, we can remove more and more nofixes
+        process = subprocess.Popen(["2to3",
+                                    "--nofix=dict",
+                                    "--nofix=filter",
+                                    "--nofix=map",
+                                    "--nofix=next",
+                                    "--nofix=print",
+                                    "--nofix=types",
+                                    "--nofix=unicode",
+                                    "--nofix=xrange",
+        # The following fixes we don't want to remove, since they are false
+        # positives, things we don't care about, or real incompatibilities
+        # but which 2to3 can fix for us better automatically.
+                                    "--nofix=callable",
+                                    "--nofix=future",
+                                    "--nofix=imports",
+                                    "--nofix=raw_input",
+                                    "--nofix=urllib",
+                                    _top_dir],
+                                   stdout=subprocess.PIPE,
+                                   stderr=subprocess.PIPE)
+        output = process.communicate()[0]
+        self.assertEqual(0, process.returncode)
+        self.assertEqual("", output, output)
+
+
+if __name__ == "__main__":
+    unittest.main()
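
(For what it's worth, roughly the same check can be driven from Python through lib2to3, which is what the 2to3 console script wraps; a hypothetical equivalent of the subprocess call above might look like:

    from lib2to3.main import main

    # Without -w this only prints diffs; a non-zero return indicates errors.
    exit_code = main("lib2to3.fixes", ["--nofix=print", "--nofix=unicode", "."])

The test shells out to the 2to3 script instead so that it checks exactly what a packager would run.)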

=== modified file 'testing/tests/test_restart.py'
--- testing/tests/test_restart.py	2014-04-16 02:43:43 +0000
+++ testing/tests/test_restart.py	2014-04-17 22:26:47 +0000
@@ -326,7 +326,7 @@
         self.backup("full", "testfiles/blocktartest")
         # Create an exact clone of the snapshot folder in the sigtar already.
         # Permissions and mtime must match.
-        os.mkdir("testfiles/snapshot", 0755)
+        os.mkdir("testfiles/snapshot", 0o755)
         os.utime("testfiles/snapshot", (1030384548, 1030384548))
         # Adjust the sigtar.gz file to have a bogus second snapshot/ entry
         # at the beginning.

