[Merge] lp:~mterry/duplicity/require-2.6 into lp:duplicity

Michael Terry has proposed merging lp:~mterry/duplicity/require-2.6 into lp:duplicity with lp:~mterry/duplicity/modern-testing as a prerequisite.

Requested reviews:
  duplicity-team (duplicity-team)

For more details, see:
https://code.launchpad.net/~mterry/duplicity/require-2.6/+merge/216210

Require at least Python 2.6.

Our code base already requires 2.6, because 2.6-isms have crept in, usually because we or a contributor didn't think to test with 2.4.  And frankly, I'm not even sure how to test with 2.4 on a modern system [1].

You know I've been pushing for this change for a while, but it seems that at this point, it's just moving from de facto to de jure.

Benefits of this:
 - We can start using newer syntax and features
 - We can drop a bunch of code (notably our internal copies of urlparse and tarfile)

Most of this branch is just removing code that we kept around only for 2.4.  I didn't start using any new 2.6-isms.  Those can be separate branches if this is accepted.
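
For a flavor of what raising the floor buys, here are a few 2.6-only
constructs (purely illustrative; as noted, none of them appear in this
branch):

    # Illustrative only -- everything below needs Python 2.6 or later:
    import json                                   # json joined the stdlib in 2.6

    try:
        opts = json.loads('{"volsize": 25}')
    except ValueError as e:                       # 'except ... as' is 2.6 syntax
        raise SystemExit("bad value: {0}".format(e))  # str.format() is new in 2.6

    with open("/etc/hostname") as f:              # 'with' without __future__ is 2.6
        print f.read().strip()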

[1] https://launchpad.net/~fkrull/+archive/deadsnakes is a good start, but virtualenv in Ubuntu 14.04 only supports 2.6+.  So you'd have to hook everything up manually.
-- 
https://code.launchpad.net/~mterry/duplicity/require-2.6/+merge/216210
Your team duplicity-team is requested to review the proposed merge of lp:~mterry/duplicity/require-2.6 into lp:duplicity.
=== modified file 'README'
--- README	2014-02-21 17:35:24 +0000
+++ README	2014-04-16 20:51:42 +0000
@@ -19,7 +19,7 @@
 
 REQUIREMENTS:
 
- * Python v2.4 or later
+ * Python v2.6 or later
  * librsync v0.9.6 or later
  * GnuPG v1.x for encryption
  * python-lockfile for concurrency locking
@@ -28,7 +28,6 @@
  * for ftp over SSL -- lftp version 3.7.15 or later
  * Boto 2.0 or later for single-processing S3 or GCS access (default)
  * Boto 2.1.1 or later for multi-processing S3 access
- * Python v2.6 or later for multi-processing S3 access
  * Boto 2.7.0 or later for Glacier S3 access
 
 If you install from the source package, you will also need:

=== modified file 'bin/duplicity.1'
--- bin/duplicity.1	2014-03-09 20:37:24 +0000
+++ bin/duplicity.1	2014-04-16 20:51:42 +0000
@@ -51,7 +51,7 @@
 .SH REQUIREMENTS
 Duplicity requires a POSIX-like operating system with a
 .B python
-interpreter version 2.4+ installed.
+interpreter version 2.6+ installed.
 It is best used under GNU/Linux.
 
 Some backends also require additional components (probably available as packages for your specific platform):

=== modified file 'dist/duplicity.spec.template'
--- dist/duplicity.spec.template	2011-11-25 17:47:57 +0000
+++ dist/duplicity.spec.template	2014-04-16 20:51:42 +0000
@@ -10,8 +10,8 @@
 License: GPL
 Group: Applications/Archiving
 BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
-requires: librsync >= 0.9.6, %{PYTHON_NAME} >= 2.4, gnupg >= 1.0.6
-BuildPrereq: %{PYTHON_NAME}-devel >= 2.4, librsync-devel >= 0.9.6
+requires: librsync >= 0.9.6, %{PYTHON_NAME} >= 2.6, gnupg >= 1.0.6
+BuildPrereq: %{PYTHON_NAME}-devel >= 2.6, librsync-devel >= 0.9.6
 
 %description
 Duplicity incrementally backs up files and directories by encrypting

=== modified file 'duplicity/__init__.py'
--- duplicity/__init__.py	2013-12-27 06:39:00 +0000
+++ duplicity/__init__.py	2014-04-16 20:51:42 +0000
@@ -19,12 +19,5 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-import __builtin__
 import gettext
-
-t = gettext.translation('duplicity', fallback=True)
-t.install(unicode=True)
-
-# Once we can depend on python >=2.5, we can just use names='ngettext' above.
-# But for now, do the install manually.
-__builtin__.__dict__['ngettext'] = t.ungettext
+gettext.install('duplicity', unicode=True, names=['ngettext'])
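
For reference, the 'names' keyword to gettext.install() has existed since
Python 2.5, which is exactly what the removed comment was waiting for.  A
minimal sketch (not part of the diff) of what the one-liner does:

    import gettext
    # install() puts _() into __builtin__; names=['ngettext'] additionally
    # installs the plural lookup, replacing the hand-rolled __builtin__ poke.
    gettext.install('duplicity', unicode=True, names=['ngettext'])
    print _("backup started")                     # singular lookup
    print ngettext("%d file", "%d files", 3) % 3  # plural lookup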

=== modified file 'duplicity/_librsyncmodule.c'
--- duplicity/_librsyncmodule.c	2013-01-17 16:17:42 +0000
+++ duplicity/_librsyncmodule.c	2014-04-16 20:51:42 +0000
@@ -26,15 +26,6 @@
 #include <librsync.h>
 #define RS_JOB_BLOCKSIZE 65536
 
-/* Support Python 2.4 and 2.5 */
-#ifndef PyVarObject_HEAD_INIT
-    #define PyVarObject_HEAD_INIT(type, size) \
-        PyObject_HEAD_INIT(type) size,
-#endif
-#ifndef Py_TYPE
-    #define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
-#endif
-
 static PyObject *librsyncError;
 
 /* Sets python error string from result */

=== modified file 'duplicity/backend.py'
--- duplicity/backend.py	2014-02-07 19:04:51 +0000
+++ duplicity/backend.py	2014-04-16 20:51:42 +0000
@@ -32,13 +32,12 @@
 import getpass
 import gettext
 import urllib
+import urlparse
 
 from duplicity import dup_temp
-from duplicity import dup_threading
 from duplicity import file_naming
 from duplicity import globals
 from duplicity import log
-from duplicity import urlparse_2_5 as urlparser
 from duplicity import progress
 
 from duplicity.util import exception_traceback
@@ -58,6 +57,28 @@
 _forced_backend = None
 _backends = {}
 
+# These URL schemes have a backend with a notion of an RFC "network location".
+# The 'file' and 's3+http' schemes should not be in this list.
+# 'http' and 'https' are not actually used for duplicity backend urls, but are needed
+# in order to properly support urls returned from some webdav servers. adding them here
+# is a hack. we should instead not stomp on the url parsing module to begin with.
+#
+# This looks similar to urlparse's 'uses_netloc' list, but urlparse doesn't use
+# that list for parsing, only creating urls.  And doesn't include our custom
+# schemes anyway.  So we keep our own here for our own use.
+uses_netloc = ['ftp',
+               'ftps',
+               'hsi',
+               'rsync',
+               's3',
+               'u1',
+               'scp', 'ssh', 'sftp',
+               'webdav', 'webdavs',
+               'gdocs',
+               'http', 'https',
+               'imap', 'imaps',
+               'mega']
+
 
 def import_backends():
     """
@@ -165,47 +186,6 @@
             raise BackendException(_("Could not initialize backend: %s") % str(sys.exc_info()[1]))
 
 
-_urlparser_initialized = False
-_urlparser_initialized_lock = dup_threading.threading_module().Lock()
-
-def _ensure_urlparser_initialized():
-    """
-    Ensure that the appropriate clobbering of variables in the
-    urlparser module has been done. In the future, the need for this
-    clobbering to begin with should preferably be eliminated.
-    """
-    def init():
-        global _urlparser_initialized
-
-        if not _urlparser_initialized:
-            # These URL schemes have a backend with a notion of an RFC "network location".
-            # The 'file' and 's3+http' schemes should not be in this list.
-            # 'http' and 'https' are not actually used for duplicity backend urls, but are needed
-            # in order to properly support urls returned from some webdav servers. adding them here
-            # is a hack. we should instead not stomp on the url parsing module to begin with.
-            #
-            # todo: eliminate the need for backend specific hacking here completely.
-            urlparser.uses_netloc = ['ftp',
-                                     'ftps',
-                                     'hsi',
-                                     'rsync',
-                                     's3',
-                                     'u1',
-                                     'scp', 'ssh', 'sftp',
-                                     'webdav', 'webdavs',
-                                     'gdocs',
-                                     'http', 'https',
-                                     'imap', 'imaps',
-                                     'mega']
-
-            # Do not transform or otherwise parse the URL path component.
-            urlparser.uses_query = []
-            urlparser.uses_fragm = []
-
-            _urlparser_initialized = True
-
-    dup_threading.with_lock(_urlparser_initialized_lock, init)
-
 class ParsedUrl:
     """
     Parse the given URL as a duplicity backend URL.
@@ -219,7 +199,6 @@
     """
     def __init__(self, url_string):
         self.url_string = url_string
-        _ensure_urlparser_initialized()
 
         # While useful in some cases, the fact is that the urlparser makes
         # all the properties in the URL deferred or lazy.  This means that
@@ -227,7 +206,7 @@
         # problems here, so they will be caught early.
 
         try:
-            pu = urlparser.urlparse(url_string)
+            pu = urlparse.urlparse(url_string)
         except Exception:
             raise InvalidBackendURL("Syntax error in: %s" % url_string)
 
@@ -273,26 +252,37 @@
         self.port = None
         try:
             self.port = pu.port
-        except Exception:
+        except Exception: # not raised in python2.7+, just returns None
             # old style rsync://host::[/]dest, are still valid, though they contain no port
             if not ( self.scheme in ['rsync'] and re.search('::[^:]*$', self.url_string)):
                 raise InvalidBackendURL("Syntax error (port) in: %s A%s B%s C%s" % (url_string, (self.scheme in ['rsync']), re.search('::[^:]+$', self.netloc), self.netloc ) )
 
+        # Our URL system uses two slashes more than urlparse's does when using
+        # non-netloc URLs.  And we want to make sure that if urlparse assumes
+        # a netloc where we don't want one, we correct it.
+        if self.scheme not in uses_netloc:
+            if self.netloc:
+                self.path = '//' + self.netloc + self.path
+                self.netloc = ''
+                self.hostname = None
+            elif self.path.startswith('/'):
+                self.path = '//' + self.path
+
         # This happens for implicit local paths.
-        if not pu.scheme:
+        if not self.scheme:
             return
 
         # Our backends do not handle implicit hosts.
-        if pu.scheme in urlparser.uses_netloc and not pu.hostname:
+        if self.scheme in uses_netloc and not self.hostname:
             raise InvalidBackendURL("Missing hostname in a backend URL which "
                                     "requires an explicit hostname: %s"
                                     "" % (url_string))
 
         # Our backends do not handle implicit relative paths.
-        if pu.scheme not in urlparser.uses_netloc and not pu.path.startswith('//'):
+        if self.scheme not in uses_netloc and not self.path.startswith('//'):
             raise InvalidBackendURL("missing // - relative paths not supported "
                                     "for scheme %s: %s"
-                                    "" % (pu.scheme, url_string))
+                                    "" % (self.scheme, url_string))
 
     def geturl(self):
         return self.url_string
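
The slash-padding above compensates for a real difference between the stock
parser and the old clobbered urlparse_2_5 module.  A quick illustrative
session (Python 2.6/2.7, not part of the diff):

    import urlparse
    pu = urlparse.urlparse('file://some/dir')
    print pu.scheme, pu.netloc, pu.path     # -> file some /dir
    # 'file' is not in duplicity's uses_netloc, so ParsedUrl folds the
    # netloc back into the path: netloc becomes '' and path '//some/dir',
    # matching what the old bundled parser produced.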

=== modified file 'duplicity/backends/botobackend.py'
--- duplicity/backends/botobackend.py	2014-02-21 17:14:37 +0000
+++ duplicity/backends/botobackend.py	2014-04-16 20:51:42 +0000
@@ -22,14 +22,10 @@
 
 import duplicity.backend
 from duplicity import globals
-import sys
 from _boto_multi import BotoBackend as BotoMultiUploadBackend
 from _boto_single import BotoBackend as BotoSingleUploadBackend
 
 if globals.s3_use_multiprocessing:
-    if sys.version_info[:2] < (2, 6):
-        print "Sorry, S3 multiprocessing requires version 2.6 or later of python"
-        sys.exit(1)
     duplicity.backend.register_backend("gs", BotoMultiUploadBackend)
     duplicity.backend.register_backend("s3", BotoMultiUploadBackend)
     duplicity.backend.register_backend("s3+http", BotoMultiUploadBackend)

=== modified file 'duplicity/backends/webdavbackend.py'
--- duplicity/backends/webdavbackend.py	2013-12-30 16:01:49 +0000
+++ duplicity/backends/webdavbackend.py	2014-04-16 20:51:42 +0000
@@ -26,13 +26,13 @@
 import re
 import urllib
 import urllib2
+import urlparse
 import xml.dom.minidom
 
 import duplicity.backend
 from duplicity import globals
 from duplicity import log
 from duplicity.errors import * #@UnusedWildImport
-from duplicity import urlparse_2_5 as urlparser
 from duplicity.backend import retry_fatal
 
 class CustomMethodRequest(urllib2.Request):
@@ -332,7 +332,7 @@
         @return: A matching filename, or None if the href did not match.
         """
         raw_filename = self._getText(href.childNodes).strip()
-        parsed_url = urlparser.urlparse(urllib.unquote(raw_filename))
+        parsed_url = urlparse.urlparse(urllib.unquote(raw_filename))
         filename = parsed_url.path
         log.Debug("webdav path decoding and translation: "
                   "%s -> %s" % (raw_filename, filename))

=== modified file 'duplicity/commandline.py'
--- duplicity/commandline.py	2014-03-09 20:37:24 +0000
+++ duplicity/commandline.py	2014-04-16 20:51:42 +0000
@@ -507,9 +507,7 @@
     parser.add_option("--s3_multipart_max_timeout", type="int", metavar=_("number"))
 
     # Option to allow the s3/boto backend use the multiprocessing version.
-    # By default it is off since it does not work for Python 2.4 or 2.5.
-    if sys.version_info[:2] >= (2, 6):
-        parser.add_option("--s3-use-multiprocessing", action = "store_true")
+    parser.add_option("--s3-use-multiprocessing", action = "store_true")
 
     # scp command to use (ssh pexpect backend)
     parser.add_option("--scp-command", metavar = _("command"))

=== modified file 'duplicity/log.py'
--- duplicity/log.py	2013-12-27 06:39:00 +0000
+++ duplicity/log.py	2014-04-16 20:51:42 +0000
@@ -49,7 +49,6 @@
     return DupToLoggerLevel(verb)
 
 def LevelName(level):
-    level = LoggerToDupLevel(level)
     if   level >= 9: return "DEBUG"
     elif level >= 5: return "INFO"
     elif level >= 3: return "NOTICE"
@@ -59,12 +58,10 @@
 def Log(s, verb_level, code=1, extra=None, force_print=False):
     """Write s to stderr if verbosity level low enough"""
     global _logger
-    # controlLine is a terrible hack until duplicity depends on Python 2.5
-    # and its logging 'extra' keyword that allows a custom record dictionary.
     if extra:
-        _logger.controlLine = '%d %s' % (code, extra)
+        controlLine = '%d %s' % (code, extra)
     else:
-        _logger.controlLine = '%d' % (code)
+        controlLine = '%d' % (code)
     if not s:
         s = '' # If None is passed, standard logging would render it as 'None'
 
@@ -79,8 +76,9 @@
     if not isinstance(s, unicode):
         s = s.decode("utf8", "replace")
 
-    _logger.log(DupToLoggerLevel(verb_level), s)
-    _logger.controlLine = None
+    _logger.log(DupToLoggerLevel(verb_level), s,
+                extra={'levelName': LevelName(verb_level),
+                       'controlLine': controlLine})
 
     if force_print:
         _logger.setLevel(initial_level)
@@ -305,22 +303,6 @@
     shutdown()
     sys.exit(code)
 
-class DupLogRecord(logging.LogRecord):
-    """Custom log record that holds a message code"""
-    def __init__(self, controlLine, *args, **kwargs):
-        global _logger
-        logging.LogRecord.__init__(self, *args, **kwargs)
-        self.controlLine = controlLine
-        self.levelName = LevelName(self.levelno)
-
-class DupLogger(logging.Logger):
-    """Custom logger that creates special code-bearing records"""
-    # controlLine is a terrible hack until duplicity depends on Python 2.5
-    # and its logging 'extra' keyword that allows a custom record dictionary.
-    controlLine = None
-    def makeRecord(self, name, lvl, fn, lno, msg, args, exc_info, *argv, **kwargs):
-        return DupLogRecord(self.controlLine, name, lvl, fn, lno, msg, args, exc_info)
-
 class OutFilter(logging.Filter):
     """Filter that only allows warning or less important messages"""
     def filter(self, record):
@@ -337,7 +319,6 @@
     if _logger:
         return
 
-    logging.setLoggerClass(DupLogger)
     _logger = logging.getLogger("duplicity")
 
     # Default verbosity allows notices and above
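
The DupLogger/DupLogRecord machinery goes away because the stdlib logging
'extra' keyword (available since Python 2.5) merges custom attributes into
each LogRecord directly.  A minimal self-contained sketch with illustrative
names:

    import logging, sys
    logger = logging.getLogger('demo')
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(logging.Formatter('%(controlLine)s %(message)s'))
    logger.addHandler(handler)
    # 'extra' attaches controlLine to the record; the formatter reads it.
    logger.warning('upload failed', extra={'controlLine': '42 extra-data'})
    # -> 42 extra-data upload failed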

=== modified file 'duplicity/tarfile.py'
--- duplicity/tarfile.py	2013-10-05 15:11:55 +0000
+++ duplicity/tarfile.py	2014-04-16 20:51:42 +0000
@@ -1,2594 +1,35 @@
-#! /usr/bin/python2.7
-# -*- coding: iso-8859-1 -*-
-#-------------------------------------------------------------------
-# tarfile.py
-#-------------------------------------------------------------------
-# Copyright (C) 2002 Lars Gustäbel <lars@xxxxxxxxxxxx>
-# All rights reserved.
-#
-# Permission  is  hereby granted,  free  of charge,  to  any person
-# obtaining a  copy of  this software  and associated documentation
-# files  (the  "Software"),  to   deal  in  the  Software   without
-# restriction,  including  without limitation  the  rights to  use,
-# copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies  of  the  Software,  and to  permit  persons  to  whom the
-# Software  is  furnished  to  do  so,  subject  to  the  following
-# conditions:
-#
-# The above copyright  notice and this  permission notice shall  be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
-# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
-# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
-# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
-# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
-# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-#
-"""Read from and write to tar format archives.
-"""
-
-__version__ = "$Revision: 85213 $"
-# $Source$
-
-version     = "0.9.0"
-__author__  = "Lars Gustäbel (lars@xxxxxxxxxxxx)"
-__date__    = "$Date: 2010-10-04 10:37:53 -0500 (Mon, 04 Oct 2010) $"
-__cvsid__   = "$Id: tarfile.py 85213 2010-10-04 15:37:53Z lars.gustaebel $"
-__credits__ = "Gustavo Niemeyer, Niels Gustäbel, Richard Townsend."
-
-#---------
-# Imports
-#---------
-import sys
-import os
-import shutil
-import stat
-import errno
-import time
-import struct
-import copy
-import re
-import operator
-
+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
+#
+# Copyright 2013 Michael Terry <mike@xxxxxxxxxxx>
+#
+# This file is part of duplicity.
+#
+# Duplicity is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# Duplicity is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with duplicity; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+"""Like system tarfile but with caching."""
+
+from __future__ import absolute_import
+
+import tarfile
+
+# Grab all symbols in tarfile, to try to reproduce its API exactly.
+# from <> import * wouldn't get everything we want, since tarfile defines
+# __all__.  So we do it ourselves.
+for sym in dir(tarfile):
+    globals()[sym] = getattr(tarfile, sym)
+
+# Now make sure that we cache the grp/pwd ops
 from duplicity import cached_ops
 grp = pwd = cached_ops
-
-# from tarfile import *
-__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
-
-#---------------------------------------------------------
-# tar constants
-#---------------------------------------------------------
-NUL = "\0"                      # the null character
-BLOCKSIZE = 512                 # length of processing blocks
-RECORDSIZE = BLOCKSIZE * 20     # length of records
-GNU_MAGIC = "ustar  \0"         # magic gnu tar string
-POSIX_MAGIC = "ustar\x0000"     # magic posix tar string
-
-LENGTH_NAME = 100               # maximum length of a filename
-LENGTH_LINK = 100               # maximum length of a linkname
-LENGTH_PREFIX = 155             # maximum length of the prefix field
-
-REGTYPE = "0"                   # regular file
-AREGTYPE = "\0"                 # regular file
-LNKTYPE = "1"                   # link (inside tarfile)
-SYMTYPE = "2"                   # symbolic link
-CHRTYPE = "3"                   # character special device
-BLKTYPE = "4"                   # block special device
-DIRTYPE = "5"                   # directory
-FIFOTYPE = "6"                  # fifo special device
-CONTTYPE = "7"                  # contiguous file
-
-GNUTYPE_LONGNAME = "L"          # GNU tar longname
-GNUTYPE_LONGLINK = "K"          # GNU tar longlink
-GNUTYPE_SPARSE = "S"            # GNU tar sparse file
-
-XHDTYPE = "x"                   # POSIX.1-2001 extended header
-XGLTYPE = "g"                   # POSIX.1-2001 global header
-SOLARIS_XHDTYPE = "X"           # Solaris extended header
-
-USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
-GNU_FORMAT = 1                  # GNU tar format
-PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
-DEFAULT_FORMAT = GNU_FORMAT
-
-#---------------------------------------------------------
-# tarfile constants
-#---------------------------------------------------------
-# File types that tarfile supports:
-SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
-                   SYMTYPE, DIRTYPE, FIFOTYPE,
-                   CONTTYPE, CHRTYPE, BLKTYPE,
-                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
-                   GNUTYPE_SPARSE)
-
-# File types that will be treated as a regular file.
-REGULAR_TYPES = (REGTYPE, AREGTYPE,
-                 CONTTYPE, GNUTYPE_SPARSE)
-
-# File types that are part of the GNU tar format.
-GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
-             GNUTYPE_SPARSE)
-
-# Fields from a pax header that override a TarInfo attribute.
-PAX_FIELDS = ("path", "linkpath", "size", "mtime",
-              "uid", "gid", "uname", "gname")
-
-# Fields in a pax header that are numbers, all other fields
-# are treated as strings.
-PAX_NUMBER_FIELDS = {
-    "atime": float,
-    "ctime": float,
-    "mtime": float,
-    "uid": int,
-    "gid": int,
-    "size": int
-}
-
-#---------------------------------------------------------
-# Bits used in the mode field, values in octal.
-#---------------------------------------------------------
-S_IFLNK = 0120000        # symbolic link
-S_IFREG = 0100000        # regular file
-S_IFBLK = 0060000        # block device
-S_IFDIR = 0040000        # directory
-S_IFCHR = 0020000        # character device
-S_IFIFO = 0010000        # fifo
-
-TSUID   = 04000          # set UID on execution
-TSGID   = 02000          # set GID on execution
-TSVTX   = 01000          # reserved
-
-TUREAD  = 0400           # read by owner
-TUWRITE = 0200           # write by owner
-TUEXEC  = 0100           # execute/search by owner
-TGREAD  = 0040           # read by group
-TGWRITE = 0020           # write by group
-TGEXEC  = 0010           # execute/search by group
-TOREAD  = 0004           # read by other
-TOWRITE = 0002           # write by other
-TOEXEC  = 0001           # execute/search by other
-
-#---------------------------------------------------------
-# initialization
-#---------------------------------------------------------
-ENCODING = sys.getfilesystemencoding()
-if ENCODING is None:
-    ENCODING = sys.getdefaultencoding()
-
-#---------------------------------------------------------
-# Some useful functions
-#---------------------------------------------------------
-
-def stn(s, length):
-    """Convert a python string to a null-terminated string buffer.
-    """
-    return s[:length] + (length - len(s)) * NUL
-
-def nts(s):
-    """Convert a null-terminated string field to a python string.
-    """
-    # Use the string up to the first null char.
-    p = s.find("\0")
-    if p == -1:
-        return s
-    return s[:p]
-
-def nti(s):
-    """Convert a number field to a python number.
-    """
-    # There are two possible encodings for a number field, see
-    # itn() below.
-    if s[0] != chr(0200):
-        try:
-            n = int(nts(s) or "0", 8)
-        except ValueError:
-            raise InvalidHeaderError("invalid header")
-    else:
-        n = 0L
-        for i in xrange(len(s) - 1):
-            n <<= 8
-            n += ord(s[i + 1])
-    return n
-
-def itn(n, digits=8, format=DEFAULT_FORMAT):
-    """Convert a python number to a number field.
-    """
-    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
-    # octal digits followed by a null-byte, this allows values up to
-    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
-    # that if necessary. A leading 0200 byte indicates this particular
-    # encoding, the following digits-1 bytes are a big-endian
-    # representation. This allows values up to (256**(digits-1))-1.
-    if 0 <= n < 8 ** (digits - 1):
-        s = "%0*o" % (digits - 1, n) + NUL
-    else:
-        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
-            raise ValueError("overflow in number field")
-
-        if n < 0:
-            # XXX We mimic GNU tar's behaviour with negative numbers,
-            # this could raise OverflowError.
-            n = struct.unpack("L", struct.pack("l", n))[0]
-
-        s = ""
-        for i in xrange(digits - 1):
-            s = chr(n & 0377) + s
-            n >>= 8
-        s = chr(0200) + s
-    return s
-
-def uts(s, encoding, errors):
-    """Convert a unicode object to a string.
-    """
-    if errors == "utf-8":
-        # An extra error handler similar to the -o invalid=UTF-8 option
-        # in POSIX.1-2001. Replace untranslatable characters with their
-        # UTF-8 representation.
-        try:
-            return s.encode(encoding, "strict")
-        except UnicodeEncodeError:
-            x = []
-            for c in s:
-                try:
-                    x.append(c.encode(encoding, "strict"))
-                except UnicodeEncodeError:
-                    x.append(c.encode("utf8"))
-            return "".join(x)
-    else:
-        return s.encode(encoding, errors)
-
-def calc_chksums(buf):
-    """Calculate the checksum for a member's header by summing up all
-       characters except for the chksum field which is treated as if
-       it was filled with spaces. According to the GNU tar sources,
-       some tars (Sun and NeXT) calculate chksum with signed char,
-       which will be different if there are chars in the buffer with
-       the high bit set. So we calculate two checksums, unsigned and
-       signed.
-    """
-    unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
-    signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
-    return unsigned_chksum, signed_chksum
-
-def copyfileobj(src, dst, length=None):
-    """Copy length bytes from fileobj src to fileobj dst.
-       If length is None, copy the entire content.
-    """
-    if length == 0:
-        return
-    if length is None:
-        shutil.copyfileobj(src, dst)
-        return
-
-    BUFSIZE = 16 * 1024
-    blocks, remainder = divmod(length, BUFSIZE)
-    for b in xrange(blocks):
-        buf = src.read(BUFSIZE)
-        if len(buf) < BUFSIZE:
-            raise IOError("end of file reached")
-        dst.write(buf)
-
-    if remainder != 0:
-        buf = src.read(remainder)
-        if len(buf) < remainder:
-            raise IOError("end of file reached")
-        dst.write(buf)
-    return
-
-filemode_table = (
-    ((S_IFLNK,      "l"),
-     (S_IFREG,      "-"),
-     (S_IFBLK,      "b"),
-     (S_IFDIR,      "d"),
-     (S_IFCHR,      "c"),
-     (S_IFIFO,      "p")),
-
-    ((TUREAD,       "r"),),
-    ((TUWRITE,      "w"),),
-    ((TUEXEC|TSUID, "s"),
-     (TSUID,        "S"),
-     (TUEXEC,       "x")),
-
-    ((TGREAD,       "r"),),
-    ((TGWRITE,      "w"),),
-    ((TGEXEC|TSGID, "s"),
-     (TSGID,        "S"),
-     (TGEXEC,       "x")),
-
-    ((TOREAD,       "r"),),
-    ((TOWRITE,      "w"),),
-    ((TOEXEC|TSVTX, "t"),
-     (TSVTX,        "T"),
-     (TOEXEC,       "x"))
-)
-
-def filemode(mode):
-    """Convert a file's mode to a string of the form
-       -rwxrwxrwx.
-       Used by TarFile.list()
-    """
-    perm = []
-    for table in filemode_table:
-        for bit, char in table:
-            if mode & bit == bit:
-                perm.append(char)
-                break
-        else:
-            perm.append("-")
-    return "".join(perm)
-
-class TarError(Exception):
-    """Base exception."""
-    pass
-class ExtractError(TarError):
-    """General exception for extract errors."""
-    pass
-class ReadError(TarError):
-    """Exception for unreadble tar archives."""
-    pass
-class CompressionError(TarError):
-    """Exception for unavailable compression methods."""
-    pass
-class StreamError(TarError):
-    """Exception for unsupported operations on stream-like TarFiles."""
-    pass
-class HeaderError(TarError):
-    """Base exception for header errors."""
-    pass
-class EmptyHeaderError(HeaderError):
-    """Exception for empty headers."""
-    pass
-class TruncatedHeaderError(HeaderError):
-    """Exception for truncated headers."""
-    pass
-class EOFHeaderError(HeaderError):
-    """Exception for end of file headers."""
-    pass
-class InvalidHeaderError(HeaderError):
-    """Exception for invalid headers."""
-    pass
-class SubsequentHeaderError(HeaderError):
-    """Exception for missing and invalid extended headers."""
-    pass
-
-#---------------------------
-# internal stream interface
-#---------------------------
-class _LowLevelFile:
-    """Low-level file object. Supports reading and writing.
-       It is used instead of a regular file object for streaming
-       access.
-    """
-
-    def __init__(self, name, mode):
-        mode = {
-            "r": os.O_RDONLY,
-            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
-        }[mode]
-        if hasattr(os, "O_BINARY"):
-            mode |= os.O_BINARY
-        self.fd = os.open(name, mode, 0666)
-
-    def close(self):
-        os.close(self.fd)
-
-    def read(self, size):
-        return os.read(self.fd, size)
-
-    def write(self, s):
-        os.write(self.fd, s)
-
-class _Stream:
-    """Class that serves as an adapter between TarFile and
-       a stream-like object.  The stream-like object only
-       needs to have a read() or write() method and is accessed
-       blockwise.  Use of gzip or bzip2 compression is possible.
-       A stream-like object could be for example: sys.stdin,
-       sys.stdout, a socket, a tape device etc.
-
-       _Stream is intended to be used only internally.
-    """
-
-    def __init__(self, name, mode, comptype, fileobj, bufsize):
-        """Construct a _Stream object.
-        """
-        self._extfileobj = True
-        if fileobj is None:
-            fileobj = _LowLevelFile(name, mode)
-            self._extfileobj = False
-
-        if comptype == '*':
-            # Enable transparent compression detection for the
-            # stream interface
-            fileobj = _StreamProxy(fileobj)
-            comptype = fileobj.getcomptype()
-
-        self.name     = name or ""
-        self.mode     = mode
-        self.comptype = comptype
-        self.fileobj  = fileobj
-        self.bufsize  = bufsize
-        self.buf      = ""
-        self.pos      = 0L
-        self.closed   = False
-
-        if comptype == "gz":
-            try:
-                import zlib
-            except ImportError:
-                raise CompressionError("zlib module is not available")
-            self.zlib = zlib
-            self.crc = zlib.crc32("") & 0xffffffffL
-            if mode == "r":
-                self._init_read_gz()
-            else:
-                self._init_write_gz()
-
-        if comptype == "bz2":
-            try:
-                import bz2
-            except ImportError:
-                raise CompressionError("bz2 module is not available")
-            if mode == "r":
-                self.dbuf = ""
-                self.cmp = bz2.BZ2Decompressor()
-            else:
-                self.cmp = bz2.BZ2Compressor()
-
-    def __del__(self):
-        if hasattr(self, "closed") and not self.closed:
-            self.close()
-
-    def _init_write_gz(self):
-        """Initialize for writing with gzip compression.
-        """
-        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
-                                            -self.zlib.MAX_WBITS,
-                                            self.zlib.DEF_MEM_LEVEL,
-                                            0)
-        timestamp = struct.pack("<L", long(time.time()))
-        self.__write("\037\213\010\010%s\002\377" % timestamp)
-        if self.name.endswith(".gz"):
-            self.name = self.name[:-3]
-        self.__write(self.name + NUL)
-
-    def write(self, s):
-        """Write string s to the stream.
-        """
-        if self.comptype == "gz":
-            self.crc = self.zlib.crc32(s, self.crc) & 0xffffffffL
-        self.pos += len(s)
-        if self.comptype != "tar":
-            s = self.cmp.compress(s)
-        self.__write(s)
-
-    def __write(self, s):
-        """Write string s to the stream if a whole new block
-           is ready to be written.
-        """
-        self.buf += s
-        while len(self.buf) > self.bufsize:
-            self.fileobj.write(self.buf[:self.bufsize])
-            self.buf = self.buf[self.bufsize:]
-
-    def close(self):
-        """Close the _Stream object. No operation should be
-           done on it afterwards.
-        """
-        if self.closed:
-            return
-
-        if self.mode == "w" and self.comptype != "tar":
-            self.buf += self.cmp.flush()
-
-        if self.mode == "w" and self.buf:
-            self.fileobj.write(self.buf)
-            self.buf = ""
-            if self.comptype == "gz":
-                # The native zlib crc is an unsigned 32-bit integer, but
-                # the Python wrapper implicitly casts that to a signed C
-                # long.  So, on a 32-bit box self.crc may "look negative",
-                # while the same crc on a 64-bit box may "look positive".
-                # To avoid irksome warnings from the `struct` module, force
-                # it to look positive on all boxes.
-                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffffL))
-                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFFL))
-
-        if not self._extfileobj:
-            self.fileobj.close()
-
-        self.closed = True
-
-    def _init_read_gz(self):
-        """Initialize for reading a gzip compressed fileobj.
-        """
-        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
-        self.dbuf = ""
-
-        # taken from gzip.GzipFile with some alterations
-        if self.__read(2) != "\037\213":
-            raise ReadError("not a gzip file")
-        if self.__read(1) != "\010":
-            raise CompressionError("unsupported compression method")
-
-        flag = ord(self.__read(1))
-        self.__read(6)
-
-        if flag & 4:
-            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
-            self.read(xlen)
-        if flag & 8:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 16:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 2:
-            self.__read(2)
-
-    def tell(self):
-        """Return the stream's file pointer position.
-        """
-        return self.pos
-
-    def seek(self, pos=0):
-        """Set the stream's file pointer to pos. Negative seeking
-           is forbidden.
-        """
-        if pos - self.pos >= 0:
-            blocks, remainder = divmod(pos - self.pos, self.bufsize)
-            for i in xrange(blocks):
-                self.read(self.bufsize)
-            self.read(remainder)
-        else:
-            raise StreamError("seeking backwards is not allowed")
-        return self.pos
-
-    def read(self, size=None):
-        """Return the next size number of bytes from the stream.
-           If size is not defined, return all bytes of the stream
-           up to EOF.
-        """
-        if size is None:
-            t = []
-            while True:
-                buf = self._read(self.bufsize)
-                if not buf:
-                    break
-                t.append(buf)
-            buf = "".join(t)
-        else:
-            buf = self._read(size)
-        self.pos += len(buf)
-        return buf
-
-    def _read(self, size):
-        """Return size bytes from the stream.
-        """
-        if self.comptype == "tar":
-            return self.__read(size)
-
-        c = len(self.dbuf)
-        t = [self.dbuf]
-        while c < size:
-            buf = self.__read(self.bufsize)
-            if not buf:
-                break
-            try:
-                buf = self.cmp.decompress(buf)
-            except IOError:
-                raise ReadError("invalid compressed data")
-            t.append(buf)
-            c += len(buf)
-        t = "".join(t)
-        self.dbuf = t[size:]
-        return t[:size]
-
-    def __read(self, size):
-        """Return size bytes from stream. If internal buffer is empty,
-           read another block from the stream.
-        """
-        c = len(self.buf)
-        t = [self.buf]
-        while c < size:
-            buf = self.fileobj.read(self.bufsize)
-            if not buf:
-                break
-            t.append(buf)
-            c += len(buf)
-        t = "".join(t)
-        self.buf = t[size:]
-        return t[:size]
-# class _Stream
-
-class _StreamProxy(object):
-    """Small proxy class that enables transparent compression
-       detection for the Stream interface (mode 'r|*').
-    """
-
-    def __init__(self, fileobj):
-        self.fileobj = fileobj
-        self.buf = self.fileobj.read(BLOCKSIZE)
-
-    def read(self, size):
-        self.read = self.fileobj.read
-        return self.buf
-
-    def getcomptype(self):
-        if self.buf.startswith("\037\213\010"):
-            return "gz"
-        if self.buf.startswith("BZh91"):
-            return "bz2"
-        return "tar"
-
-    def close(self):
-        self.fileobj.close()
-# class StreamProxy
-
-class _BZ2Proxy(object):
-    """Small proxy class that enables external file object
-       support for "r:bz2" and "w:bz2" modes. This is actually
-       a workaround for a limitation in bz2 module's BZ2File
-       class which (unlike gzip.GzipFile) has no support for
-       a file object argument.
-    """
-
-    blocksize = 16 * 1024
-
-    def __init__(self, fileobj, mode):
-        self.fileobj = fileobj
-        self.mode = mode
-        self.name = getattr(self.fileobj, "name", None)
-        self.init()
-
-    def init(self):
-        import bz2
-        self.pos = 0
-        if self.mode == "r":
-            self.bz2obj = bz2.BZ2Decompressor()
-            self.fileobj.seek(0)
-            self.buf = ""
-        else:
-            self.bz2obj = bz2.BZ2Compressor()
-
-    def read(self, size):
-        b = [self.buf]
-        x = len(self.buf)
-        while x < size:
-            raw = self.fileobj.read(self.blocksize)
-            if not raw:
-                break
-            data = self.bz2obj.decompress(raw)
-            b.append(data)
-            x += len(data)
-        self.buf = "".join(b)
-
-        buf = self.buf[:size]
-        self.buf = self.buf[size:]
-        self.pos += len(buf)
-        return buf
-
-    def seek(self, pos):
-        if pos < self.pos:
-            self.init()
-        self.read(pos - self.pos)
-
-    def tell(self):
-        return self.pos
-
-    def write(self, data):
-        self.pos += len(data)
-        raw = self.bz2obj.compress(data)
-        self.fileobj.write(raw)
-
-    def close(self):
-        if self.mode == "w":
-            raw = self.bz2obj.flush()
-            self.fileobj.write(raw)
-# class _BZ2Proxy
-
-#------------------------
-# Extraction file object
-#------------------------
-class _FileInFile(object):
-    """A thin wrapper around an existing file object that
-       provides a part of its data as an individual file
-       object.
-    """
-
-    def __init__(self, fileobj, offset, size, sparse=None):
-        self.fileobj = fileobj
-        self.offset = offset
-        self.size = size
-        self.sparse = sparse
-        self.position = 0
-
-    def tell(self):
-        """Return the current file position.
-        """
-        return self.position
-
-    def seek(self, position):
-        """Seek to a position in the file.
-        """
-        self.position = position
-
-    def read(self, size=None):
-        """Read data from the file.
-        """
-        if size is None:
-            size = self.size - self.position
-        else:
-            size = min(size, self.size - self.position)
-
-        if self.sparse is None:
-            return self.readnormal(size)
-        else:
-            return self.readsparse(size)
-
-    def readnormal(self, size):
-        """Read operation for regular files.
-        """
-        self.fileobj.seek(self.offset + self.position)
-        self.position += size
-        return self.fileobj.read(size)
-
-    def readsparse(self, size):
-        """Read operation for sparse files.
-        """
-        data = []
-        while size > 0:
-            buf = self.readsparsesection(size)
-            if not buf:
-                break
-            size -= len(buf)
-            data.append(buf)
-        return "".join(data)
-
-    def readsparsesection(self, size):
-        """Read a single section of a sparse file.
-        """
-        section = self.sparse.find(self.position)
-
-        if section is None:
-            return ""
-
-        size = min(size, section.offset + section.size - self.position)
-
-        if isinstance(section, _data):
-            realpos = section.realpos + self.position - section.offset
-            self.fileobj.seek(self.offset + realpos)
-            self.position += size
-            return self.fileobj.read(size)
-        else:
-            self.position += size
-            return NUL * size
-#class _FileInFile
-
-
-class ExFileObject(object):
-    """File-like object for reading an archive member.
-       Is returned by TarFile.extractfile().
-    """
-    blocksize = 1024
-
-    def __init__(self, tarfile, tarinfo):
-        self.fileobj = _FileInFile(tarfile.fileobj,
-                                   tarinfo.offset_data,
-                                   tarinfo.size,
-                                   getattr(tarinfo, "sparse", None))
-        self.name = tarinfo.name
-        self.mode = "r"
-        self.closed = False
-        self.size = tarinfo.size
-
-        self.position = 0
-        self.buffer = ""
-
-    def read(self, size=None):
-        """Read at most size bytes from the file. If size is not
-           present or None, read all data until EOF is reached.
-        """
-        if self.closed:
-            raise ValueError("I/O operation on closed file")
-
-        buf = ""
-        if self.buffer:
-            if size is None:
-                buf = self.buffer
-                self.buffer = ""
-            else:
-                buf = self.buffer[:size]
-                self.buffer = self.buffer[size:]
-
-        if size is None:
-            buf += self.fileobj.read()
-        else:
-            buf += self.fileobj.read(size - len(buf))
-
-        self.position += len(buf)
-        return buf
-
-    def readline(self, size=-1):
-        """Read one entire line from the file. If size is present
-           and non-negative, return a string with at most that
-           size, which may be an incomplete line.
-        """
-        if self.closed:
-            raise ValueError("I/O operation on closed file")
-
-        if "\n" in self.buffer:
-            pos = self.buffer.find("\n") + 1
-        else:
-            buffers = [self.buffer]
-            while True:
-                buf = self.fileobj.read(self.blocksize)
-                buffers.append(buf)
-                if not buf or "\n" in buf:
-                    self.buffer = "".join(buffers)
-                    pos = self.buffer.find("\n") + 1
-                    if pos == 0:
-                        # no newline found.
-                        pos = len(self.buffer)
-                    break
-
-        if size != -1:
-            pos = min(size, pos)
-
-        buf = self.buffer[:pos]
-        self.buffer = self.buffer[pos:]
-        self.position += len(buf)
-        return buf
-
-    def readlines(self):
-        """Return a list with all remaining lines.
-        """
-        result = []
-        while True:
-            line = self.readline()
-            if not line: break
-            result.append(line)
-        return result
-
-    def tell(self):
-        """Return the current file position.
-        """
-        if self.closed:
-            raise ValueError("I/O operation on closed file")
-
-        return self.position
-
-    def seek(self, pos, whence=0):
-        """Seek to a position in the file.
-        """
-        if self.closed:
-            raise ValueError("I/O operation on closed file")
-
-        if whence == 0:
-            self.position = min(max(pos, 0), self.size)
-        elif whence == 1:
-            if pos < 0:
-                self.position = max(self.position + pos, 0)
-            else:
-                self.position = min(self.position + pos, self.size)
-        elif whence == 2:
-            self.position = max(min(self.size + pos, self.size), 0)
-        else:
-            raise ValueError("Invalid argument")
-
-        self.buffer = ""
-        self.fileobj.seek(self.position)
-
-    def close(self):
-        """Close the file object.
-        """
-        self.closed = True
-
-    def __iter__(self):
-        """Get an iterator over the file's lines.
-        """
-        while True:
-            line = self.readline()
-            if not line:
-                break
-            yield line
-#class ExFileObject
-
-#------------------
-# Exported Classes
-#------------------
-class TarInfo(object):
-    """Informational class which holds the details about an
-       archive member given by a tar header block.
-       TarInfo objects are returned by TarFile.getmember(),
-       TarFile.getmembers() and TarFile.gettarinfo() and are
-       usually created internally.
-    """
-
-    def __init__(self, name=""):
-        """Construct a TarInfo object. name is the optional name
-           of the member.
-        """
-        self.name = name        # member name
-        self.mode = 0644        # file permissions
-        self.uid = 0            # user id
-        self.gid = 0            # group id
-        self.size = 0           # file size
-        self.mtime = 0          # modification time
-        self.chksum = 0         # header checksum
-        self.type = REGTYPE     # member type
-        self.linkname = ""      # link name
-        self.uname = ""         # user name
-        self.gname = ""         # group name
-        self.devmajor = 0       # device major number
-        self.devminor = 0       # device minor number
-
-        self.offset = 0         # the tar header starts here
-        self.offset_data = 0    # the file's data starts here
-
-        self.pax_headers = {}   # pax header information
-
-    # In pax headers the "name" and "linkname" field are called
-    # "path" and "linkpath".
-    def _getpath(self):
-        return self.name
-    def _setpath(self, name):
-        self.name = name
-    path = property(_getpath, _setpath)
-
-    def _getlinkpath(self):
-        return self.linkname
-    def _setlinkpath(self, linkname):
-        self.linkname = linkname
-    linkpath = property(_getlinkpath, _setlinkpath)
-
-    def __repr__(self):
-        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
-
-    def get_info(self, encoding, errors):
-        """Return the TarInfo's attributes as a dictionary.
-        """
-        info = {
-            "name":     self.name,
-            "mode":     self.mode & 07777,
-            "uid":      self.uid,
-            "gid":      self.gid,
-            "size":     self.size,
-            "mtime":    self.mtime,
-            "chksum":   self.chksum,
-            "type":     self.type,
-            "linkname": self.linkname,
-            "uname":    self.uname,
-            "gname":    self.gname,
-            "devmajor": self.devmajor,
-            "devminor": self.devminor
-        }
-
-        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
-            info["name"] += "/"
-
-        for key in ("name", "linkname", "uname", "gname"):
-            if type(info[key]) is unicode:
-                info[key] = info[key].encode(encoding, errors)
-
-        return info
-
-    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="strict"):
-        """Return a tar header as a string of 512 byte blocks.
-        """
-        info = self.get_info(encoding, errors)
-
-        if format == USTAR_FORMAT:
-            return self.create_ustar_header(info)
-        elif format == GNU_FORMAT:
-            return self.create_gnu_header(info)
-        elif format == PAX_FORMAT:
-            return self.create_pax_header(info, encoding, errors)
-        else:
-            raise ValueError("invalid format")
-
-    def create_ustar_header(self, info):
-        """Return the object as a ustar header block.
-        """
-        info["magic"] = POSIX_MAGIC
-
-        if len(info["linkname"]) > LENGTH_LINK:
-            raise ValueError("linkname is too long")
-
-        if len(info["name"]) > LENGTH_NAME:
-            info["prefix"], info["name"] = self._posix_split_name(info["name"])
-
-        return self._create_header(info, USTAR_FORMAT)
-
-    def create_gnu_header(self, info):
-        """Return the object as a GNU header block sequence.
-        """
-        info["magic"] = GNU_MAGIC
-
-        buf = ""
-        if len(info["linkname"]) > LENGTH_LINK:
-            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK)
-
-        if len(info["name"]) > LENGTH_NAME:
-            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME)
-
-        return buf + self._create_header(info, GNU_FORMAT)
-
-    def create_pax_header(self, info, encoding, errors):
-        """Return the object as a ustar header block. If it cannot be
-           represented this way, prepend a pax extended header sequence
-           with supplement information.
-        """
-        info["magic"] = POSIX_MAGIC
-        pax_headers = self.pax_headers.copy()
-
-        # Test string fields for values that exceed the field length or cannot
-        # be represented in ASCII encoding.
-        for name, hname, length in (
-                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
-                ("uname", "uname", 32), ("gname", "gname", 32)):
-
-            if hname in pax_headers:
-                # The pax header has priority.
-                continue
-
-            val = info[name].decode(encoding, errors)
-
-            # Try to encode the string as ASCII.
-            try:
-                val.encode("ascii")
-            except UnicodeEncodeError:
-                pax_headers[hname] = val
-                continue
-
-            if len(info[name]) > length:
-                pax_headers[hname] = val
-
-        # Test number fields for values that exceed the field limit or values
-        # that like to be stored as float.
-        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
-            if name in pax_headers:
-                # The pax header has priority. Avoid overflow.
-                info[name] = 0
-                continue
-
-            val = info[name]
-            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
-                pax_headers[name] = unicode(val)
-                info[name] = 0
-
-        # Create a pax extended header if necessary.
-        if pax_headers:
-            buf = self._create_pax_generic_header(pax_headers)
-        else:
-            buf = ""
-
-        return buf + self._create_header(info, USTAR_FORMAT)
-
-    @classmethod
-    def create_pax_global_header(cls, pax_headers):
-        """Return the object as a pax global header block sequence.
-        """
-        return cls._create_pax_generic_header(pax_headers, type=XGLTYPE)
-
-    def _posix_split_name(self, name):
-        """Split a name longer than 100 chars into a prefix
-           and a name part.
-        """
-        prefix = name[:LENGTH_PREFIX + 1]
-        while prefix and prefix[-1] != "/":
-            prefix = prefix[:-1]
-
-        name = name[len(prefix):]
-        prefix = prefix[:-1]
-
-        if not prefix or len(name) > LENGTH_NAME:
-            raise ValueError("name is too long")
-        return prefix, name
-
-    @staticmethod
-    def _create_header(info, format):
-        """Return a header block. info is a dictionary with file
-           information, format must be one of the *_FORMAT constants.
-        """
-        parts = [
-            stn(info.get("name", ""), 100),
-            itn(info.get("mode", 0) & 07777, 8, format),
-            itn(info.get("uid", 0), 8, format),
-            itn(info.get("gid", 0), 8, format),
-            itn(info.get("size", 0), 12, format),
-            itn(info.get("mtime", 0), 12, format),
-            "        ", # checksum field
-            info.get("type", REGTYPE),
-            stn(info.get("linkname", ""), 100),
-            stn(info.get("magic", POSIX_MAGIC), 8),
-            stn(info.get("uname", ""), 32),
-            stn(info.get("gname", ""), 32),
-            itn(info.get("devmajor", 0), 8, format),
-            itn(info.get("devminor", 0), 8, format),
-            stn(info.get("prefix", ""), 155)
-        ]
-
-        buf = struct.pack("%ds" % BLOCKSIZE, "".join(parts))
-        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
-        buf = buf[:-364] + "%06o\0" % chksum + buf[-357:]
-        return buf
-
-    @staticmethod
-    def _create_payload(payload):
-        """Return the string payload filled with zero bytes
-           up to the next 512 byte border.
-        """
-        blocks, remainder = divmod(len(payload), BLOCKSIZE)
-        if remainder > 0:
-            payload += (BLOCKSIZE - remainder) * NUL
-        return payload
-
-    @classmethod
-    def _create_gnu_long_header(cls, name, type):
-        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
-           for name.
-        """
-        name += NUL
-
-        info = {}
-        info["name"] = "././@LongLink"
-        info["type"] = type
-        info["size"] = len(name)
-        info["magic"] = GNU_MAGIC
-
-        # create extended header + name blocks.
-        return cls._create_header(info, USTAR_FORMAT) + \
-                cls._create_payload(name)
-
-    @classmethod
-    def _create_pax_generic_header(cls, pax_headers, type=XHDTYPE):
-        """Return a POSIX.1-2001 extended or global header sequence
-           that contains a list of keyword, value pairs. The values
-           must be unicode objects.
-        """
-        records = []
-        for keyword, value in pax_headers.iteritems():
-            keyword = keyword.encode("utf8")
-            value = value.encode("utf8")
-            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
-            n = p = 0
-            while True:
-                n = l + len(str(p))
-                if n == p:
-                    break
-                p = n
-            records.append("%d %s=%s\n" % (p, keyword, value))
-        records = "".join(records)
-
-        # We use a hardcoded "././@PaxHeader" name like star does
-        # instead of the one that POSIX recommends.
-        info = {}
-        info["name"] = "././@PaxHeader"
-        info["type"] = type
-        info["size"] = len(records)
-        info["magic"] = POSIX_MAGIC
-
-        # Create pax header + record blocks.
-        return cls._create_header(info, USTAR_FORMAT) + \
-                cls._create_payload(records)
-
-    @classmethod
-    def frombuf(cls, buf):
-        """Construct a TarInfo object from a 512 byte string buffer.
-        """
-        if len(buf) == 0:
-            raise EmptyHeaderError("empty header")
-        if len(buf) != BLOCKSIZE:
-            raise TruncatedHeaderError("truncated header")
-        if buf.count(NUL) == BLOCKSIZE:
-            raise EOFHeaderError("end of file header")
-
-        chksum = nti(buf[148:156])
-        if chksum not in calc_chksums(buf):
-            raise InvalidHeaderError("bad checksum")
-
-        obj = cls()
-        obj.buf = buf
-        obj.name = nts(buf[0:100])
-        obj.mode = nti(buf[100:108])
-        obj.uid = nti(buf[108:116])
-        obj.gid = nti(buf[116:124])
-        obj.size = nti(buf[124:136])
-        obj.mtime = nti(buf[136:148])
-        obj.chksum = chksum
-        obj.type = buf[156:157]
-        obj.linkname = nts(buf[157:257])
-        obj.uname = nts(buf[265:297])
-        obj.gname = nts(buf[297:329])
-        obj.devmajor = nti(buf[329:337])
-        obj.devminor = nti(buf[337:345])
-        prefix = nts(buf[345:500])
-
-        # Old V7 tar format represents a directory as a regular
-        # file with a trailing slash.
-        if obj.type == AREGTYPE and obj.name.endswith("/"):
-            obj.type = DIRTYPE
-
-        # Remove redundant slashes from directories.
-        if obj.isdir():
-            obj.name = obj.name.rstrip("/")
-
-        # Reconstruct a ustar longname.
-        if prefix and obj.type not in GNU_TYPES:
-            obj.name = prefix + "/" + obj.name
-        return obj
-
-    @classmethod
-    def fromtarfile(cls, tarfile):
-        """Return the next TarInfo object from TarFile object
-           tarfile.
-        """
-        buf = tarfile.fileobj.read(BLOCKSIZE)
-        obj = cls.frombuf(buf)
-        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
-        return obj._proc_member(tarfile)
-
-    #--------------------------------------------------------------------------
-    # The following are methods that are called depending on the type of a
-    # member. The entry point is _proc_member() which can be overridden in a
-    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
-    # implement the following operations:
-    # 1. Set self.offset_data to the position where the data blocks begin,
-    #    if there is data that follows.
-    # 2. Set tarfile.offset to the position where the next member's header will
-    #    begin.
-    # 3. Return self or another valid TarInfo object.
-    def _proc_member(self, tarfile):
-        """Choose the right processing method depending on
-           the type and call it.
-        """
-        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
-            return self._proc_gnulong(tarfile)
-        elif self.type == GNUTYPE_SPARSE:
-            return self._proc_sparse(tarfile)
-        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
-            return self._proc_pax(tarfile)
-        else:
-            return self._proc_builtin(tarfile)
-
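A minimal sketch of a subclass hook honoring the three-step contract above (the type code and method names are hypothetical):

    class MyTarInfo(TarInfo):
        def _proc_member(self, tarfile):
            if self.type == "Z":               # hypothetical vendor type
                return self._proc_vendor(tarfile)
            return TarInfo._proc_member(self, tarfile)

        def _proc_vendor(self, tarfile):
            # 1. data starts right after this header block
            self.offset_data = tarfile.fileobj.tell()
            # 2. next header begins after the padded data blocks
            tarfile.offset = self.offset_data + self._block(self.size)
            # 3. return a valid TarInfo object
            return self
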
-    def _proc_builtin(self, tarfile):
-        """Process a builtin type or an unknown type which
-           will be treated as a regular file.
-        """
-        self.offset_data = tarfile.fileobj.tell()
-        offset = self.offset_data
-        if self.isreg() or self.type not in SUPPORTED_TYPES:
-            # Skip the following data blocks.
-            offset += self._block(self.size)
-        tarfile.offset = offset
-
-        # Patch the TarInfo object with saved global
-        # header information.
-        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
-
-        return self
-
-    def _proc_gnulong(self, tarfile):
-        """Process the blocks that hold a GNU longname
-           or longlink member.
-        """
-        buf = tarfile.fileobj.read(self._block(self.size))
-
-        # Fetch the next header and process it.
-        try:
-            next = self.fromtarfile(tarfile)
-        except HeaderError:
-            raise SubsequentHeaderError("missing or bad subsequent header")
-
-        # Patch the TarInfo object from the next header with
-        # the longname information.
-        next.offset = self.offset
-        if self.type == GNUTYPE_LONGNAME:
-            next.name = nts(buf)
-        elif self.type == GNUTYPE_LONGLINK:
-            next.linkname = nts(buf)
-
-        return next
-
-    def _proc_sparse(self, tarfile):
-        """Process a GNU sparse header plus extra headers.
-        """
-        buf = self.buf
-        sp = _ringbuffer()
-        pos = 386
-        lastpos = 0L
-        realpos = 0L
-        # There are 4 possible sparse structs in the
-        # first header.
-        for i in xrange(4):
-            try:
-                offset = nti(buf[pos:pos + 12])
-                numbytes = nti(buf[pos + 12:pos + 24])
-            except ValueError:
-                break
-            if offset > lastpos:
-                sp.append(_hole(lastpos, offset - lastpos))
-            sp.append(_data(offset, numbytes, realpos))
-            realpos += numbytes
-            lastpos = offset + numbytes
-            pos += 24
-
-        isextended = ord(buf[482])
-        origsize = nti(buf[483:495])
-
-        # If the isextended flag is given,
-        # there are extra headers to process.
-        while isextended == 1:
-            buf = tarfile.fileobj.read(BLOCKSIZE)
-            pos = 0
-            for i in xrange(21):
-                try:
-                    offset = nti(buf[pos:pos + 12])
-                    numbytes = nti(buf[pos + 12:pos + 24])
-                except ValueError:
-                    break
-                if offset > lastpos:
-                    sp.append(_hole(lastpos, offset - lastpos))
-                sp.append(_data(offset, numbytes, realpos))
-                realpos += numbytes
-                lastpos = offset + numbytes
-                pos += 24
-            isextended = ord(buf[504])
-
-        if lastpos < origsize:
-            sp.append(_hole(lastpos, origsize - lastpos))
-
-        self.sparse = sp
-
-        self.offset_data = tarfile.fileobj.tell()
-        tarfile.offset = self.offset_data + self._block(self.size)
-        self.size = origsize
-
-        return self
-
-    def _proc_pax(self, tarfile):
-        """Process an extended or global header as described in
-           POSIX.1-2001.
-        """
-        # Read the header information.
-        buf = tarfile.fileobj.read(self._block(self.size))
-
-        # A pax header stores supplemental information for either
-        # the following file (extended) or all following files
-        # (global).
-        if self.type == XGLTYPE:
-            pax_headers = tarfile.pax_headers
-        else:
-            pax_headers = tarfile.pax_headers.copy()
-
-        # Parse pax header information. A record looks like this:
-        # "%d %s=%s\n" % (length, keyword, value). length is the size
-        # of the complete record including the length field itself and
-        # the newline. keyword and value are both UTF-8 encoded strings.
-        regex = re.compile(r"(\d+) ([^=]+)=", re.U)
-        pos = 0
-        while True:
-            match = regex.match(buf, pos)
-            if not match:
-                break
-
-            length, keyword = match.groups()
-            length = int(length)
-            value = buf[match.end(2) + 1:match.start(1) + length - 1]
-
-            keyword = keyword.decode("utf8")
-            value = value.decode("utf8")
-
-            pax_headers[keyword] = value
-            pos += length
-
-        # Fetch the next header.
-        try:
-            next = self.fromtarfile(tarfile)
-        except HeaderError:
-            raise SubsequentHeaderError("missing or bad subsequent header")
-
-        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
-            # Patch the TarInfo object with the extended header info.
-            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
-            next.offset = self.offset
-
-            if "size" in pax_headers:
-                # If the extended header replaces the size field,
-                # we need to recalculate the offset where the next
-                # header starts.
-                offset = next.offset_data
-                if next.isreg() or next.type not in SUPPORTED_TYPES:
-                    offset += next._block(next.size)
-                tarfile.offset = offset
-
-        return next
-
-    def _apply_pax_info(self, pax_headers, encoding, errors):
-        """Replace fields with supplemental information from a previous
-           pax extended or global header.
-        """
-        for keyword, value in pax_headers.iteritems():
-            if keyword not in PAX_FIELDS:
-                continue
-
-            if keyword == "path":
-                value = value.rstrip("/")
-
-            if keyword in PAX_NUMBER_FIELDS:
-                try:
-                    value = PAX_NUMBER_FIELDS[keyword](value)
-                except ValueError:
-                    value = 0
-            else:
-                value = uts(value, encoding, errors)
-
-            setattr(self, keyword, value)
-
-        self.pax_headers = pax_headers.copy()
-
-    def _block(self, count):
-        """Round up a byte count by BLOCKSIZE and return it,
-           e.g. _block(834) => 1024.
-        """
-        blocks, remainder = divmod(count, BLOCKSIZE)
-        if remainder:
-            blocks += 1
-        return blocks * BLOCKSIZE
-
-    def isreg(self):
-        return self.type in REGULAR_TYPES
-    def isfile(self):
-        return self.isreg()
-    def isdir(self):
-        return self.type == DIRTYPE
-    def issym(self):
-        return self.type == SYMTYPE
-    def islnk(self):
-        return self.type == LNKTYPE
-    def ischr(self):
-        return self.type == CHRTYPE
-    def isblk(self):
-        return self.type == BLKTYPE
-    def isfifo(self):
-        return self.type == FIFOTYPE
-    def issparse(self):
-        return self.type == GNUTYPE_SPARSE
-    def isdev(self):
-        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
-# class TarInfo
-
-class TarFile(object):
-    """The TarFile Class provides an interface to tar archives.
-    """
-
-    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
-
-    dereference = False         # If true, add content of linked file to the
-                                # tar file, else the link.
-
-    ignore_zeros = False        # If true, skips empty or invalid blocks and
-                                # continues processing.
-
-    errorlevel = 1              # If 0, fatal errors only appear in debug
-                                # messages (if debug >= 0). If > 0, errors
-                                # are passed to the caller as exceptions.
-
-    format = DEFAULT_FORMAT     # The format to use when creating an archive.
-
-    encoding = ENCODING         # Encoding for 8-bit character strings.
-
-    errors = None               # Error handler for unicode conversion.
-
-    tarinfo = TarInfo           # The default TarInfo class to use.
-
-    fileobject = ExFileObject   # The default ExFileObject class to use.
-
-    def __init__(self, name=None, mode="r", fileobj=None, format=None,
-            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
-            errors=None, pax_headers=None, debug=None, errorlevel=None):
-        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
-           read from an existing archive, 'a' to append data to an existing
-           file or 'w' to create a new file overwriting an existing one. `mode'
-           defaults to 'r'.
-           If `fileobj' is given, it is used for reading or writing data. If it
-           can be determined, `mode' is overridden by `fileobj's mode.
-           `fileobj' is not closed, when TarFile is closed.
-        """
-        if len(mode) > 1 or mode not in "raw":
-            raise ValueError("mode must be 'r', 'a' or 'w'")
-        self.mode = mode
-        self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]
-
-        if not fileobj:
-            if self.mode == "a" and not os.path.exists(name):
-                # Create nonexistent files in append mode.
-                self.mode = "w"
-                self._mode = "wb"
-            fileobj = bltn_open(name, self._mode)
-            self._extfileobj = False
-        else:
-            if name is None and hasattr(fileobj, "name"):
-                name = fileobj.name
-            if hasattr(fileobj, "mode"):
-                self._mode = fileobj.mode
-            self._extfileobj = True
-        if name:
-            self.name = os.path.abspath(name)
-        else:
-            self.name = None
-        self.fileobj = fileobj
-
-        # Init attributes.
-        if format is not None:
-            self.format = format
-        if tarinfo is not None:
-            self.tarinfo = tarinfo
-        if dereference is not None:
-            self.dereference = dereference
-        if ignore_zeros is not None:
-            self.ignore_zeros = ignore_zeros
-        if encoding is not None:
-            self.encoding = encoding
-
-        if errors is not None:
-            self.errors = errors
-        elif mode == "r":
-            self.errors = "utf-8"
-        else:
-            self.errors = "strict"
-
-        if pax_headers is not None and self.format == PAX_FORMAT:
-            self.pax_headers = pax_headers
-        else:
-            self.pax_headers = {}
-
-        if debug is not None:
-            self.debug = debug
-        if errorlevel is not None:
-            self.errorlevel = errorlevel
-
-        # Init datastructures.
-        self.closed = False
-        self.members = []       # list of members as TarInfo objects
-        self._loaded = False    # flag if all members have been read
-        self.offset = self.fileobj.tell()
-                                # current position in the archive file
-        self.inodes = {}        # dictionary caching the inodes of
-                                # archive members already added
-
-        try:
-            if self.mode == "r":
-                self.firstmember = None
-                self.firstmember = self.next()
-
-            if self.mode == "a":
-                # Move to the end of the archive,
-                # before the first empty block.
-                while True:
-                    self.fileobj.seek(self.offset)
-                    try:
-                        tarinfo = self.tarinfo.fromtarfile(self)
-                        self.members.append(tarinfo)
-                    except EOFHeaderError:
-                        self.fileobj.seek(self.offset)
-                        break
-                    except HeaderError, e:
-                        raise ReadError(str(e))
-
-            if self.mode in "aw":
-                self._loaded = True
-
-                if self.pax_headers:
-                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
-                    self.fileobj.write(buf)
-                    self.offset += len(buf)
-        except:
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-            raise
-
-    def _getposix(self):
-        return self.format == USTAR_FORMAT
-    def _setposix(self, value):
-        import warnings
-        warnings.warn("use the format attribute instead", DeprecationWarning,
-                      2)
-        if value:
-            self.format = USTAR_FORMAT
-        else:
-            self.format = GNU_FORMAT
-    posix = property(_getposix, _setposix)
-
-    #--------------------------------------------------------------------------
-    # Below are the classmethods which act as alternate constructors to the
-    # TarFile class. The open() method is the only one that is needed for
-    # public use; it is the "super"-constructor and is able to select an
-    # adequate "sub"-constructor for a particular compression using the mapping
-    # from OPEN_METH.
-    #
-    # This concept allows one to subclass TarFile without losing the comfort of
-    # the super-constructor. A sub-constructor is registered and made available
-    # by adding it to the mapping in OPEN_METH.
-
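A minimal sketch of registering a sub-constructor as described above (the compression type and class are hypothetical, not part of duplicity):

    class XZTarFile(TarFile):
        @classmethod
        def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
            # real decompression is elided in this sketch
            raise CompressionError("xz support not implemented")

    # copy the mapping so the base class stays untouched
    XZTarFile.OPEN_METH = dict(TarFile.OPEN_METH, xz="xzopen")

After this, open()'s ':'-branch resolves a mode like "r:xz" to xzopen() via the mapping.
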
-    @classmethod
-    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
-        """Open a tar archive for reading, writing or appending. Return
-           an appropriate TarFile class.
-
-           mode:
-           'r' or 'r:*' open for reading with transparent compression
-           'r:'         open for reading exclusively uncompressed
-           'r:gz'       open for reading with gzip compression
-           'r:bz2'      open for reading with bzip2 compression
-           'a' or 'a:'  open for appending, creating the file if necessary
-           'w' or 'w:'  open for writing without compression
-           'w:gz'       open for writing with gzip compression
-           'w:bz2'      open for writing with bzip2 compression
-
-           'r|*'        open a stream of tar blocks with transparent compression
-           'r|'         open an uncompressed stream of tar blocks for reading
-           'r|gz'       open a gzip compressed stream of tar blocks
-           'r|bz2'      open a bzip2 compressed stream of tar blocks
-           'w|'         open an uncompressed stream for writing
-           'w|gz'       open a gzip compressed stream for writing
-           'w|bz2'      open a bzip2 compressed stream for writing
-        """
-
-        if not name and not fileobj:
-            raise ValueError("nothing to open")
-
-        if mode in ("r", "r:*"):
-            # Find out which *open() is appropriate for opening the file.
-            for comptype in cls.OPEN_METH:
-                func = getattr(cls, cls.OPEN_METH[comptype])
-                if fileobj is not None:
-                    saved_pos = fileobj.tell()
-                try:
-                    return func(name, "r", fileobj, **kwargs)
-                except (ReadError, CompressionError), e:
-                    if fileobj is not None:
-                        fileobj.seek(saved_pos)
-                    continue
-            raise ReadError("file could not be opened successfully")
-
-        elif ":" in mode:
-            filemode, comptype = mode.split(":", 1)
-            filemode = filemode or "r"
-            comptype = comptype or "tar"
-
-            # Select the *open() function according to
-            # given compression.
-            if comptype in cls.OPEN_METH:
-                func = getattr(cls, cls.OPEN_METH[comptype])
-            else:
-                raise CompressionError("unknown compression type %r" % comptype)
-            return func(name, filemode, fileobj, **kwargs)
-
-        elif "|" in mode:
-            filemode, comptype = mode.split("|", 1)
-            filemode = filemode or "r"
-            comptype = comptype or "tar"
-
-            if filemode not in "rw":
-                raise ValueError("mode must be 'r' or 'w'")
-
-            t = cls(name, filemode,
-                    _Stream(name, filemode, comptype, fileobj, bufsize),
-                    **kwargs)
-            t._extfileobj = False
-            return t
-
-        elif mode in "aw":
-            return cls.taropen(name, mode, fileobj, **kwargs)
-
-        raise ValueError("undiscernible mode")
-
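Typical calls against the mode table above, with hypothetical file names:

    import sys

    tf = TarFile.open("backup.tar.gz")      # "r:*": compression is sniffed
    names = tf.getnames()
    tf.close()

    # "w|bz2": write a compressed stream without seeking, e.g. to a pipe
    out = TarFile.open(fileobj=sys.stdout, mode="w|bz2")
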
-    @classmethod
-    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
-        """Open uncompressed tar archive name for reading or writing.
-        """
-        if len(mode) > 1 or mode not in "raw":
-            raise ValueError("mode must be 'r', 'a' or 'w'")
-        return cls(name, mode, fileobj, **kwargs)
-
-    @classmethod
-    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
-        """Open gzip compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if len(mode) > 1 or mode not in "rw":
-            raise ValueError("mode must be 'r' or 'w'")
-
-        try:
-            import gzip
-            gzip.GzipFile
-        except (ImportError, AttributeError):
-            raise CompressionError("gzip module is not available")
-
-        if fileobj is None:
-            fileobj = bltn_open(name, mode + "b")
-
-        try:
-            t = cls.taropen(name, mode,
-                gzip.GzipFile(name, mode, compresslevel, fileobj),
-                **kwargs)
-        except IOError:
-            raise ReadError("not a gzip file")
-        t._extfileobj = False
-        return t
-
-    @classmethod
-    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
-        """Open bzip2 compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if len(mode) > 1 or mode not in "rw":
-            raise ValueError("mode must be 'r' or 'w'.")
-
-        try:
-            import bz2
-        except ImportError:
-            raise CompressionError("bz2 module is not available")
-
-        if fileobj is not None:
-            fileobj = _BZ2Proxy(fileobj, mode)
-        else:
-            fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except (IOError, EOFError):
-            raise ReadError("not a bzip2 file")
-        t._extfileobj = False
-        return t
-
-    # All *open() methods are registered here.
-    OPEN_METH = {
-        "tar": "taropen",   # uncompressed tar
-        "gz":  "gzopen",    # gzip compressed tar
-        "bz2": "bz2open"    # bzip2 compressed tar
-    }
-
-    #--------------------------------------------------------------------------
-    # The public methods which TarFile provides:
-
-    def close(self):
-        """Close the TarFile. In write-mode, two finishing zero blocks are
-           appended to the archive.
-        """
-        if self.closed:
-            return
-
-        if self.mode in "aw":
-            self.fileobj.write(NUL * (BLOCKSIZE * 2))
-            self.offset += (BLOCKSIZE * 2)
-            # fill up the end with zero-blocks
-            # (like option -b20 for tar does)
-            blocks, remainder = divmod(self.offset, RECORDSIZE)
-            if remainder > 0:
-                self.fileobj.write(NUL * (RECORDSIZE - remainder))
-
-        if not self._extfileobj:
-            self.fileobj.close()
-        self.closed = True
-
-    def getmember(self, name):
-        """Return a TarInfo object for member `name'. If `name' can not be
-           found in the archive, KeyError is raised. If a member occurs more
-           than once in the archive, its last occurrence is assumed to be the
-           most up-to-date version.
-        """
-        tarinfo = self._getmember(name)
-        if tarinfo is None:
-            raise KeyError("filename %r not found" % name)
-        return tarinfo
-
-    def getmembers(self):
-        """Return the members of the archive as a list of TarInfo objects. The
-           list has the same order as the members in the archive.
-        """
-        self._check()
-        if not self._loaded:    # if we want to obtain a list of
-            self._load()        # all members, we first have to
-                                # scan the whole archive.
-        return self.members
-
-    def getnames(self):
-        """Return the members of the archive as a list of their names. It has
-           the same order as the list returned by getmembers().
-        """
-        return [tarinfo.name for tarinfo in self.getmembers()]
-
-    def gettarinfo(self, name=None, arcname=None, fileobj=None):
-        """Create a TarInfo object for either the file `name' or the file
-           object `fileobj' (using os.fstat on its file descriptor). You can
-           modify some of the TarInfo's attributes before you add it using
-           addfile(). If given, `arcname' specifies an alternative name for the
-           file in the archive.
-        """
-        self._check("aw")
-
-        # When fileobj is given, replace name by
-        # fileobj's real name.
-        if fileobj is not None:
-            name = fileobj.name
-
-        # Build the name of the member in the archive: backslashes are
-        # converted to forward slashes and absolute paths are made relative.
-        if arcname is None:
-            arcname = name
-        drv, arcname = os.path.splitdrive(arcname)
-        arcname = arcname.replace(os.sep, "/")
-        arcname = arcname.lstrip("/")
-
-        # Now, fill the TarInfo object with
-        # information specific for the file.
-        tarinfo = self.tarinfo()
-        tarinfo.tarfile = self
-
-        # Use os.stat or os.lstat, depending on platform
-        # and if symlinks shall be resolved.
-        if fileobj is None:
-            if hasattr(os, "lstat") and not self.dereference:
-                statres = os.lstat(name)
-            else:
-                statres = os.stat(name)
-        else:
-            statres = os.fstat(fileobj.fileno())
-        linkname = ""
-
-        stmd = statres.st_mode
-        if stat.S_ISREG(stmd):
-            inode = (statres.st_ino, statres.st_dev)
-            if not self.dereference and statres.st_nlink > 1 and \
-                    inode in self.inodes and arcname != self.inodes[inode]:
-                # Is it a hardlink to an already
-                # archived file?
-                type = LNKTYPE
-                linkname = self.inodes[inode]
-            else:
-                # The inode is added only if it's valid.
-                # For win32 it is always 0.
-                type = REGTYPE
-                if inode[0]:
-                    self.inodes[inode] = arcname
-        elif stat.S_ISDIR(stmd):
-            type = DIRTYPE
-        elif stat.S_ISFIFO(stmd):
-            type = FIFOTYPE
-        elif stat.S_ISLNK(stmd):
-            type = SYMTYPE
-            linkname = os.readlink(name)
-        elif stat.S_ISCHR(stmd):
-            type = CHRTYPE
-        elif stat.S_ISBLK(stmd):
-            type = BLKTYPE
-        else:
-            return None
-
-        # Fill the TarInfo object with all
-        # information we can get.
-        tarinfo.name = arcname
-        tarinfo.mode = stmd
-        tarinfo.uid = statres.st_uid
-        tarinfo.gid = statres.st_gid
-        if type == REGTYPE:
-            tarinfo.size = statres.st_size
-        else:
-            tarinfo.size = 0L
-        tarinfo.mtime = statres.st_mtime
-        tarinfo.type = type
-        tarinfo.linkname = linkname
-        if pwd:
-            try:
-                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
-            except KeyError:
-                pass
-        if grp:
-            try:
-                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
-            except KeyError:
-                pass
-
-        if type in (CHRTYPE, BLKTYPE):
-            if hasattr(os, "major") and hasattr(os, "minor"):
-                tarinfo.devmajor = os.major(statres.st_rdev)
-                tarinfo.devminor = os.minor(statres.st_rdev)
-        return tarinfo
-
-    def list(self, verbose=True):
-        """Print a table of contents to sys.stdout. If `verbose' is False, only
-           the names of the members are printed. If it is True, an `ls -l'-like
-           output is produced.
-        """
-        self._check()
-
-        for tarinfo in self:
-            if verbose:
-                print filemode(tarinfo.mode),
-                print "%s/%s" % (tarinfo.uname or tarinfo.uid,
-                                 tarinfo.gname or tarinfo.gid),
-                if tarinfo.ischr() or tarinfo.isblk():
-                    print "%10s" % ("%d,%d" \
-                                    % (tarinfo.devmajor, tarinfo.devminor)),
-                else:
-                    print "%10d" % tarinfo.size,
-                print "%d-%02d-%02d %02d:%02d:%02d" \
-                      % time.localtime(tarinfo.mtime)[:6],
-
-            if tarinfo.isdir():
-                print tarinfo.name + "/",
-            else:
-                print tarinfo.name,
-
-            if verbose:
-                if tarinfo.issym():
-                    print "->", tarinfo.linkname,
-                if tarinfo.islnk():
-                    print "link to", tarinfo.linkname,
-            print
-
-    def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
-        """Add the file `name' to the archive. `name' may be any type of file
-           (directory, fifo, symbolic link, etc.). If given, `arcname'
-           specifies an alternative name for the file in the archive.
-           Directories are added recursively by default. This can be avoided by
-           setting `recursive' to False. `exclude' is a function that should
-           return True for each filename to be excluded. `filter' is a function
-           that expects a TarInfo object argument and returns the changed
-           TarInfo object; if it returns None, the TarInfo object will be
-           excluded from the archive.
-        """
-        self._check("aw")
-
-        if arcname is None:
-            arcname = name
-
-        # Exclude pathnames.
-        if exclude is not None:
-            import warnings
-            warnings.warn("use the filter argument instead",
-                    DeprecationWarning, 2)
-            if exclude(name):
-                self._dbg(2, "tarfile: Excluded %r" % name)
-                return
-
-        # Skip if somebody tries to archive the archive...
-        if self.name is not None and os.path.abspath(name) == self.name:
-            self._dbg(2, "tarfile: Skipped %r" % name)
-            return
-
-        self._dbg(1, name)
-
-        # Create a TarInfo object from the file.
-        tarinfo = self.gettarinfo(name, arcname)
-
-        if tarinfo is None:
-            self._dbg(1, "tarfile: Unsupported type %r" % name)
-            return
-
-        # Change or exclude the TarInfo object.
-        if filter is not None:
-            tarinfo = filter(tarinfo)
-            if tarinfo is None:
-                self._dbg(2, "tarfile: Excluded %r" % name)
-                return
-
-        # Append the tar header and data to the archive.
-        if tarinfo.isreg():
-            f = bltn_open(name, "rb")
-            self.addfile(tarinfo, f)
-            f.close()
-
-        elif tarinfo.isdir():
-            self.addfile(tarinfo)
-            if recursive:
-                for f in os.listdir(name):
-                    self.add(os.path.join(name, f), os.path.join(arcname, f),
-                            recursive, exclude, filter)
-
-        else:
-            self.addfile(tarinfo)
-
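A minimal sketch of the `filter' hook described above (names hypothetical; `tf' is an archive opened for writing):

    def anonymize(ti):
        if ti.name.endswith(".log"):
            return None                  # drop log files entirely
        ti.uid = ti.gid = 0              # store members as root-owned
        ti.uname = ti.gname = "root"
        return ti

    tf.add("project", filter=anonymize)
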
-    def addfile(self, tarinfo, fileobj=None):
-        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
-           given, tarinfo.size bytes are read from it and added to the archive.
-           You can create TarInfo objects using gettarinfo().
-           On Windows platforms, `fileobj' should always be opened with mode
-           'rb' to avoid size mismatches caused by newline translation.
-        """
-        self._check("aw")
-
-        tarinfo = copy.copy(tarinfo)
-
-        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
-        self.fileobj.write(buf)
-        self.offset += len(buf)
-
-        # If there's data to follow, append it.
-        if fileobj is not None:
-            copyfileobj(fileobj, self.fileobj, tarinfo.size)
-            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
-            if remainder > 0:
-                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
-                blocks += 1
-            self.offset += blocks * BLOCKSIZE
-
-        self.members.append(tarinfo)
-
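gettarinfo() and addfile() combine as follows when the header needs adjusting before it is written (paths hypothetical):

    ti = tf.gettarinfo("data.bin", arcname="payload/data.bin")
    ti.mode = 0644                       # normalize permissions
    f = open("data.bin", "rb")
    tf.addfile(ti, f)                    # header plus padded data blocks
    f.close()
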
-    def extractall(self, path=".", members=None):
-        """Extract all members from the archive to the current working
-           directory and set owner, modification time and permissions on
-           directories afterwards. `path' specifies a different directory
-           to extract to. `members' is optional and must be a subset of the
-           list returned by getmembers().
-        """
-        directories = []
-
-        if members is None:
-            members = self
-
-        for tarinfo in members:
-            if tarinfo.isdir():
-                # Extract directories with a safe mode.
-                directories.append(tarinfo)
-                tarinfo = copy.copy(tarinfo)
-                tarinfo.mode = 0700
-            self.extract(tarinfo, path)
-
-        # Reverse sort directories.
-        directories.sort(key=operator.attrgetter('name'))
-        directories.reverse()
-
-        # Set correct owner, mtime and filemode on directories.
-        for tarinfo in directories:
-            dirpath = os.path.join(path, tarinfo.name)
-            try:
-                self.chown(tarinfo, dirpath)
-                self.utime(tarinfo, dirpath)
-                self.chmod(tarinfo, dirpath)
-            except ExtractError, e:
-                if self.errorlevel > 1:
-                    raise
-                else:
-                    self._dbg(1, "tarfile: %s" % e)
-
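Since `members' must be a subset of getmembers(), a selective restore looks like this (paths hypothetical):

    wanted = [m for m in tf.getmembers() if m.name.startswith("etc/")]
    tf.extractall(path="/tmp/restore", members=wanted)
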
-    def extract(self, member, path=""):
-        """Extract a member from the archive to the current working directory,
-           using its full name. Its file information is extracted as accurately
-           as possible. `member' may be a filename or a TarInfo object. You can
-           specify a different directory using `path'.
-        """
-        self._check("r")
-
-        if isinstance(member, basestring):
-            tarinfo = self.getmember(member)
-        else:
-            tarinfo = member
-
-        # Prepare the link target for makelink().
-        if tarinfo.islnk():
-            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
-
-        try:
-            self._extract_member(tarinfo, os.path.join(path, tarinfo.name))
-        except EnvironmentError, e:
-            if self.errorlevel > 0:
-                raise
-            else:
-                if e.filename is None:
-                    self._dbg(1, "tarfile: %s" % e.strerror)
-                else:
-                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
-        except ExtractError, e:
-            if self.errorlevel > 1:
-                raise
-            else:
-                self._dbg(1, "tarfile: %s" % e)
-
-    def extractfile(self, member):
-        """Extract a member from the archive as a file object. `member' may be
-           a filename or a TarInfo object. If `member' is a regular file, a
-           file-like object is returned. If `member' is a link, a file-like
-           object is constructed from the link's target. If `member' is none of
-           the above, None is returned.
-           The file-like object is read-only and provides the following
-           methods: read(), readline(), readlines(), seek() and tell()
-        """
-        self._check("r")
-
-        if isinstance(member, basestring):
-            tarinfo = self.getmember(member)
-        else:
-            tarinfo = member
-
-        if tarinfo.isreg():
-            return self.fileobject(self, tarinfo)
-
-        elif tarinfo.type not in SUPPORTED_TYPES:
-            # If a member's type is unknown, it is treated as a
-            # regular file.
-            return self.fileobject(self, tarinfo)
-
-        elif tarinfo.islnk() or tarinfo.issym():
-            if isinstance(self.fileobj, _Stream):
-                # A small but ugly workaround for the case that someone tries
-                # to extract a (sym)link as a file-object from a non-seekable
-                # stream of tar blocks.
-                raise StreamError("cannot extract (sym)link as file object")
-            else:
-                # A (sym)link's file object is its target's file object.
-                return self.extractfile(self._find_link_target(tarinfo))
-        else:
-            # If there's no data associated with the member (directory, chrdev,
-            # blkdev, etc.), return None instead of a file object.
-            return None
-
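Reading a member in place per the contract above (member name hypothetical):

    f = tf.extractfile("payload/data.bin")
    if f is not None:                    # directories, devices etc. yield None
        header = f.read(512)
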
-    def _extract_member(self, tarinfo, targetpath):
-        """Extract the TarInfo object tarinfo to a physical
-           file called targetpath.
-        """
-        # Fetch the TarInfo object for the given name
-        # and build the destination pathname, replacing
-        # forward slashes to platform specific separators.
-        targetpath = targetpath.rstrip("/")
-        targetpath = targetpath.replace("/", os.sep)
-
-        # Create all upper directories.
-        upperdirs = os.path.dirname(targetpath)
-        if upperdirs and not os.path.exists(upperdirs):
-            # Create directories that are not part of the archive with
-            # default permissions.
-            os.makedirs(upperdirs)
-
-        if tarinfo.islnk() or tarinfo.issym():
-            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
-        else:
-            self._dbg(1, tarinfo.name)
-
-        if tarinfo.isreg():
-            self.makefile(tarinfo, targetpath)
-        elif tarinfo.isdir():
-            self.makedir(tarinfo, targetpath)
-        elif tarinfo.isfifo():
-            self.makefifo(tarinfo, targetpath)
-        elif tarinfo.ischr() or tarinfo.isblk():
-            self.makedev(tarinfo, targetpath)
-        elif tarinfo.islnk() or tarinfo.issym():
-            self.makelink(tarinfo, targetpath)
-        elif tarinfo.type not in SUPPORTED_TYPES:
-            self.makeunknown(tarinfo, targetpath)
-        else:
-            self.makefile(tarinfo, targetpath)
-
-        self.chown(tarinfo, targetpath)
-        if not tarinfo.issym():
-            self.chmod(tarinfo, targetpath)
-            self.utime(tarinfo, targetpath)
-
-    #--------------------------------------------------------------------------
-    # Below are the different file methods. They are called via
-    # _extract_member() when extract() is called. They can be replaced in a
-    # subclass to implement other functionality.
-
-    def makedir(self, tarinfo, targetpath):
-        """Make a directory called targetpath.
-        """
-        try:
-            # Use a safe mode for the directory, the real mode is set
-            # later in _extract_member().
-            os.mkdir(targetpath, 0700)
-        except EnvironmentError, e:
-            if e.errno != errno.EEXIST:
-                raise
-
-    def makefile(self, tarinfo, targetpath):
-        """Make a file called targetpath.
-        """
-        source = self.extractfile(tarinfo)
-        target = bltn_open(targetpath, "wb")
-        copyfileobj(source, target)
-        source.close()
-        target.close()
-
-    def makeunknown(self, tarinfo, targetpath):
-        """Make a file from a TarInfo object with an unknown type
-           at targetpath.
-        """
-        self.makefile(tarinfo, targetpath)
-        self._dbg(1, "tarfile: Unknown file type %r, " \
-                     "extracted as regular file." % tarinfo.type)
-
-    def makefifo(self, tarinfo, targetpath):
-        """Make a fifo called targetpath.
-        """
-        if hasattr(os, "mkfifo"):
-            os.mkfifo(targetpath)
-        else:
-            raise ExtractError("fifo not supported by system")
-
-    def makedev(self, tarinfo, targetpath):
-        """Make a character or block device called targetpath.
-        """
-        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
-            raise ExtractError("special devices not supported by system")
-
-        mode = tarinfo.mode
-        if tarinfo.isblk():
-            mode |= stat.S_IFBLK
-        else:
-            mode |= stat.S_IFCHR
-
-        os.mknod(targetpath, mode,
-                 os.makedev(tarinfo.devmajor, tarinfo.devminor))
-
-    def makelink(self, tarinfo, targetpath):
-        """Make a (symbolic) link called targetpath. If it cannot be created
-           (platform limitation), we try to make a copy of the referenced file
-           instead of a link.
-        """
-        if hasattr(os, "symlink") and hasattr(os, "link"):
-            # For systems that support symbolic and hard links.
-            if tarinfo.issym():
-                os.symlink(tarinfo.linkname, targetpath)
-            else:
-                # See extract().
-                if os.path.exists(tarinfo._link_target):
-                    os.link(tarinfo._link_target, targetpath)
-                else:
-                    self._extract_member(self._find_link_target(tarinfo), targetpath)
-        else:
-            try:
-                self._extract_member(self._find_link_target(tarinfo), targetpath)
-            except KeyError:
-                raise ExtractError("unable to resolve link inside archive")
-
-    def chown(self, tarinfo, targetpath):
-        """Set owner of targetpath according to tarinfo.
-        """
-        if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
-            # We have to be root to do so.
-            try:
-                g = grp.getgrnam(tarinfo.gname)[2]
-            except KeyError:
-                try:
-                    g = grp.getgrgid(tarinfo.gid)[2]
-                except KeyError:
-                    g = os.getgid()
-            try:
-                u = pwd.getpwnam(tarinfo.uname)[2]
-            except KeyError:
-                try:
-                    u = pwd.getpwuid(tarinfo.uid)[2]
-                except KeyError:
-                    u = os.getuid()
-            try:
-                if tarinfo.issym() and hasattr(os, "lchown"):
-                    os.lchown(targetpath, u, g)
-                else:
-                    if sys.platform != "os2emx":
-                        os.chown(targetpath, u, g)
-            except EnvironmentError, e:
-                raise ExtractError("could not change owner to %d:%d" % (u, g))
-
-    def chmod(self, tarinfo, targetpath):
-        """Set file permissions of targetpath according to tarinfo.
-        """
-        if hasattr(os, 'chmod'):
-            try:
-                os.chmod(targetpath, tarinfo.mode)
-            except EnvironmentError, e:
-                raise ExtractError("could not change mode")
-
-    def utime(self, tarinfo, targetpath):
-        """Set modification time of targetpath according to tarinfo.
-        """
-        if not hasattr(os, 'utime'):
-            return
-        try:
-            os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
-        except EnvironmentError, e:
-            raise ExtractError("could not change modification time")
-
-    #--------------------------------------------------------------------------
-    def next(self):
-        """Return the next member of the archive as a TarInfo object, when
-           TarFile is opened for reading. Return None if no more members
-           are available.
-        """
-        self._check("ra")
-        if self.firstmember is not None:
-            m = self.firstmember
-            self.firstmember = None
-            return m
-
-        # Read the next block.
-        self.fileobj.seek(self.offset)
-        tarinfo = None
-        while True:
-            try:
-                tarinfo = self.tarinfo.fromtarfile(self)
-            except EOFHeaderError, e:
-                if self.ignore_zeros:
-                    self._dbg(2, "0x%X: %s" % (self.offset, e))
-                    self.offset += BLOCKSIZE
-                    continue
-            except InvalidHeaderError, e:
-                if self.ignore_zeros:
-                    self._dbg(2, "0x%X: %s" % (self.offset, e))
-                    self.offset += BLOCKSIZE
-                    continue
-                elif self.offset == 0:
-                    raise ReadError(str(e))
-            except EmptyHeaderError:
-                if self.offset == 0:
-                    raise ReadError("empty file")
-            except TruncatedHeaderError, e:
-                if self.offset == 0:
-                    raise ReadError(str(e))
-            except SubsequentHeaderError, e:
-                raise ReadError(str(e))
-            break
-
-        if tarinfo is not None:
-            self.members.append(tarinfo)
-        else:
-            self._loaded = True
-
-        return tarinfo
-
-    #--------------------------------------------------------------------------
-    # Little helper methods:
-
-    def _getmember(self, name, tarinfo=None, normalize=False):
-        """Find an archive member by name from bottom to top.
-           If tarinfo is given, it is used as the starting point.
-        """
-        # Ensure that all members have been loaded.
-        members = self.getmembers()
-
-        # Limit the member search list up to tarinfo.
-        if tarinfo is not None:
-            members = members[:members.index(tarinfo)]
-
-        if normalize:
-            name = os.path.normpath(name)
-
-        for member in reversed(members):
-            if normalize:
-                member_name = os.path.normpath(member.name)
-            else:
-                member_name = member.name
-
-            if name == member_name:
-                return member
-
-    def _load(self):
-        """Read through the entire archive file and look for readable
-           members.
-        """
-        while True:
-            tarinfo = self.next()
-            if tarinfo is None:
-                break
-        self._loaded = True
-
-    def _check(self, mode=None):
-        """Check if TarFile is still open, and if the operation's mode
-           corresponds to TarFile's mode.
-        """
-        if self.closed:
-            raise IOError("%s is closed" % self.__class__.__name__)
-        if mode is not None and self.mode not in mode:
-            raise IOError("bad operation for mode %r" % self.mode)
-
-    def _find_link_target(self, tarinfo):
-        """Find the target member of a symlink or hardlink member in the
-           archive.
-        """
-        if tarinfo.issym():
-            # Always search the entire archive.
-            linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
-            limit = None
-        else:
-            # Search the archive before the link, because a hard link is
-            # just a reference to an already archived file.
-            linkname = tarinfo.linkname
-            limit = tarinfo
-
-        member = self._getmember(linkname, tarinfo=limit, normalize=True)
-        if member is None:
-            raise KeyError("linkname %r not found" % linkname)
-        return member
-
-    def __iter__(self):
-        """Provide an iterator object.
-        """
-        if self._loaded:
-            return iter(self.members)
-        else:
-            return TarIter(self)
-
-    def _dbg(self, level, msg):
-        """Write debugging output to sys.stderr.
-        """
-        if level <= self.debug:
-            print >> sys.stderr, msg
-
-    def __enter__(self):
-        self._check()
-        return self
-
-    def __exit__(self, type, value, traceback):
-        if type is None:
-            self.close()
-        else:
-            # An exception occurred. We must not call close() because
-            # it would try to write end-of-archive blocks and padding.
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-# class TarFile
-
-class TarIter:
-    """Iterator Class.
-
-       for tarinfo in TarFile(...):
-           suite...
-    """
-
-    def __init__(self, tarfile):
-        """Construct a TarIter object.
-        """
-        self.tarfile = tarfile
-        self.index = 0
-    def __iter__(self):
-        """Return iterator object.
-        """
-        return self
-    def next(self):
-        """Return the next item using TarFile's next() method.
-           When all members have been read, set TarFile as _loaded.
-        """
-        # Fix for SF #1100429: Under rare circumstances it can
-        # happen that getmembers() is called during iteration,
-        # which will cause TarIter to stop prematurely.
-        if not self.tarfile._loaded:
-            tarinfo = self.tarfile.next()
-            if not tarinfo:
-                self.tarfile._loaded = True
-                raise StopIteration
-        else:
-            try:
-                tarinfo = self.tarfile.members[self.index]
-            except IndexError:
-                raise StopIteration
-        self.index += 1
-        return tarinfo
-
-# Helper classes for sparse file support
-class _section:
-    """Base class for _data and _hole.
-    """
-    def __init__(self, offset, size):
-        self.offset = offset
-        self.size = size
-    def __contains__(self, offset):
-        return self.offset <= offset < self.offset + self.size
-
-class _data(_section):
-    """Represent a data section in a sparse file.
-    """
-    def __init__(self, offset, size, realpos):
-        _section.__init__(self, offset, size)
-        self.realpos = realpos
-
-class _hole(_section):
-    """Represent a hole section in a sparse file.
-    """
-    pass
-
-class _ringbuffer(list):
-    """Ringbuffer class which increases performance
-       over a regular list.
-    """
-    def __init__(self):
-        self.idx = 0
-    def find(self, offset):
-        idx = self.idx
-        while True:
-            item = self[idx]
-            if offset in item:
-                break
-            idx += 1
-            if idx == len(self):
-                idx = 0
-            if idx == self.idx:
-                # End of File
-                return None
-        self.idx = idx
-        return item
-
-#---------------------------------------------
-# zipfile compatible TarFile class
-#---------------------------------------------
-TAR_PLAIN = 0           # zipfile.ZIP_STORED
-TAR_GZIPPED = 8         # zipfile.ZIP_DEFLATED
-class TarFileCompat:
-    """TarFile class compatible with standard module zipfile's
-       ZipFile class.
-    """
-    def __init__(self, file, mode="r", compression=TAR_PLAIN):
-        from warnings import warnpy3k
-        warnpy3k("the TarFileCompat class has been removed in Python 3.0",
-                stacklevel=2)
-        if compression == TAR_PLAIN:
-            self.tarfile = TarFile.taropen(file, mode)
-        elif compression == TAR_GZIPPED:
-            self.tarfile = TarFile.gzopen(file, mode)
-        else:
-            raise ValueError("unknown compression constant")
-        if mode[0:1] == "r":
-            members = self.tarfile.getmembers()
-            for m in members:
-                m.filename = m.name
-                m.file_size = m.size
-                m.date_time = time.gmtime(m.mtime)[:6]
-    def namelist(self):
-        return map(lambda m: m.name, self.infolist())
-    def infolist(self):
-        return filter(lambda m: m.type in REGULAR_TYPES,
-                      self.tarfile.getmembers())
-    def printdir(self):
-        self.tarfile.list()
-    def testzip(self):
-        return
-    def getinfo(self, name):
-        return self.tarfile.getmember(name)
-    def read(self, name):
-        return self.tarfile.extractfile(self.tarfile.getmember(name)).read()
-    def write(self, filename, arcname=None, compress_type=None):
-        self.tarfile.add(filename, arcname)
-    def writestr(self, zinfo, bytes):
-        try:
-            from cStringIO import StringIO
-        except ImportError:
-            from StringIO import StringIO
-        import calendar
-        tinfo = TarInfo(zinfo.filename)
-        tinfo.size = len(bytes)
-        tinfo.mtime = calendar.timegm(zinfo.date_time)
-        self.tarfile.addfile(tinfo, StringIO(bytes))
-    def close(self):
-        self.tarfile.close()
-#class TarFileCompat
-
-#--------------------
-# exported functions
-#--------------------
-def is_tarfile(name):
-    """Return True if name points to a tar archive that we
-       are able to handle, else return False.
-    """
-    try:
-        t = open(name)
-        t.close()
-        return True
-    except TarError:
-        return False
-
-bltn_open = open
-open = TarFile.open

=== removed file 'duplicity/urlparse_2_5.py'
--- duplicity/urlparse_2_5.py	2011-10-08 16:22:30 +0000
+++ duplicity/urlparse_2_5.py	1970-01-01 00:00:00 +0000
@@ -1,385 +0,0 @@
-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
-
-"""Parse (absolute and relative) URLs.
-
-See RFC 1808: "Relative Uniform Resource Locators", by R. Fielding,
-UC Irvine, June 1995.
-"""
-
-__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
-           "urlsplit", "urlunsplit"]
-
-# A classification of schemes ('' means apply by default)
-uses_relative = ['ftp', 'ftps', 'http', 'gopher', 'nntp',
-                 'wais', 'file', 'https', 'shttp', 'mms',
-                 'prospero', 'rtsp', 'rtspu', '', 'sftp', 'imap', 'imaps']
-uses_netloc = ['ftp', 'ftps', 'http', 'gopher', 'nntp', 'telnet',
-               'wais', 'file', 'mms', 'https', 'shttp',
-               'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
-               'svn', 'svn+ssh', 'sftp', 'imap', 'imaps']
-non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
-                    'telnet', 'wais', 'snews', 'sip', 'sips', 'imap', 'imaps']
-uses_params = ['ftp', 'ftps', 'hdl', 'prospero', 'http',
-               'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
-               'mms', '', 'sftp', 'imap', 'imaps']
-uses_query = ['http', 'wais', 'https', 'shttp', 'mms',
-              'gopher', 'rtsp', 'rtspu', 'sip', 'sips', 'imap', 'imaps', '']
-uses_fragment = ['ftp', 'ftps', 'hdl', 'http', 'gopher', 'news',
-                 'nntp', 'wais', 'https', 'shttp', 'snews',
-                 'file', 'prospero', '']
-
-# Characters valid in scheme names
-scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
-                'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
-                '0123456789'
-                '+-.')
-
-MAX_CACHE_SIZE = 20
-_parse_cache = {}
-
-def clear_cache():
-    """Clear the parse cache."""
-    global _parse_cache
-    _parse_cache = {}
-
-import string
-def _rsplit(str, delim, numsplit):
-    parts = string.split(str, delim)
-    if len(parts) <= numsplit + 1:
-        return parts
-    else:
-        left = string.join(parts[0:-numsplit], delim)
-        right = string.join(parts[len(parts)-numsplit:], delim)
-        return [left, right]
-
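str.rsplit() only appeared in Python 2.4, hence this helper; splitting from the right matters when the userinfo itself contains '@'. For example:

    # _rsplit("u@ser@host:22", "@", 1)  -> ["u@ser", "host:22"]
    # a plain left split would yield       ["u", "ser@host:22"]
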
-class BaseResult(tuple):
-    """Base class for the parsed result objects.
-
-    This provides the attributes shared by the two derived result
-    objects as read-only properties.  The derived classes are
-    responsible for checking the right number of arguments were
-    supplied to the constructor.
-
-    """
-
-    __slots__ = ()
-
-    # Attributes that access the basic components of the URL:
-
-    def get_scheme(self):
-        return self[0]
-    scheme = property(get_scheme)
-
-    def get_netloc(self):
-        return self[1]
-    netloc = property(get_netloc)
-
-    def get_path(self):
-        return self[2]
-    path = property(get_path)
-
-    def get_query(self):
-        return self[-2]
-    query = property(get_query)
-
-    def get_fragment(self):
-        return self[-1]
-    fragment = property(get_fragment)
-
-    # Additional attributes that provide access to parsed-out portions
-    # of the netloc:
-
-    def get_username(self):
-        netloc = self.netloc
-        if "@" in netloc:
-            userinfo = _rsplit(netloc, "@", 1)[0]
-            if ":" in userinfo:
-                userinfo = userinfo.split(":", 1)[0]
-            return userinfo
-        return None
-    username = property(get_username)
-
-    def get_password(self):
-        netloc = self.netloc
-        if "@" in netloc:
-            userinfo = _rsplit(netloc, "@", 1)[0]
-            if ":" in userinfo:
-                return userinfo.split(":", 1)[1]
-        return None
-    password = property(get_password)
-
-    def get_hostname(self):
-        netloc = self.netloc.split('@')[-1]
-        if '[' in netloc and ']' in netloc:
-            return netloc.split(']')[0][1:].lower()
-        elif ':' in netloc:
-            return netloc.split(':')[0].lower()
-        elif netloc == '':
-            return None
-        else:
-            return netloc.lower()
-    hostname = property(get_hostname)
-
-    def get_port(self):
-        netloc = self.netloc.split('@')[-1].split(']')[-1]
-        if ":" in netloc:
-            port = netloc.split(":", 1)[1]
-            return int(port, 10)
-        return None
-    port = property(get_port)
-
-
-class SplitResult(BaseResult):
-
-    __slots__ = ()
-
-    def __new__(cls, scheme, netloc, path, query, fragment):
-        return BaseResult.__new__(
-            cls, (scheme, netloc, path, query, fragment))
-
-    def geturl(self):
-        return urlunsplit(self)
-
-
-class ParseResult(BaseResult):
-
-    __slots__ = ()
-
-    def __new__(cls, scheme, netloc, path, params, query, fragment):
-        return BaseResult.__new__(
-            cls, (scheme, netloc, path, params, query, fragment))
-
-    def get_params(self):
-        return self[3]
-    params = property(get_params)
-
-    def geturl(self):
-        return urlunparse(self)
-
-
-def urlparse(url, scheme='', allow_fragments=True):
-    """Parse a URL into 6 components:
-    <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
-    Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
-    Note that we don't break the components up in smaller bits
-    (e.g. netloc is a single string) and we don't expand % escapes."""
-    tuple = urlsplit(url, scheme, allow_fragments)
-    scheme, netloc, url, query, fragment = tuple
-    if scheme in uses_params and ';' in url:
-        url, params = _splitparams(url)
-    else:
-        params = ''
-    return ParseResult(scheme, netloc, url, params, query, fragment)
-
-def _splitparams(url):
-    if '/'  in url:
-        i = url.find(';', url.rfind('/'))
-        if i < 0:
-            return url, ''
-    else:
-        i = url.find(';')
-    return url[:i], url[i+1:]
-
-def _splitnetloc(url, start=0):
-    for c in '/?#': # the order is important!
-        delim = url.find(c, start)
-        if delim >= 0:
-            break
-    else:
-        delim = len(url)
-    return url[start:delim], url[delim:]
-
-def urlsplit(url, scheme='', allow_fragments=True):
-    """Parse a URL into 5 components:
-    <scheme>://<netloc>/<path>?<query>#<fragment>
-    Return a 5-tuple: (scheme, netloc, path, query, fragment).
-    Note that we don't break the components up in smaller bits
-    (e.g. netloc is a single string) and we don't expand % escapes."""
-    allow_fragments = bool(allow_fragments)
-    key = url, scheme, allow_fragments
-    cached = _parse_cache.get(key, None)
-    if cached:
-        return cached
-    if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
-        clear_cache()
-    netloc = query = fragment = ''
-    i = url.find(':')
-    if i > 0:
-        if url[:i] == 'http': # optimize the common case
-            scheme = url[:i].lower()
-            url = url[i+1:]
-            if url[:2] == '//':
-                netloc, url = _splitnetloc(url, 2)
-            if allow_fragments and '#' in url:
-                url, fragment = url.split('#', 1)
-            if '?' in url:
-                url, query = url.split('?', 1)
-            v = SplitResult(scheme, netloc, url, query, fragment)
-            _parse_cache[key] = v
-            return v
-        for c in url[:i]:
-            if c not in scheme_chars:
-                break
-        else:
-            scheme, url = url[:i].lower(), url[i+1:]
-    if scheme in uses_netloc and url[:2] == '//':
-        netloc, url = _splitnetloc(url, 2)
-    if allow_fragments and scheme in uses_fragment and '#' in url:
-        url, fragment = url.split('#', 1)
-    if scheme in uses_query and '?' in url:
-        url, query = url.split('?', 1)
-    v = SplitResult(scheme, netloc, url, query, fragment)
-    _parse_cache[key] = v
-    return v
-
-def urlunparse((scheme, netloc, url, params, query, fragment)):
-    """Put a parsed URL back together again.  This may result in a
-    slightly different, but equivalent URL, if the URL that was parsed
-    originally had redundant delimiters, e.g. a ? with an empty query
-    (the draft states that these are equivalent)."""
-    if params:
-        url = "%s;%s" % (url, params)
-    return urlunsplit((scheme, netloc, url, query, fragment))
-
-def urlunsplit((scheme, netloc, url, query, fragment)):
-    if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'):
-        if url and url[:1] != '/': url = '/' + url
-        url = '//' + (netloc or '') + url
-    if scheme:
-        url = scheme + ':' + url
-    if query:
-        url = url + '?' + query
-    if fragment:
-        url = url + '#' + fragment
-    return url
-
-def urljoin(base, url, allow_fragments=True):
-    """Join a base URL and a possibly relative URL to form an absolute
-    interpretation of the latter."""
-    if not base:
-        return url
-    if not url:
-        return base
-    bscheme, bnetloc, bpath, bparams, bquery, bfragment = urlparse(base, '', allow_fragments) #@UnusedVariable
-    scheme, netloc, path, params, query, fragment = urlparse(url, bscheme, allow_fragments)
-    if scheme != bscheme or scheme not in uses_relative:
-        return url
-    if scheme in uses_netloc:
-        if netloc:
-            return urlunparse((scheme, netloc, path,
-                               params, query, fragment))
-        netloc = bnetloc
-    if path[:1] == '/':
-        return urlunparse((scheme, netloc, path,
-                           params, query, fragment))
-    if not (path or params or query):
-        return urlunparse((scheme, netloc, bpath,
-                           bparams, bquery, fragment))
-    segments = bpath.split('/')[:-1] + path.split('/')
-    # XXX The stuff below is bogus in various ways...
-    if segments[-1] == '.':
-        segments[-1] = ''
-    while '.' in segments:
-        segments.remove('.')
-    while 1:
-        i = 1
-        n = len(segments) - 1
-        while i < n:
-            if (segments[i] == '..'
-                and segments[i-1] not in ('', '..')):
-                del segments[i-1:i+1]
-                break
-            i = i+1
-        else:
-            break
-    if segments == ['', '..']:
-        segments[-1] = ''
-    elif len(segments) >= 2 and segments[-1] == '..':
-        segments[-2:] = ['']
-    return urlunparse((scheme, netloc, '/'.join(segments),
-                       params, query, fragment))
-
-def urldefrag(url):
-    """Removes any existing fragment from URL.
-
-    Returns a tuple of the defragmented URL and the fragment.  If
-    the URL contained no fragments, the second element is the
-    empty string.
-    """
-    if '#' in url:
-        s, n, p, a, q, frag = urlparse(url)
-        defrag = urlunparse((s, n, p, a, q, ''))
-        return defrag, frag
-    else:
-        return url, ''
-
-
-test_input = """
-      http://a/b/c/d
-
-      g:h        = <URL:g:h>
-      http:g     = <URL:http://a/b/c/g>
-      http:      = <URL:http://a/b/c/d>
-      g          = <URL:http://a/b/c/g>
-      ./g        = <URL:http://a/b/c/g>
-      g/         = <URL:http://a/b/c/g/>
-      /g         = <URL:http://a/g>
-      //g        = <URL:http://g>
-      ?y         = <URL:http://a/b/c/d?y>
-      g?y        = <URL:http://a/b/c/g?y>
-      g?y/./x    = <URL:http://a/b/c/g?y/./x>
-      .          = <URL:http://a/b/c/>
-      ./         = <URL:http://a/b/c/>
-      ..         = <URL:http://a/b/>
-      ../        = <URL:http://a/b/>
-      ../g       = <URL:http://a/b/g>
-      ../..      = <URL:http://a/>
-      ../../g    = <URL:http://a/g>
-      ../../../g = <URL:http://a/../g>
-      ./../g     = <URL:http://a/b/g>
-      ./g/.      = <URL:http://a/b/c/g/>
-      /./g       = <URL:http://a/./g>
-      g/./h      = <URL:http://a/b/c/g/h>
-      g/../h     = <URL:http://a/b/c/h>
-      http:g     = <URL:http://a/b/c/g>
-      http:      = <URL:http://a/b/c/d>
-      http:?y         = <URL:http://a/b/c/d?y>
-      http:g?y        = <URL:http://a/b/c/g?y>
-      http:g?y/./x    = <URL:http://a/b/c/g?y/./x>
-"""
-
-def test():
-    import sys
-    base = ''
-    if sys.argv[1:]:
-        fn = sys.argv[1]
-        if fn == '-':
-            fp = sys.stdin
-        else:
-            fp = open(fn)
-    else:
-        try:
-            from cStringIO import StringIO
-        except ImportError:
-            from StringIO import StringIO
-        fp = StringIO(test_input)
-    while 1:
-        line = fp.readline()
-        if not line: break
-        words = line.split()
-        if not words:
-            continue
-        url = words[0]
-        parts = urlparse(url)
-        print '%-10s : %s' % (url, parts)
-        abs = urljoin(base, url)
-        if not base:
-            base = abs
-        wrapped = '<URL:%s>' % abs
-        print '%-10s = %s' % (url, wrapped)
-        if len(words) == 3 and words[1] == '=':
-            if wrapped != words[2]:
-                print 'EXPECTED', words[2], '!!!!!!!!!!'
-
-if __name__ == '__main__':
-    test()

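Reviewer note: everything the deleted urlparse_2_5.py copy offered is in the
standard library once 2.6 is the floor.  A minimal sketch using only stdlib
urlparse (the URLs are made-up examples):

    from urlparse import urlsplit, urljoin

    # Named attributes replace the removed BaseResult properties.
    parts = urlsplit("ftp://user:secret@host.example:2121/some/dir")
    print parts.scheme, parts.hostname, parts.port  # ftp host.example 2121

    # Relative-reference resolution replaces the removed urljoin().
    print urljoin("http://a/b/c/d", "../g")  # http://a/b/g
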
=== modified file 'po/POTFILES.in'
--- po/POTFILES.in	2014-01-24 14:44:45 +0000
+++ po/POTFILES.in	2014-04-16 20:51:42 +0000
@@ -7,7 +7,6 @@
 duplicity/selection.py
 duplicity/globals.py
 duplicity/commandline.py
-duplicity/urlparse_2_5.py
 duplicity/dup_temp.py
 duplicity/backend.py
 duplicity/asyncscheduler.py

=== modified file 'po/duplicity.pot'
--- po/duplicity.pot	2014-01-24 14:44:45 +0000
+++ po/duplicity.pot	2014-04-16 20:51:42 +0000
@@ -8,7 +8,7 @@
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: Kenneth Loafman <kenneth@xxxxxxxxxxx>\n"
-"POT-Creation-Date: 2014-01-24 06:47-0600\n"
+"POT-Creation-Date: 2014-04-16 16:34-0400\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@xxxxxx>\n"
@@ -118,194 +118,194 @@
 msgid "Processed volume %d of %d"
 msgstr ""
 
-#: ../bin/duplicity:756
+#: ../bin/duplicity:765
 #, python-format
 msgid "Invalid data - %s hash mismatch for file:"
 msgstr ""
 
-#: ../bin/duplicity:758
+#: ../bin/duplicity:767
 #, python-format
 msgid "Calculated hash: %s"
 msgstr ""
 
-#: ../bin/duplicity:759
+#: ../bin/duplicity:768
 #, python-format
 msgid "Manifest hash: %s"
 msgstr ""
 
-#: ../bin/duplicity:797
+#: ../bin/duplicity:806
 #, python-format
 msgid "Volume was signed by key %s, not %s"
 msgstr ""
 
-#: ../bin/duplicity:827
+#: ../bin/duplicity:836
 #, python-format
 msgid "Verify complete: %s, %s."
 msgstr ""
 
-#: ../bin/duplicity:828
+#: ../bin/duplicity:837
 #, python-format
 msgid "%d file compared"
 msgid_plural "%d files compared"
 msgstr[0] ""
 msgstr[1] ""
 
-#: ../bin/duplicity:830
+#: ../bin/duplicity:839
 #, python-format
 msgid "%d difference found"
 msgid_plural "%d differences found"
 msgstr[0] ""
 msgstr[1] ""
 
-#: ../bin/duplicity:849
+#: ../bin/duplicity:858
 msgid "No extraneous files found, nothing deleted in cleanup."
 msgstr ""
 
-#: ../bin/duplicity:854
+#: ../bin/duplicity:863
 msgid "Deleting this file from backend:"
 msgid_plural "Deleting these files from backend:"
 msgstr[0] ""
 msgstr[1] ""
 
-#: ../bin/duplicity:866
+#: ../bin/duplicity:875
 msgid "Found the following file to delete:"
 msgid_plural "Found the following files to delete:"
 msgstr[0] ""
 msgstr[1] ""
 
-#: ../bin/duplicity:870
+#: ../bin/duplicity:879
 msgid "Run duplicity again with the --force option to actually delete."
 msgstr ""
 
-#: ../bin/duplicity:913
+#: ../bin/duplicity:922
 msgid "There are backup set(s) at time(s):"
 msgstr ""
 
-#: ../bin/duplicity:915
+#: ../bin/duplicity:924
 msgid "Which can't be deleted because newer sets depend on them."
 msgstr ""
 
-#: ../bin/duplicity:919
+#: ../bin/duplicity:928
 msgid ""
 "Current active backup chain is older than specified time.  However, it will "
 "not be deleted.  To remove all your backups, manually purge the repository."
 msgstr ""
 
-#: ../bin/duplicity:925
+#: ../bin/duplicity:934
 msgid "No old backup sets found, nothing deleted."
 msgstr ""
 
-#: ../bin/duplicity:928
+#: ../bin/duplicity:937
 msgid "Deleting backup chain at time:"
 msgid_plural "Deleting backup chains at times:"
 msgstr[0] ""
 msgstr[1] ""
 
-#: ../bin/duplicity:939
+#: ../bin/duplicity:948
 #, python-format
 msgid "Deleting incremental signature chain %s"
 msgstr ""
 
-#: ../bin/duplicity:941
+#: ../bin/duplicity:950
 #, python-format
 msgid "Deleting incremental backup chain %s"
 msgstr ""
 
-#: ../bin/duplicity:944
+#: ../bin/duplicity:953
 #, python-format
 msgid "Deleting complete signature chain %s"
 msgstr ""
 
-#: ../bin/duplicity:946
+#: ../bin/duplicity:955
 #, python-format
 msgid "Deleting complete backup chain %s"
 msgstr ""
 
-#: ../bin/duplicity:952
+#: ../bin/duplicity:961
 msgid "Found old backup chain at the following time:"
 msgid_plural "Found old backup chains at the following times:"
 msgstr[0] ""
 msgstr[1] ""
 
-#: ../bin/duplicity:956
+#: ../bin/duplicity:965
 msgid "Rerun command with --force option to actually delete."
 msgstr ""
 
-#: ../bin/duplicity:1033
+#: ../bin/duplicity:1042
 #, python-format
 msgid "Deleting local %s (not authoritative at backend)."
 msgstr ""
 
-#: ../bin/duplicity:1037
+#: ../bin/duplicity:1046
 #, python-format
 msgid "Unable to delete %s: %s"
 msgstr ""
 
-#: ../bin/duplicity:1065 ../duplicity/dup_temp.py:263
+#: ../bin/duplicity:1074 ../duplicity/dup_temp.py:263
 #, python-format
 msgid "Failed to read %s: %s"
 msgstr ""
 
-#: ../bin/duplicity:1079
+#: ../bin/duplicity:1088
 #, python-format
 msgid "Copying %s to local cache."
 msgstr ""
 
-#: ../bin/duplicity:1127
+#: ../bin/duplicity:1136
 msgid "Local and Remote metadata are synchronized, no sync needed."
 msgstr ""
 
-#: ../bin/duplicity:1132
+#: ../bin/duplicity:1141
 msgid "Synchronizing remote metadata to local cache..."
 msgstr ""
 
-#: ../bin/duplicity:1145
+#: ../bin/duplicity:1156
 msgid "Sync would copy the following from remote to local:"
 msgstr ""
 
-#: ../bin/duplicity:1148
+#: ../bin/duplicity:1159
 msgid "Sync would remove the following spurious local files:"
 msgstr ""
 
-#: ../bin/duplicity:1191
+#: ../bin/duplicity:1202
 msgid "Unable to get free space on temp."
 msgstr ""
 
-#: ../bin/duplicity:1199
+#: ../bin/duplicity:1210
 #, python-format
 msgid "Temp space has %d available, backup needs approx %d."
 msgstr ""
 
-#: ../bin/duplicity:1202
+#: ../bin/duplicity:1213
 #, python-format
 msgid "Temp has %d available, backup will use approx %d."
 msgstr ""
 
-#: ../bin/duplicity:1210
+#: ../bin/duplicity:1221
 msgid "Unable to get max open files."
 msgstr ""
 
-#: ../bin/duplicity:1214
+#: ../bin/duplicity:1225
 #, python-format
 msgid ""
 "Max open files of %s is too low, should be >= 1024.\n"
 "Use 'ulimit -n 1024' or higher to correct.\n"
 msgstr ""
 
-#: ../bin/duplicity:1263
+#: ../bin/duplicity:1274
 msgid ""
 "RESTART: The first volume failed to upload before termination.\n"
 "         Restart is impossible...starting backup from beginning."
 msgstr ""
 
-#: ../bin/duplicity:1269
+#: ../bin/duplicity:1280
 #, python-format
 msgid ""
 "RESTART: Volumes %d to %d failed to upload before termination.\n"
 "         Restarting backup at volume %d."
 msgstr ""
 
-#: ../bin/duplicity:1276
+#: ../bin/duplicity:1287
 #, python-format
 msgid ""
 "RESTART: Impossible backup state: manifest has %d vols, remote has %d vols.\n"
@@ -314,7 +314,7 @@
 "         backup then restart the backup from the beginning."
 msgstr ""
 
-#: ../bin/duplicity:1298
+#: ../bin/duplicity:1309
 msgid ""
 "\n"
 "PYTHONOPTIMIZE in the environment causes duplicity to fail to\n"
@@ -324,59 +324,59 @@
 "See https://bugs.launchpad.net/duplicity/+bug/931175\n";
 msgstr ""
 
-#: ../bin/duplicity:1388
+#: ../bin/duplicity:1400
 #, python-format
 msgid "Last %s backup left a partial set, restarting."
 msgstr ""
 
-#: ../bin/duplicity:1392
+#: ../bin/duplicity:1404
 #, python-format
 msgid "Cleaning up previous partial %s backup set, restarting."
 msgstr ""
 
-#: ../bin/duplicity:1403
+#: ../bin/duplicity:1415
 msgid "Last full backup date:"
 msgstr ""
 
-#: ../bin/duplicity:1405
+#: ../bin/duplicity:1417
 msgid "Last full backup date: none"
 msgstr ""
 
-#: ../bin/duplicity:1407
+#: ../bin/duplicity:1419
 msgid "Last full backup is too old, forcing full backup"
 msgstr ""
 
-#: ../bin/duplicity:1450
+#: ../bin/duplicity:1462
 msgid ""
 "When using symmetric encryption, the signing passphrase must equal the "
 "encryption passphrase."
 msgstr ""
 
-#: ../bin/duplicity:1503
+#: ../bin/duplicity:1515
 msgid "INT intercepted...exiting."
 msgstr ""
 
-#: ../bin/duplicity:1511
+#: ../bin/duplicity:1523
 #, python-format
 msgid "GPG error detail: %s"
 msgstr ""
 
-#: ../bin/duplicity:1521
+#: ../bin/duplicity:1533
 #, python-format
 msgid "User error detail: %s"
 msgstr ""
 
-#: ../bin/duplicity:1531
+#: ../bin/duplicity:1543
 #, python-format
 msgid "Backend error detail: %s"
 msgstr ""
 
-#: ../bin/rdiffdir:59 ../duplicity/commandline.py:237
+#: ../bin/rdiffdir:56 ../duplicity/commandline.py:237
 #, python-format
 msgid "Error opening file %s"
 msgstr ""
 
-#: ../bin/rdiffdir:122
+#: ../bin/rdiffdir:119
 #, python-format
 msgid "File %s already exists, will not overwrite."
 msgstr ""
@@ -493,8 +493,8 @@
 #. Used in usage help to represent a Unix-style path name. Example:
 #. --archive-dir <path>
 #: ../duplicity/commandline.py:258 ../duplicity/commandline.py:268
-#: ../duplicity/commandline.py:285 ../duplicity/commandline.py:342
-#: ../duplicity/commandline.py:530 ../duplicity/commandline.py:746
+#: ../duplicity/commandline.py:285 ../duplicity/commandline.py:351
+#: ../duplicity/commandline.py:548 ../duplicity/commandline.py:764
 msgid "path"
 msgstr ""
 
@@ -505,8 +505,8 @@
 #. Used in usage help to represent an ID for a GnuPG key. Example:
 #. --encrypt-key <gpg_key_id>
 #: ../duplicity/commandline.py:280 ../duplicity/commandline.py:287
-#: ../duplicity/commandline.py:362 ../duplicity/commandline.py:511
-#: ../duplicity/commandline.py:719
+#: ../duplicity/commandline.py:371 ../duplicity/commandline.py:529
+#: ../duplicity/commandline.py:737
 msgid "gpg-key-id"
 msgstr ""
 
@@ -514,42 +514,42 @@
 #. matching one or more files, as described in the documentation.
 #. Example:
 #. --exclude <shell_pattern>
-#: ../duplicity/commandline.py:295 ../duplicity/commandline.py:388
-#: ../duplicity/commandline.py:769
+#: ../duplicity/commandline.py:295 ../duplicity/commandline.py:397
+#: ../duplicity/commandline.py:787
 msgid "shell_pattern"
 msgstr ""
 
 #. Used in usage help to represent the name of a file. Example:
 #. --log-file <filename>
 #: ../duplicity/commandline.py:301 ../duplicity/commandline.py:308
-#: ../duplicity/commandline.py:313 ../duplicity/commandline.py:390
-#: ../duplicity/commandline.py:395 ../duplicity/commandline.py:406
-#: ../duplicity/commandline.py:715
+#: ../duplicity/commandline.py:313 ../duplicity/commandline.py:399
+#: ../duplicity/commandline.py:404 ../duplicity/commandline.py:415
+#: ../duplicity/commandline.py:733
 msgid "filename"
 msgstr ""
 
 #. Used in usage help to represent a regular expression (regexp).
-#: ../duplicity/commandline.py:320 ../duplicity/commandline.py:397
+#: ../duplicity/commandline.py:320 ../duplicity/commandline.py:406
 msgid "regular_expression"
 msgstr ""
 
 #. Used in usage help to represent a time spec for a previous
 #. point in time, as described in the documentation. Example:
 #. duplicity remove-older-than time [options] target_url
-#: ../duplicity/commandline.py:354 ../duplicity/commandline.py:462
-#: ../duplicity/commandline.py:801
+#: ../duplicity/commandline.py:363 ../duplicity/commandline.py:474
+#: ../duplicity/commandline.py:819
 msgid "time"
 msgstr ""
 
 #. Used in usage help. (Should be consistent with the "Options:"
 #. header.) Example:
 #. duplicity [full|incremental] [options] source_dir target_url
-#: ../duplicity/commandline.py:358 ../duplicity/commandline.py:465
-#: ../duplicity/commandline.py:522 ../duplicity/commandline.py:734
+#: ../duplicity/commandline.py:367 ../duplicity/commandline.py:477
+#: ../duplicity/commandline.py:540 ../duplicity/commandline.py:752
 msgid "options"
 msgstr ""
 
-#: ../duplicity/commandline.py:373
+#: ../duplicity/commandline.py:382
 #, python-format
 msgid ""
 "Running in 'ignore errors' mode due to %s; please re-consider if this was "
@@ -557,150 +557,152 @@
 msgstr ""
 
 #. Used in usage help to represent an imap mailbox
-#: ../duplicity/commandline.py:386
+#: ../duplicity/commandline.py:395
 msgid "imap_mailbox"
 msgstr ""
 
-#: ../duplicity/commandline.py:400
+#: ../duplicity/commandline.py:409
 msgid "file_descriptor"
 msgstr ""
 
 #. Used in usage help to represent a desired number of
 #. something. Example:
 #. --num-retries <number>
-#: ../duplicity/commandline.py:411 ../duplicity/commandline.py:433
-#: ../duplicity/commandline.py:448 ../duplicity/commandline.py:486
-#: ../duplicity/commandline.py:560 ../duplicity/commandline.py:729
+#: ../duplicity/commandline.py:420 ../duplicity/commandline.py:442
+#: ../duplicity/commandline.py:454 ../duplicity/commandline.py:460
+#: ../duplicity/commandline.py:498 ../duplicity/commandline.py:503
+#: ../duplicity/commandline.py:507 ../duplicity/commandline.py:578
+#: ../duplicity/commandline.py:747
 msgid "number"
 msgstr ""
 
 #. Used in usage help (noun)
-#: ../duplicity/commandline.py:414
+#: ../duplicity/commandline.py:423
 msgid "backup name"
 msgstr ""
 
 #. noun
-#: ../duplicity/commandline.py:495 ../duplicity/commandline.py:498
-#: ../duplicity/commandline.py:501 ../duplicity/commandline.py:700
+#: ../duplicity/commandline.py:513 ../duplicity/commandline.py:516
+#: ../duplicity/commandline.py:519 ../duplicity/commandline.py:718
 msgid "command"
 msgstr ""
 
-#: ../duplicity/commandline.py:519
+#: ../duplicity/commandline.py:537
 msgid "paramiko|pexpect"
 msgstr ""
 
-#: ../duplicity/commandline.py:525
+#: ../duplicity/commandline.py:543
 msgid "pem formatted bundle of certificate authorities"
 msgstr ""
 
 #. Used in usage help. Example:
 #. --timeout <seconds>
-#: ../duplicity/commandline.py:535 ../duplicity/commandline.py:763
+#: ../duplicity/commandline.py:553 ../duplicity/commandline.py:781
 msgid "seconds"
 msgstr ""
 
 #. abbreviation for "character" (noun)
-#: ../duplicity/commandline.py:541 ../duplicity/commandline.py:697
+#: ../duplicity/commandline.py:559 ../duplicity/commandline.py:715
 msgid "char"
 msgstr ""
 
-#: ../duplicity/commandline.py:663
+#: ../duplicity/commandline.py:681
 #, python-format
 msgid "Using archive dir: %s"
 msgstr ""
 
-#: ../duplicity/commandline.py:664
+#: ../duplicity/commandline.py:682
 #, python-format
 msgid "Using backup name: %s"
 msgstr ""
 
-#: ../duplicity/commandline.py:671
+#: ../duplicity/commandline.py:689
 #, python-format
 msgid "Command line error: %s"
 msgstr ""
 
-#: ../duplicity/commandline.py:672
+#: ../duplicity/commandline.py:690
 msgid "Enter 'duplicity --help' for help screen."
 msgstr ""
 
 #. Used in usage help to represent a Unix-style path name. Example:
 #. rsync://user[:password]@other_host[:port]//absolute_path
-#: ../duplicity/commandline.py:685
+#: ../duplicity/commandline.py:703
 msgid "absolute_path"
 msgstr ""
 
 #. Used in usage help. Example:
 #. tahoe://alias/some_dir
-#: ../duplicity/commandline.py:689
+#: ../duplicity/commandline.py:707
 msgid "alias"
 msgstr ""
 
 #. Used in help to represent a "bucket name" for Amazon Web
 #. Services' Simple Storage Service (S3). Example:
 #. s3://other.host/bucket_name[/prefix]
-#: ../duplicity/commandline.py:694
+#: ../duplicity/commandline.py:712
 msgid "bucket_name"
 msgstr ""
 
 #. Used in usage help to represent the name of a container in
 #. Amazon Web Services' Cloudfront. Example:
 #. cf+http://container_name
-#: ../duplicity/commandline.py:705
+#: ../duplicity/commandline.py:723
 msgid "container_name"
 msgstr ""
 
 #. noun
-#: ../duplicity/commandline.py:708
+#: ../duplicity/commandline.py:726
 msgid "count"
 msgstr ""
 
 #. Used in usage help to represent the name of a file directory
-#: ../duplicity/commandline.py:711
+#: ../duplicity/commandline.py:729
 msgid "directory"
 msgstr ""
 
 #. Used in usage help, e.g. to represent the name of a code
 #. module. Example:
 #. rsync://user[:password]@other.host[:port]::/module/some_dir
-#: ../duplicity/commandline.py:724
+#: ../duplicity/commandline.py:742
 msgid "module"
 msgstr ""
 
 #. Used in usage help to represent an internet hostname. Example:
 #. ftp://user[:password]@other.host[:port]/some_dir
-#: ../duplicity/commandline.py:738
+#: ../duplicity/commandline.py:756
 msgid "other.host"
 msgstr ""
 
 #. Used in usage help. Example:
 #. ftp://user[:password]@other.host[:port]/some_dir
-#: ../duplicity/commandline.py:742
+#: ../duplicity/commandline.py:760
 msgid "password"
 msgstr ""
 
 #. Used in usage help to represent a TCP port number. Example:
 #. ftp://user[:password]@other.host[:port]/some_dir
-#: ../duplicity/commandline.py:750
+#: ../duplicity/commandline.py:768
 msgid "port"
 msgstr ""
 
 #. Used in usage help. This represents a string to be used as a
 #. prefix to names for backup files created by Duplicity. Example:
 #. s3://other.host/bucket_name[/prefix]
-#: ../duplicity/commandline.py:755
+#: ../duplicity/commandline.py:773
 msgid "prefix"
 msgstr ""
 
 #. Used in usage help to represent a Unix-style path name. Example:
 #. rsync://user[:password]@other.host[:port]/relative_path
-#: ../duplicity/commandline.py:759
+#: ../duplicity/commandline.py:777
 msgid "relative_path"
 msgstr ""
 
 #. Used in usage help to represent the name of a single file
 #. directory or a Unix-style path to a directory. Example:
 #. file:///some_dir
-#: ../duplicity/commandline.py:774
+#: ../duplicity/commandline.py:792
 msgid "some_dir"
 msgstr ""
 
@@ -708,14 +710,14 @@
 #. directory or a Unix-style path to a directory where files will be
 #. coming FROM. Example:
 #. duplicity [full|incremental] [options] source_dir target_url
-#: ../duplicity/commandline.py:780
+#: ../duplicity/commandline.py:798
 msgid "source_dir"
 msgstr ""
 
 #. Used in usage help to represent a URL files will be coming
 #. FROM. Example:
 #. duplicity [restore] [options] source_url target_dir
-#: ../duplicity/commandline.py:785
+#: ../duplicity/commandline.py:803
 msgid "source_url"
 msgstr ""
 
@@ -723,75 +725,75 @@
 #. directory or a Unix-style path to a directory. where files will be
 #. going TO. Example:
 #. duplicity [restore] [options] source_url target_dir
-#: ../duplicity/commandline.py:791
+#: ../duplicity/commandline.py:809
 msgid "target_dir"
 msgstr ""
 
 #. Used in usage help to represent a URL files will be going TO.
 #. Example:
 #. duplicity [full|incremental] [options] source_dir target_url
-#: ../duplicity/commandline.py:796
+#: ../duplicity/commandline.py:814
 msgid "target_url"
 msgstr ""
 
 #. Used in usage help to represent a user name (i.e. login).
 #. Example:
 #. ftp://user[:password]@other.host[:port]/some_dir
-#: ../duplicity/commandline.py:806
+#: ../duplicity/commandline.py:824
 msgid "user"
 msgstr ""
 
 #. Header in usage help
-#: ../duplicity/commandline.py:823
+#: ../duplicity/commandline.py:841
 msgid "Backends and their URL formats:"
 msgstr ""
 
 #. Header in usage help
-#: ../duplicity/commandline.py:848
+#: ../duplicity/commandline.py:866
 msgid "Commands:"
 msgstr ""
 
-#: ../duplicity/commandline.py:872
+#: ../duplicity/commandline.py:890
 #, python-format
 msgid "Specified archive directory '%s' does not exist, or is not a directory"
 msgstr ""
 
-#: ../duplicity/commandline.py:881
+#: ../duplicity/commandline.py:899
 #, python-format
 msgid ""
 "Sign key should be an 8 character hex string, like 'AA0E73D2'.\n"
 "Received '%s' instead."
 msgstr ""
 
-#: ../duplicity/commandline.py:941
+#: ../duplicity/commandline.py:959
 #, python-format
 msgid ""
 "Restore destination directory %s already exists.\n"
 "Will not overwrite."
 msgstr ""
 
-#: ../duplicity/commandline.py:946
+#: ../duplicity/commandline.py:964
 #, python-format
 msgid "Verify directory %s does not exist"
 msgstr ""
 
-#: ../duplicity/commandline.py:952
+#: ../duplicity/commandline.py:970
 #, python-format
 msgid "Backup source directory %s does not exist."
 msgstr ""
 
-#: ../duplicity/commandline.py:981
+#: ../duplicity/commandline.py:999
 #, python-format
 msgid "Command line warning: %s"
 msgstr ""
 
-#: ../duplicity/commandline.py:981
+#: ../duplicity/commandline.py:999
 msgid ""
 "Selection options --exclude/--include\n"
 "currently work only when backing up,not restoring."
 msgstr ""
 
-#: ../duplicity/commandline.py:1029
+#: ../duplicity/commandline.py:1047
 #, python-format
 msgid ""
 "Bad URL '%s'.\n"
@@ -799,61 +801,61 @@
 "\"file:///usr/local\".  See the man page for more information."
 msgstr ""
 
-#: ../duplicity/commandline.py:1054
+#: ../duplicity/commandline.py:1072
 msgid "Main action: "
 msgstr ""
 
-#: ../duplicity/backend.py:87
+#: ../duplicity/backend.py:109
 #, python-format
 msgid "Import of %s %s"
 msgstr ""
 
-#: ../duplicity/backend.py:164
+#: ../duplicity/backend.py:186
 #, python-format
 msgid "Could not initialize backend: %s"
 msgstr ""
 
-#: ../duplicity/backend.py:320
+#: ../duplicity/backend.py:311
 #, python-format
 msgid "Attempt %s failed: %s: %s"
 msgstr ""
 
-#: ../duplicity/backend.py:322 ../duplicity/backend.py:352
-#: ../duplicity/backend.py:359
+#: ../duplicity/backend.py:313 ../duplicity/backend.py:343
+#: ../duplicity/backend.py:350
 #, python-format
 msgid "Backtrace of previous error: %s"
 msgstr ""
 
-#: ../duplicity/backend.py:350
+#: ../duplicity/backend.py:341
 #, python-format
 msgid "Attempt %s failed. %s: %s"
 msgstr ""
 
-#: ../duplicity/backend.py:361
+#: ../duplicity/backend.py:352
 #, python-format
 msgid "Giving up after %s attempts. %s: %s"
 msgstr ""
 
-#: ../duplicity/backend.py:546 ../duplicity/backend.py:570
+#: ../duplicity/backend.py:537 ../duplicity/backend.py:561
 #, python-format
 msgid "Reading results of '%s'"
 msgstr ""
 
-#: ../duplicity/backend.py:585
+#: ../duplicity/backend.py:576
 #, python-format
 msgid "Running '%s' failed with code %d (attempt #%d)"
 msgid_plural "Running '%s' failed with code %d (attempt #%d)"
 msgstr[0] ""
 msgstr[1] ""
 
-#: ../duplicity/backend.py:589
+#: ../duplicity/backend.py:580
 #, python-format
 msgid ""
 "Error is:\n"
 "%s"
 msgstr ""
 
-#: ../duplicity/backend.py:591
+#: ../duplicity/backend.py:582
 #, python-format
 msgid "Giving up trying to execute '%s' after %d attempt"
 msgid_plural "Giving up trying to execute '%s' after %d attempts"

=== modified file 'setup.py'
--- setup.py	2014-04-16 20:51:42 +0000
+++ setup.py	2014-04-16 20:51:42 +0000
@@ -28,8 +28,8 @@
 
 version_string = "$version"
 
-if sys.version_info[:2] < (2,4):
-    print "Sorry, duplicity requires version 2.4 or later of python"
+if sys.version_info[:2] < (2, 6):
+    print "Sorry, duplicity requires version 2.6 or later of python"
     sys.exit(1)
 
 incdir_list = libdir_list = None
@@ -53,8 +53,6 @@
                 'README',
                 'README-REPO',
                 'README-LOG',
-                'tarfile-LICENSE',
-                'tarfile-CHANGES',
                 'CHANGELOG']),
               ]
 

=== removed file 'tarfile-CHANGES'
--- tarfile-CHANGES	2011-08-23 18:14:17 +0000
+++ tarfile-CHANGES	1970-01-01 00:00:00 +0000
@@ -1,3 +0,0 @@
-tarfile.py is a copy of python2.7's tarfile.py.
-
-No changes besides 2.4 compatibility have been made.

=== removed file 'tarfile-LICENSE'
--- tarfile-LICENSE	2011-10-05 14:13:31 +0000
+++ tarfile-LICENSE	1970-01-01 00:00:00 +0000
@@ -1,92 +0,0 @@
-irdu-backup uses tarfile, written by Lars Gustäbel.  The following
-notice was included in the tarfile distribution:
-
------------------------------------------------------------------
-      tarfile    - python module for accessing TAR archives
-
-                   Lars Gustäbel <lars@xxxxxxxxxxxx>
------------------------------------------------------------------
-
-
-Description
------------
-
-The tarfile module provides a set of functions for accessing  TAR
-format archives. Because  it is written  in pure Python,  it does
-not require any platform specific functions. GZIP  compressed TAR
-archives are seamlessly supported.
-
-
-Requirements
-------------
-
-tarfile needs at least Python version 2.2.
-(For a tarfile for Python 1.5.2 take a look on the webpage.)
-
-
-!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-IMPORTANT NOTE (*NIX only)
---------------------------
-
-The addition of character and block devices is enabled by a C
-extension module (_tarfile.c), because Python does not yet
-provide the major() and minor() macros.
-Currently Linux and FreeBSD are implemented. If your OS is not
-supported, then please send me a patch.
-
-!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
-
-Download
---------
-
-You can download the newest version at URL:
-http://www.gustaebel.de/lars/tarfile/
-
-
-Installation
-------------
-
-1. extract the tarfile-x.x.x.tar.gz archive to a temporary folder
-2. type "python setup.py install"
-
-
-Contact
--------
-
-Suggestions, comments, bug reports and patches to:
-lars@xxxxxxxxxxxx
-
-
-License
--------
-
-Copyright (C) 2002 Lars Gustäbel <lars@xxxxxxxxxxxx>
-All rights reserved.
-
-Permission  is  hereby granted,  free  of charge,  to  any person
-obtaining a  copy of  this software  and associated documentation
-files  (the  "Software"),  to   deal  in  the  Software   without
-restriction,  including  without limitation  the  rights to  use,
-copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies  of  the  Software,  and to  permit  persons  to  whom the
-Software  is  furnished  to  do  so,  subject  to  the  following
-conditions:
-
-The above copyright  notice and this  permission notice shall  be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
-EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
-OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
-NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
-HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
-WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-OTHER DEALINGS IN THE SOFTWARE.
-
-
-README Version
---------------
-
-$Id: tarfile-LICENSE,v 1.1 2002/10/29 01:49:46 bescoto Exp $

=== modified file 'testing/__init__.py'
--- testing/__init__.py	2014-04-16 20:51:42 +0000
+++ testing/__init__.py	2014-04-16 20:51:42 +0000
@@ -1,3 +0,0 @@
-import sys
-if sys.version_info < (2, 5,):
-	import tests

=== modified file 'testing/run-tests'
--- testing/run-tests	2014-04-16 20:51:42 +0000
+++ testing/run-tests	2014-04-16 20:51:42 +0000
@@ -46,7 +46,7 @@
 done
 
 # run against all supported python versions
-for v in 2.4 2.5 2.6 2.7; do
+for v in 2.6 2.7; do
     type python$v >& /dev/null
     if [ $? == 1 ]; then
         echo "python$v not found on system"

=== modified file 'testing/run-tests-ve'
--- testing/run-tests-ve	2014-04-16 20:51:42 +0000
+++ testing/run-tests-ve	2014-04-16 20:51:42 +0000
@@ -46,7 +46,7 @@
 done
 
 # run against all supported python versions
-for v in 2.4 2.5 2.6 2.7; do
+for v in 2.6 2.7; do
     ve=~/virtual$v
     if [ $? == 1 ]; then
         echo "virtual$v not found on system"

=== modified file 'testing/tests/__init__.py'
--- testing/tests/__init__.py	2014-04-16 20:51:42 +0000
+++ testing/tests/__init__.py	2014-04-16 20:51:42 +0000
@@ -41,12 +41,3 @@
 # Standardize time
 os.environ['TZ'] = 'US/Central'
 time.tzset()
-
-# Automatically add all submodules into this namespace.  Helps python2.4
-# unittest work.
-if sys.version_info < (2, 5,):
-    for module in os.listdir(_this_dir):
-        if module == '__init__.py' or module[-3:] != '.py':
-            continue
-        __import__(module[:-3], locals(), globals())
-    del module

=== modified file 'testing/tests/test_parsedurl.py'
--- testing/tests/test_parsedurl.py	2011-11-04 04:33:06 +0000
+++ testing/tests/test_parsedurl.py	2014-04-16 20:51:42 +0000
@@ -55,6 +55,13 @@
         assert pu.username is None, pu.username
         assert pu.port is None, pu.port
 
+        pu = duplicity.backend.ParsedUrl("file://home")
+        assert pu.scheme == "file", pu.scheme
+        assert pu.netloc == "", pu.netloc
+        assert pu.path == "//home", pu.path
+        assert pu.username is None, pu.username
+        assert pu.port is None, pu.port
+
         pu = duplicity.backend.ParsedUrl("ftp://foo@bar:pass@xxxxxxxxxxx:123/home";)
         assert pu.scheme == "ftp", pu.scheme
         assert pu.netloc == "foo@bar:pass@xxxxxxxxxxx:123", pu.netloc
@@ -121,7 +128,9 @@
     def test_errors(self):
         """Test various url errors"""
         self.assertRaises(InvalidBackendURL, duplicity.backend.ParsedUrl,
-                          "ssh://foo@bar:pass@xxxxxxxxxxx:/home")
+                          "ssh:///home")  # we require a hostname for ssh
+        self.assertRaises(InvalidBackendURL, duplicity.backend.ParsedUrl,
+                          "file:path")  # no relative paths for non-netloc schemes
         self.assertRaises(UnsupportedBackendScheme, duplicity.backend.get_backend,
                           "foo://foo@bar:pass@xxxxxxxxxxx/home")
 

=== modified file 'testing/tests/test_tarfile.py'
--- testing/tests/test_tarfile.py	2013-07-12 19:47:32 +0000
+++ testing/tests/test_tarfile.py	2014-04-16 20:51:42 +0000
@@ -1,7 +1,6 @@
 # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
 #
-# Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
-# Copyright 2007 Kenneth Loafman <kenneth@xxxxxxxxxxx>
+# Copyright 2013 Michael Terry <mike@xxxxxxxxxxx>
 #
 # This file is part of duplicity.
 #
@@ -19,309 +18,18 @@
 # along with duplicity; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-#
-# unittest for the tarfile module
-#
-# $Id: test_tarfile.py,v 1.11 2009/04/02 14:47:12 loafman Exp $
-
 import helper
-import sys, os, shutil, StringIO, tempfile, unittest, stat
-
+import unittest
+from duplicity import cached_ops
 from duplicity import tarfile
 
 helper.setup()
 
-SAMPLETAR = "testtar.tar"
-TEMPDIR   = tempfile.mktemp()
-
-def join(*args):
-    return os.path.normpath(apply(os.path.join, args))
-
-class BaseTest(unittest.TestCase):
-    """Base test for tarfile.
-    """
-
-    def setUp(self):
-        os.mkdir(TEMPDIR)
-        self.tar = tarfile.open(SAMPLETAR)
-        self.tar.errorlevel = 1
-
-    def tearDown(self):
-        self.tar.close()
-        shutil.rmtree(TEMPDIR)
-
-    def isroot(self):
-        return hasattr(os, "geteuid") and os.geteuid() == 0
-
-class Test_All(BaseTest):
-    """Allround test.
-    """
-    files_in_tempdir = ["tempdir",
-                        "tempdir/0length",
-                        "tempdir/large",
-                        "tempdir/hardlinked1",
-                        "tempdir/hardlinked2",
-                        "tempdir/fifo",
-                        "tempdir/symlink"]
-
-    tempdir_data = {"0length": "",
-                     "large": "hello, world!" * 10000,
-                     "hardlinked1": "foo",
-                     "hardlinked2": "foo"}
-
-    def test_iteration(self):
-        """Test iteration through temp2.tar"""
-        self.make_temptar()
-        i = 0
-        tf = tarfile.TarFile("none", "r", FileLogger(open("temp2.tar", "rb")))
-        tf.debug = 3
-        for tarinfo in tf: i += 1 #@UnusedVariable
-        assert i >= 6, i
-
-    def _test_extraction(self):
-        """Test if regular files and links are extracted correctly.
-        """
-        for tarinfo in self.tar:
-            if tarinfo.isreg() or tarinfo.islnk() or tarinfo.issym():
-                self.tar.extract(tarinfo, TEMPDIR)
-                name  = join(TEMPDIR, tarinfo.name)
-                data1 = file(name, "rb").read()
-                data2 = self.tar.extractfile(tarinfo).read()
-                self.assert_(data1 == data2,
-                             "%s was not extracted successfully."
-                             % tarinfo.name)
-
-                if not tarinfo.issym():
-                    self.assert_(tarinfo.mtime == os.path.getmtime(name),
-                                "%s's modification time was not set correctly."
-                                % tarinfo.name)
-
-            if tarinfo.isdev():
-                if hasattr(os, "mkfifo") and tarinfo.isfifo():
-                    self.tar.extract(tarinfo, TEMPDIR)
-                    name = join(TEMPDIR, tarinfo.name)
-                    self.assert_(tarinfo.mtime == os.path.getmtime(name),
-                                "%s's modification time was not set correctly."
-                                % tarinfo.name)
-
-                elif hasattr(os, "mknod") and self.isroot():
-                    self.tar.extract(tarinfo, TEMPDIR)
-                    name = join(TEMPDIR, tarinfo.name)
-                    self.assert_(tarinfo.mtime == os.path.getmtime(name),
-                                "%s's modification time was not set correctly."
-                                % tarinfo.name)
-
-    def test_addition(self):
-        """Test if regular files are added correctly.
-           For this, we extract all regular files from our sample tar
-           and add them to a new one, which we check afterwards.
-        """
-        files = []
-        for tarinfo in self.tar:
-            if tarinfo.isreg():
-                self.tar.extract(tarinfo, TEMPDIR)
-                files.append(tarinfo.name)
-
-        buf = StringIO.StringIO()
-        tar = tarfile.open("test.tar", "w", buf)
-        for f in files:
-            path = join(TEMPDIR, f)
-            tarinfo = tar.gettarinfo(path)
-            tarinfo.name = f
-            tar.addfile(tarinfo, file(path, "rb"))
-        tar.close()
-
-        buf.seek(0)
-        tar = tarfile.open("test.tar", "r", buf)
-        for tarinfo in tar:
-            data1 = file(join(TEMPDIR, tarinfo.name), "rb").read()
-            data2 = tar.extractfile(tarinfo).read()
-            self.assert_(data1 == data2)
-        tar.close()
-
-    def make_tempdir(self):
-        """Make a temp directory with assorted files in it"""
-        try:
-            os.lstat("tempdir")
-        except OSError:
-            pass
-        else: # assume already exists
-            assert not os.system("rm -r tempdir")
-        os.mkdir("tempdir")
-
-        def write_file(name):
-            """Write appropriate data into file named name in tempdir"""
-            fp = open("tempdir/%s" % (name,), "wb")
-            fp.write(self.tempdir_data[name])
-            fp.close()
-
-        # Make 0length file
-        write_file("0length")
-        os.chmod("tempdir/%s" % ("0length",), 0604)
-
-        # Make regular file 130000 bytes in length
-        write_file("large")
-
-        # Make hard linked files
-        write_file("hardlinked1")
-        os.link("tempdir/hardlinked1", "tempdir/hardlinked2")
-
-        # Make a fifo
-        os.mkfifo("tempdir/fifo")
-
-        # Make symlink
-        os.symlink("foobar", "tempdir/symlink")
-
-    def make_temptar(self):
-        """Tar up tempdir, write to "temp2.tar" """
-        try:
-            os.lstat("temp2.tar")
-        except OSError:
-            pass
-        else:
-            assert not os.system("rm temp2.tar")
-
-        self.make_tempdir()
-        tf = tarfile.TarFile("temp2.tar", "w")
-        for filename in self.files_in_tempdir:
-            tf.add(filename, filename, 0)
-        tf.close()
-
-    def test_tarfile_creation(self):
-        """Create directory, make tarfile, extract using gnutar, compare"""
-        self.make_temptar()
-        self.extract_and_compare_tarfile()
-
-    def extract_and_compare_tarfile(self):
-        old_umask = os.umask(022)
-        os.system("rm -r tempdir")
-        assert not os.system("tar -xf temp2.tar")
-
-        def compare_data(name):
-            """Assert data is what should be"""
-            fp = open("tempdir/" + name, "rb")
-            buf = fp.read()
-            fp.close()
-            assert buf == self.tempdir_data[name]
-
-        s = os.lstat("tempdir")
-        assert stat.S_ISDIR(s.st_mode)
-
-        for key in self.tempdir_data: compare_data(key)
-
-        # Check to make sure permissions saved
-        s = os.lstat("tempdir/0length")
-        assert stat.S_IMODE(s.st_mode) == 0604, stat.S_IMODE(s.st_mode)
-
-        s = os.lstat("tempdir/fifo")
-        assert stat.S_ISFIFO(s.st_mode)
-
-        # Check to make sure hardlinked files still hardlinked
-        s1 = os.lstat("tempdir/hardlinked1")
-        s2 = os.lstat("tempdir/hardlinked2")
-        assert s1.st_ino == s2.st_ino
-
-        # Check symlink
-        s = os.lstat("tempdir/symlink")
-        assert stat.S_ISLNK(s.st_mode)
-
-        os.umask(old_umask)
-
-class Test_FObj(BaseTest):
-    """Test for read operations via file-object.
-    """
-
-    def _test_sparse(self):
-        """Test extraction of the sparse file.
-        """
-        BLOCK = 4096
-        for tarinfo in self.tar:
-            if tarinfo.issparse():
-                f = self.tar.extractfile(tarinfo)
-                b = 0
-                block = 0
-                while 1:
-                    buf = f.read(BLOCK)
-                    if not buf:
-                        break
-                    block += 1
-                    self.assert_(BLOCK == len(buf))
-                    if not b:
-                        self.assert_("\0" * BLOCK == buf,
-                                     "sparse block is broken")
-                    else:
-                        self.assert_("0123456789ABCDEF" * 256 == buf,
-                                     "sparse block is broken")
-                    b = 1 - b
-                self.assert_(block == 24, "too few sparse blocks")
-                f.close()
-
-    def _test_readlines(self):
-        """Test readlines() method of _FileObject.
-        """
-        self.tar.extract("pep.txt", TEMPDIR)
-        lines1 = file(join(TEMPDIR, "pep.txt"), "r").readlines()
-        lines2 = self.tar.extractfile("pep.txt").readlines()
-        self.assert_(lines1 == lines2, "readline() does not work correctly")
-
-    def _test_seek(self):
-        """Test seek() method of _FileObject, incl. random reading.
-        """
-        self.tar.extract("pep.txt", TEMPDIR)
-        data = file(join(TEMPDIR, "pep.txt"), "rb").read()
-
-        tarinfo = self.tar.getmember("pep.txt")
-        fobj = self.tar.extractfile(tarinfo)
-
-        text = fobj.read() #@UnusedVariable
-        fobj.seek(0)
-        self.assert_(0 == fobj.tell(),
-                     "seek() to file's start failed")
-        fobj.seek(4096, 0)
-        self.assert_(4096 == fobj.tell(),
-                     "seek() to absolute position failed")
-        fobj.seek(-2048, 1)
-        self.assert_(2048 == fobj.tell(),
-                     "seek() to negative relative position failed")
-        fobj.seek(2048, 1)
-        self.assert_(4096 == fobj.tell(),
-                     "seek() to positive relative position failed")
-        s = fobj.read(10)
-        self.assert_(s == data[4096:4106],
-                     "read() after seek failed")
-        fobj.seek(0, 2)
-        self.assert_(tarinfo.size == fobj.tell(),
-                     "seek() to file's end failed")
-        self.assert_(fobj.read() == "",
-                     "read() at file's end did not return empty string")
-        fobj.seek(-tarinfo.size, 2)
-        self.assert_(0 == fobj.tell(),
-                     "relative seek() to file's start failed")
-        fobj.seek(1024)
-        s1 = fobj.readlines()
-        fobj.seek(1024)
-        s2 = fobj.readlines()
-        self.assert_(s1 == s2,
-                     "readlines() after seek failed")
-        fobj.close()
-
-class FileLogger:
-    """Like a file but log requests"""
-    def __init__(self, infp):
-        self.infp = infp
-    def read(self, length):
-        #print "Reading ", length
-        return self.infp.read(length)
-    def seek(self, position):
-        #print "Seeking to ", position
-        return self.infp.seek(position)
-    def tell(self):
-        #print "Telling"
-        return self.infp.tell()
-    def close(self):
-        #print "Closing"
-        return self.infp.close()
-
+
+class TarfileTest(unittest.TestCase):
+    def test_cached_ops(self):
+        self.assertTrue(tarfile.grp is cached_ops)
+        self.assertTrue(tarfile.pwd is cached_ops)
 
 if __name__ == "__main__":
     unittest.main()

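Reviewer note: the rewritten test pins down the one local customization that
survives the move to stdlib tarfile: user/group lookups go through duplicity's
caching wrappers.  A sketch of the idea (assumed shape, not the literal
duplicity/tarfile.py):

    from __future__ import absolute_import  # "tarfile" below means the stdlib module
    import tarfile
    from tarfile import *  # re-export the stdlib API as duplicity.tarfile
    from duplicity import cached_ops
    tarfile.grp = tarfile.pwd = cached_ops  # cache getgrgid()/getpwuid() lookups
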
=== modified file 'testing/tests/test_unicode.py'
--- testing/tests/test_unicode.py	2013-12-27 06:39:00 +0000
+++ testing/tests/test_unicode.py	2014-04-16 20:51:42 +0000
@@ -29,13 +29,11 @@
         if 'duplicity' in sys.modules:
             del(sys.modules["duplicity"])
 
-    @patch('gettext.translation')
+    @patch('gettext.install')
     def test_module_install(self, gettext_mock):
         """Make sure we convert translations to unicode"""
         import duplicity
-        gettext_mock.assert_called_once_with('duplicity', fallback=True)
-        gettext_mock.return_value.install.assert_called_once_with(unicode=True)
-        assert ngettext is gettext_mock.return_value.ungettext
+        gettext_mock.assert_called_once_with('duplicity', unicode=True, names=['ngettext'])
 
 if __name__ == "__main__":
     unittest.main()

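Reviewer note: the updated mock matches the simpler gettext setup 2.6 allows
in duplicity/__init__.py, a single install() call instead of building a
translation object by hand:

    import gettext
    # Installs _() and ngettext() into builtins with unicode output; the
    # 'names' keyword is what pulls ngettext in alongside _().
    gettext.install('duplicity', unicode=True, names=['ngettext'])
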
=== removed file 'testing/testtar.tar'
Binary files testing/testtar.tar	2002-10-29 01:49:46 +0000 and testing/testtar.tar	1970-01-01 00:00:00 +0000 differ
