duplicity-team team mailing list archive
-
duplicity-team team
-
Mailing list archive
-
Message #04866
[Merge] lp:~mgorse/duplicity/0.8-series into lp:duplicity
Mgorse has proposed merging lp:~mgorse/duplicity/0.8-series into lp:duplicity.
Requested reviews:
duplicity-team (duplicity-team)
For more details, see:
https://code.launchpad.net/~mgorse/duplicity/0.8-series/+merge/359864
First pass at a python 3 port.
--
Your team duplicity-team is requested to review the proposed merge of lp:~mgorse/duplicity/0.8-series into lp:duplicity.
=== modified file 'bin/duplicity'
--- bin/duplicity 2018-10-11 22:38:11 +0000
+++ bin/duplicity 2018-11-29 19:03:43 +0000
@@ -27,6 +27,11 @@
# Please send mail to me or the mailing list if you find bugs or have
# any suggestions.
+from builtins import filter
+from builtins import next
+from builtins import map
+from builtins import range
+from builtins import object
import duplicity.errors
import copy
import gzip
@@ -34,7 +39,7 @@
import platform
import re
import resource
-import statvfs
+from os import statvfs
import sys
import threading
import time
@@ -62,7 +67,7 @@
from duplicity import util
if u'--pydevd' in sys.argv or os.getenv(u'PYDEVD', None):
- import pydevd # @UnresolvedImport # pylint: disable=import-error
+ import pydevd # pylint: disable=import-error
pydevd.settrace()
# In a dev environment the path is screwed so fix it.
base = sys.path.pop(0)
@@ -81,7 +86,8 @@
# in non-ascii characters.
import getpass
import locale
- message = message.encode(locale.getpreferredencoding(), u'replace')
+ if sys.version_info.major == 2:
+ message = message.encode(locale.getpreferredencoding(), u'replace')
return getpass.getpass(message)
@@ -733,7 +739,7 @@
@param col_stats: collection status
"""
if globals.restore_dir:
- index = tuple(globals.restore_dir.split(u"/"))
+ index = tuple(globals.restore_dir.split(b"/"))
else:
index = ()
time = globals.restore_time or dup_time.curtime
@@ -1029,7 +1035,7 @@
return
for src_chain in src_chainlist:
try:
- tgt_chain = filter(lambda chain: chain.start_time == src_chain.start_time, tgt_chainlist)[0]
+ tgt_chain = list(filter(lambda chain: chain.start_time == src_chain.start_time, tgt_chainlist))[0]
except IndexError:
tgt_chain = None
@@ -1064,7 +1070,7 @@
sorted(tgt_chainlist, key=lambda chain: chain.start_time)
for src_chain in src_chainlist:
try:
- tgt_chain = filter(lambda chain: chain.start_time == src_chain.start_time, tgt_chainlist)[0]
+ tgt_chain = list(filter(lambda chain: chain.start_time == src_chain.start_time, tgt_chainlist))[0]
except IndexError:
tgt_chain = None
@@ -1085,7 +1091,7 @@
mf_filename = file_naming.get(src_set.type, manifest=True)
mf_tdp = dup_temp.new_tempduppath(file_naming.parse(mf_filename))
mf = manifest.Manifest(fh=mf_tdp.filtered_open(mode=u'wb'))
- for i, filename in src_set.volume_name_dict.iteritems():
+ for i, filename in src_set.volume_name_dict.items():
log.Notice(_(u"Replicating %s.") % (filename,))
fileobj = restore_get_enc_fileobj(globals.src_backend, filename, rmf.volume_info_dict[i])
filename = file_naming.get(src_set.type, i, encrypted=globals.encryption, gzipped=globals.compression)
@@ -1171,7 +1177,7 @@
file = open(filename, u"wb")
while True:
try:
- data = src_iter.next().data
+ data = next(src_iter).data
except StopIteration:
break
file.write(data)
@@ -1207,7 +1213,7 @@
u"""
Copy remote file fn to local cache.
"""
- class Block:
+ class Block(object):
u"""
Data block to return from SrcIter
"""
@@ -1215,7 +1221,7 @@
def __init__(self, data):
self.data = data
- class SrcIter:
+ class SrcIter(object):
u"""
Iterate over source and return Block of data.
"""
@@ -1223,12 +1229,15 @@
def __init__(self, fileobj):
self.fileobj = fileobj
- def next(self):
+ def __next__(self):
try:
res = Block(self.fileobj.read(self.get_read_size()))
except Exception:
if hasattr(self.fileobj, u'name'):
name = self.fileobj.name
+ # name may be a path
+ if hasattr(name, u'name'):
+ name = name.name
else:
name = None
log.FatalError(_(u"Failed to read %s: %s") %
@@ -1243,7 +1252,7 @@
return 128 * 1024
def get_footer(self):
- return u""
+ return b""
log.Notice(_(u"Copying %s to local cache.") % util.fsdecode(fn))
@@ -1270,8 +1279,8 @@
# we have the list of metafiles on both sides. remote is always
# authoritative. figure out which are local spurious (should not
# be there) and missing (should be there but are not).
- local_keys = local_metafiles.keys()
- remote_keys = remote_metafiles.keys()
+ local_keys = list(local_metafiles.keys())
+ remote_keys = list(remote_metafiles.keys())
local_missing = []
local_spurious = []
@@ -1361,7 +1370,7 @@
log.ErrorCode.get_freespace_failed)
# Calculate space we need for at least 2 volumes of full or inc
# plus about 30% of one volume for the signature files.
- freespace = stats[statvfs.F_FRSIZE] * stats[statvfs.F_BAVAIL]
+ freespace = stats.f_frsize * stats.f_bavail
needspace = (((globals.async_concurrency + 1) * globals.volsize) +
int(0.30 * globals.volsize))
if freespace < needspace:
@@ -1398,7 +1407,7 @@
log.Log(u'=' * 80, verbosity)
-class Restart:
+class Restart(object):
u"""
Class to aid in restart of inc or full backup.
Instance in globals.restart if restart in progress.
@@ -1487,7 +1496,7 @@
# determine what action we're performing and process command line
action = commandline.ProcessCommandLine(sys.argv[1:])
- globals.lockpath = os.path.join(globals.archive_dir_path.name, u"lockfile")
+ globals.lockpath = os.path.join(globals.archive_dir_path.name, b"lockfile")
globals.lockfile = fasteners.process_lock.InterProcessLock(globals.lockpath)
log.Debug(_(u"Acquiring lockfile %s") % globals.lockpath)
if not globals.lockfile.acquire(blocking=False):
@@ -1566,7 +1575,8 @@
log.Notice(_(u"Last full backup date:") + u" " + dup_time.timetopretty(last_full_time))
else:
log.Notice(_(u"Last full backup date: none"))
- if not globals.restart and action == u"inc" and last_full_time < globals.full_force_time:
+ if not globals.restart and action == u"inc" and globals.full_force_time is not None and \
+ last_full_time < globals.full_force_time:
log.Notice(_(u"Last full backup is too old, forcing full backup"))
action = u"full"
log.PrintCollectionStatus(col_stats)
=== modified file 'bin/rdiffdir'
--- bin/rdiffdir 2018-10-12 01:44:49 +0000
+++ bin/rdiffdir 2018-11-29 19:03:43 +0000
@@ -26,6 +26,7 @@
# any suggestions.
from __future__ import print_function
+from builtins import str
import sys
import getopt
import gzip
=== modified file 'duplicity/__init__.py'
--- duplicity/__init__.py 2018-10-10 20:25:04 +0000
+++ duplicity/__init__.py 2018-11-29 19:03:43 +0000
@@ -19,5 +19,9 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+import sys
import gettext
-gettext.install(u'duplicity', unicode=True, names=[u'ngettext'])
+if sys.version_info.major >= 3:
+ gettext.install(u'duplicity', names=[u'ngettext'])
+else:
+ gettext.install(u'duplicity', unicode=True, names=[u'ngettext'])
=== modified file 'duplicity/_librsyncmodule.c'
--- duplicity/_librsyncmodule.c 2017-07-11 14:55:38 +0000
+++ duplicity/_librsyncmodule.c 2018-11-29 19:03:43 +0000
@@ -89,7 +89,7 @@
rs_buffers_t buf;
rs_result result;
- if (!PyArg_ParseTuple(args, "s#:cycle", &inbuf, &inbuf_length))
+ if (!PyArg_ParseTuple(args, "y#:cycle", &inbuf, &inbuf_length))
return NULL;
buf.next_in = inbuf;
@@ -105,7 +105,7 @@
return NULL;
}
- return Py_BuildValue("(ils#)", (result == RS_DONE),
+ return Py_BuildValue("(ily#)", (result == RS_DONE),
(long)inbuf_length - (long)buf.avail_in,
outbuf, RS_JOB_BLOCKSIZE - (long)buf.avail_out);
}
@@ -169,7 +169,7 @@
rs_buffers_t buf;
rs_result result;
- if (!PyArg_ParseTuple(args,"s#:new_deltamaker", &sig_string, &sig_length))
+ if (!PyArg_ParseTuple(args,"y#:new_deltamaker", &sig_string, &sig_length))
return NULL;
dm = PyObject_New(_librsync_DeltaMakerObject, &_librsync_DeltaMakerType);
@@ -222,7 +222,7 @@
rs_buffers_t buf;
rs_result result;
- if (!PyArg_ParseTuple(args, "s#:cycle", &inbuf, &inbuf_length))
+ if (!PyArg_ParseTuple(args, "y#:cycle", &inbuf, &inbuf_length))
return NULL;
buf.next_in = inbuf;
@@ -237,7 +237,7 @@
return NULL;
}
- return Py_BuildValue("(ils#)", (result == RS_DONE),
+ return Py_BuildValue("(ily#)", (result == RS_DONE),
(long)inbuf_length - (long)buf.avail_in,
outbuf, RS_JOB_BLOCKSIZE - (long)buf.avail_out);
}
@@ -351,7 +351,7 @@
rs_buffers_t buf;
rs_result result;
- if (!PyArg_ParseTuple(args, "s#:cycle", &inbuf, &inbuf_length))
+ if (!PyArg_ParseTuple(args, "y#:cycle", &inbuf, &inbuf_length))
return NULL;
buf.next_in = inbuf;
@@ -366,7 +366,7 @@
return NULL;
}
- return Py_BuildValue("(ils#)", (result == RS_DONE),
+ return Py_BuildValue("(ily#)", (result == RS_DONE),
(long)inbuf_length - (long)buf.avail_in,
outbuf, RS_JOB_BLOCKSIZE - (long)buf.avail_out);
}
=== modified file 'duplicity/asyncscheduler.py'
--- duplicity/asyncscheduler.py 2018-09-11 21:35:37 +0000
+++ duplicity/asyncscheduler.py 2018-11-29 19:03:43 +0000
@@ -25,6 +25,10 @@
dependency guarantees.
"""
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
+import gettext
import duplicity
from duplicity import log
from duplicity.dup_threading import require_threading
@@ -36,7 +40,7 @@
threading = duplicity.dup_threading.threading_module()
-class AsyncScheduler:
+class AsyncScheduler(object):
u"""
Easy-to-use scheduler of function calls to be executed
concurrently. A very simple dependency mechanism exists in the
=== modified file 'duplicity/backend.py'
--- duplicity/backend.py 2018-10-11 21:54:47 +0000
+++ duplicity/backend.py 2018-11-29 19:03:43 +0000
@@ -24,6 +24,11 @@
intended to be used by the backends themselves.
"""
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import range
+from builtins import object
import errno
import os
import sys
@@ -34,8 +39,9 @@
import gettext
import re
import types
-import urllib
-import urlparse
+import urllib.request # pylint: disable=import-error
+import urllib.parse # pylint: disable=import-error
+import urllib.error # pylint: disable=import-error
from duplicity import dup_temp
from duplicity import file_naming
@@ -56,7 +62,6 @@
import duplicity.backends
-
# todo: this should really NOT be done here
socket.setdefaulttimeout(globals.timeout)
@@ -226,7 +231,7 @@
return obj
-class ParsedUrl:
+class ParsedUrl(object):
u"""
Parse the given URL as a duplicity backend URL.
@@ -242,7 +247,7 @@
# Python < 2.6.5 still examine urlparse.uses_netlock when parsing urls,
# so stuff our custom list in there before we parse.
- urlparse.uses_netloc = uses_netloc
+ urllib.parse.uses_netloc = uses_netloc
# While useful in some cases, the fact is that the urlparser makes
# all the properties in the URL deferred or lazy. This means that
@@ -250,7 +255,7 @@
# problems here, so they will be caught early.
try:
- pu = urlparse.urlparse(url_string)
+ pu = urllib.parse.urlparse(url_string)
except Exception:
raise InvalidBackendURL(u"Syntax error in: %s" % url_string)
@@ -267,7 +272,7 @@
try:
self.path = pu.path
if self.path:
- self.path = urllib.unquote(self.path)
+ self.path = urllib.parse.unquote(self.path)
except Exception:
raise InvalidBackendURL(u"Syntax error (path) in: %s" % url_string)
@@ -276,7 +281,7 @@
except Exception:
raise InvalidBackendURL(u"Syntax error (username) in: %s" % url_string)
if self.username:
- self.username = urllib.unquote(pu.username)
+ self.username = urllib.parse.unquote(pu.username)
else:
self.username = None
@@ -285,7 +290,7 @@
except Exception:
raise InvalidBackendURL(u"Syntax error (password) in: %s" % url_string)
if self.password:
- self.password = urllib.unquote(self.password)
+ self.password = urllib.parse.unquote(self.password)
else:
self.password = None
@@ -386,7 +391,7 @@
return util.escape(f.uc_name)
else:
return util.escape(f)
- extra = u' '.join([operation] + [make_filename(x) for x in args if x])
+ extra = u' '.join([operation] + [make_filename(x) for x in args if (x and isinstance(x, str))])
log.FatalError(_(u"Giving up after %s attempts. %s: %s")
% (n, e.__class__.__name__,
util.uexc(e)), code=code, extra=extra)
@@ -496,7 +501,7 @@
return 0, u'', u''
except (KeyError, ValueError):
raise BackendException(u"Error running '%s': returned %d, with output:\n%s" %
- (logstr, result, stdout + u'\n' + stderr))
+ (logstr, result, stdout.decode() + u'\n' + stderr.decode()))
return result, stdout, stderr
@@ -564,7 +569,7 @@
"""
def tobytes(filename):
u"Convert a (maybe unicode) filename to bytes"
- if isinstance(filename, unicode):
+ if isinstance(filename, str):
# There shouldn't be any encoding errors for files we care
# about, since duplicity filenames are ascii. But user files
# may be in the same directory. So just replace characters.
=== modified file 'duplicity/backends/_boto_multi.py'
--- duplicity/backends/_boto_multi.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/_boto_multi.py 2018-11-29 19:03:43 +0000
@@ -20,10 +20,15 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import range
+from past.utils import old_div
import os
import sys
import threading
-import Queue
+import queue
import time
import traceback
@@ -63,7 +68,7 @@
try:
args = self.queue.get(True, 1)
progress.report_transfer(args[0], args[1])
- except Queue.Empty as e:
+ except queue.Empty as e:
pass
@@ -122,7 +127,7 @@
if bytes < chunk_size:
chunks = 1
else:
- chunks = bytes / chunk_size
+ chunks = old_div(bytes, chunk_size)
if (bytes % chunk_size):
chunks += 1
@@ -211,7 +216,8 @@
log.Debug((u"{name}: Uploaded chunk {chunk}"
u"at roughly {speed} bytes/second").format(name=worker_name,
chunk=offset + 1,
- speed=(bytes / max(1, abs(end - start)))))
+ speed=(old_div(bytes, max(1,
+ abs(end - start))))))
break
conn.close()
conn = None
=== modified file 'duplicity/backends/_boto_single.py'
--- duplicity/backends/_boto_single.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/_boto_single.py 2018-11-29 19:03:43 +0000
@@ -19,6 +19,9 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from __future__ import division
+from builtins import str
+from past.utils import old_div
import os
import time
@@ -242,7 +245,7 @@
self.upload(source_path.name, key, headers)
upload_end = time.time()
total_s = abs(upload_end - upload_start) or 1 # prevent a zero value!
- rough_upload_speed = os.path.getsize(source_path.name) / total_s
+ rough_upload_speed = old_div(os.path.getsize(source_path.name), total_s)
log.Debug(u"Uploaded %s/%s to %s Storage at roughly %f bytes/second" %
(self.straight_url, remote_filename, storage_class,
rough_upload_speed))
=== modified file 'duplicity/backends/_cf_cloudfiles.py'
--- duplicity/backends/_cf_cloudfiles.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/_cf_cloudfiles.py 2018-11-29 19:03:43 +0000
@@ -18,6 +18,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from builtins import str
import os
import duplicity.backend
=== modified file 'duplicity/backends/_cf_pyrax.py'
--- duplicity/backends/_cf_pyrax.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/_cf_pyrax.py 2018-11-29 19:03:43 +0000
@@ -18,6 +18,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from builtins import str
import os
import duplicity.backend
=== modified file 'duplicity/backends/adbackend.py'
--- duplicity/backends/adbackend.py 2018-10-11 21:54:47 +0000
+++ duplicity/backends/adbackend.py 2018-11-29 19:03:43 +0000
@@ -1,4 +1,3 @@
-from __future__ import print_function
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2016 Stefan Breunig <stefan-duplicity@xxxxxxxxxxx>
@@ -20,6 +19,10 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from __future__ import print_function
+from __future__ import division
+from builtins import input
+from past.utils import old_div
import os.path
import json
import sys
@@ -147,7 +150,7 @@
print(authorization_url)
print(u'')
- redirected_to = (raw_input(u'URL of the resulting page: ')
+ redirected_to = (input(u'URL of the resulting page: ')
.replace(u'http://', u'https://', 1)).strip()
token = self.http_client.fetch_token(
@@ -319,7 +322,7 @@
u'waiting for %d seconds to see if Amazon Drive finished the '
u'upload anyway' % (remote_filename, response.status_code,
globals.timeout))
- tries = globals.timeout / 15
+ tries = old_div(globals.timeout, 15)
while tries >= 0:
tries -= 1
time.sleep(15)
@@ -390,7 +393,7 @@
self.names_to_ids = {f[u'name']: f[u'id'] for f in files}
- return self.names_to_ids.keys()
+ return list(self.names_to_ids.keys())
def _delete(self, remote_filename):
u"""Delete file from Amazon Drive"""
=== modified file 'duplicity/backends/azurebackend.py'
--- duplicity/backends/azurebackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/azurebackend.py 2018-11-29 19:03:43 +0000
@@ -19,6 +19,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from builtins import str
import os
import duplicity.backend
@@ -110,7 +111,7 @@
pass
except Exception as e:
log.FatalError(u"Could not create Azure container: %s"
- % unicode(e.message).split(u'\n', 1)[0],
+ % str(e).split(u'\n', 1)[0],
log.ErrorCode.connection_failed)
def _put(self, source_path, remote_filename):
=== modified file 'duplicity/backends/b2backend.py'
--- duplicity/backends/b2backend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/b2backend.py 2018-11-29 19:03:43 +0000
@@ -22,9 +22,12 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
import os
import hashlib
-from urllib import quote_plus
+from urllib.parse import quote_plus # pylint: disable=import-error
import duplicity.backend
from duplicity.errors import BackendException, FatalBackendException
@@ -32,7 +35,7 @@
from duplicity import progress
-class B2ProgressListener:
+class B2ProgressListener(object):
def set_total_bytes(self, total_byte_count):
self.total_byte_count = total_byte_count
=== modified file 'duplicity/backends/dpbxbackend.py'
--- duplicity/backends/dpbxbackend.py 2018-10-11 21:54:47 +0000
+++ duplicity/backends/dpbxbackend.py 2018-11-29 19:03:43 +0000
@@ -1,4 +1,3 @@
-from __future__ import print_function
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2013 jno <jno@xxxxxxxxx>
@@ -26,13 +25,22 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-import StringIO
+from __future__ import print_function
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import input
+from builtins import str
+from past.utils import old_div
+import io
import os
import re
import sys
import time
import traceback
-import urllib
+import urllib.request # pylint: disable=import-error
+import urllib.parse # pylint: disable=import-error
+import urllib.error # pylint: disable=import-error
from duplicity import log, globals
from duplicity import progress
@@ -41,7 +49,6 @@
from requests.exceptions import ConnectionError
import duplicity.backend
-
# This is chunk size for upload using Dpbx chumked API v2. It doesn't
# make sense to make it much large since Dpbx SDK uses connection pool
# internally. So multiple chunks will sent using same keep-alive socket
@@ -57,7 +64,7 @@
def log_exception(e):
log.Error(u'Exception [%s]:' % (e,))
- f = StringIO.StringIO()
+ f = io.StringIO()
traceback.print_exc(file=f)
f.seek(0)
for s in f.readlines():
@@ -155,7 +162,7 @@
print(u"2. Click \"Allow\" (you might have to log in first).")
print(u"3. Copy the authorization code.")
print(u'-' * 72)
- auth_code = raw_input(u"Enter the authorization code here: ").strip()
+ auth_code = input(u"Enter the authorization code here: ").strip()
try:
log.Debug(u'dpbx,auth_flow.finish(%s)' % auth_code)
authresult = auth_flow.finish(auth_code)
@@ -202,7 +209,7 @@
@command()
def _put(self, source_path, remote_filename):
- remote_dir = urllib.unquote(self.parsed_url.path.lstrip(u'/'))
+ remote_dir = urllib.parse.unquote(self.parsed_url.path.lstrip(u'/'))
remote_path = u'/' + os.path.join(remote_dir, remote_filename).rstrip()
file_size = os.path.getsize(source_path.name)
@@ -341,7 +348,7 @@
# reupload
log.Info(u'dpbx: sleeping a bit before chunk retry')
time.sleep(30)
- current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE / 5
+ current_chunk_size = old_div(DPBX_UPLOAD_CHUNK_SIZE, 5)
requested_offset = None
continue
@@ -361,7 +368,7 @@
if not self.user_authenticated():
self.login()
- remote_dir = urllib.unquote(self.parsed_url.path.lstrip(u'/'))
+ remote_dir = urllib.parse.unquote(self.parsed_url.path.lstrip(u'/'))
remote_path = u'/' + os.path.join(remote_dir, remote_filename).rstrip()
log.Debug(u'dpbx,files_download(%s)' % remote_path)
@@ -396,7 +403,7 @@
# Do a long listing to avoid connection reset
if not self.user_authenticated():
self.login()
- remote_dir = u'/' + urllib.unquote(self.parsed_url.path.lstrip(u'/')).rstrip()
+ remote_dir = u'/' + urllib.parse.unquote(self.parsed_url.path.lstrip(u'/')).rstrip()
log.Debug(u'dpbx.files_list_folder(%s)' % remote_dir)
res = []
@@ -426,7 +433,7 @@
if not self.user_authenticated():
self.login()
- remote_dir = urllib.unquote(self.parsed_url.path.lstrip(u'/'))
+ remote_dir = urllib.parse.unquote(self.parsed_url.path.lstrip(u'/'))
remote_path = u'/' + os.path.join(remote_dir, filename).rstrip()
log.Debug(u'dpbx.files_delete(%s)' % remote_path)
@@ -445,7 +452,7 @@
def _query(self, filename):
if not self.user_authenticated():
self.login()
- remote_dir = urllib.unquote(self.parsed_url.path.lstrip(u'/'))
+ remote_dir = urllib.parse.unquote(self.parsed_url.path.lstrip(u'/'))
remote_path = u'/' + os.path.join(remote_dir, filename).rstrip()
log.Debug(u'dpbx.files_get_metadata(%s)' % remote_path)
=== modified file 'duplicity/backends/gdocsbackend.py'
--- duplicity/backends/gdocsbackend.py 2018-10-11 21:54:47 +0000
+++ duplicity/backends/gdocsbackend.py 2018-11-29 19:03:43 +0000
@@ -1,4 +1,3 @@
-from __future__ import print_function
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2011 Carlos Abalde <carlos.abalde@xxxxxxxxx>
@@ -19,9 +18,16 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from __future__ import print_function
+from future import standard_library
+standard_library.install_aliases()
+from builtins import input
+from builtins import str
import os.path
import string
-import urllib
+import urllib.request # pylint: disable=import-error
+import urllib.parse # pylint: disable=import-error
+import urllib.error # pylint: disable=import-error
import duplicity.backend
from duplicity.errors import BackendException
@@ -138,7 +144,7 @@
print(u'A captcha challenge in required. Please visit ' + challenge.captcha_url)
answer = None
while not answer:
- answer = raw_input(u'Answer to the challenge? ')
+ answer = input(u'Answer to the challenge? ')
self._authorize(email, password, challenge.captcha_token, answer)
except gdata.client.BadAuthentication:
raise BackendException(
@@ -158,7 +164,7 @@
else:
uri += u'?showfolders=true'
if title:
- uri += u'&title=' + urllib.quote(title) + u'&title-exact=true'
+ uri += u'&title=' + urllib.parse.quote(title) + u'&title-exact=true'
# Fetch entries.
entries = self.client.get_all_resources(uri=uri)
=== modified file 'duplicity/backends/hsibackend.py'
--- duplicity/backends/hsibackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/hsibackend.py 2018-11-29 19:03:43 +0000
@@ -19,8 +19,10 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from builtins import range
import os
import duplicity.backend
+from duplicity import util
hsi_command = u"hsi"
@@ -36,24 +38,30 @@
self.remote_prefix = u""
def _put(self, source_path, remote_filename):
- commandline = u'%s "put %s : %s%s"' % (hsi_command, source_path.name, self.remote_prefix, remote_filename)
+ if isinstance(remote_filename, b"".__class__):
+ remote_filename = util.fsdecode(remote_filename)
+ commandline = u'%s "put %s : %s%s"' % (hsi_command, source_path.uc_name, self.remote_prefix, remote_filename)
self.subprocess_popen(commandline)
def _get(self, remote_filename, local_path):
- commandline = u'%s "get %s : %s%s"' % (hsi_command, local_path.name, self.remote_prefix, remote_filename)
+ if isinstance(remote_filename, b"".__class__):
+ remote_filename = util.fsdecode(remote_filename)
+ commandline = u'%s "get %s : %s%s"' % (hsi_command, local_path.uc_name, self.remote_prefix, remote_filename)
self.subprocess_popen(commandline)
def _list(self):
import sys
commandline = u'%s "ls -l %s"' % (hsi_command, self.remote_dir)
l = self.subprocess_popen(commandline)[2]
- l = l.split(os.linesep)[3:]
+ l = l.split(os.linesep.encode())[3:]
for i in range(0, len(l)):
if l[i]:
l[i] = l[i].split()[-1]
return [x for x in l if x]
def _delete(self, filename):
+ if isinstance(filename, b"".__class__):
+ filename = util.fsdecode(filename)
commandline = u'%s "rm %s%s"' % (hsi_command, self.remote_prefix, filename)
self.subprocess_popen(commandline)
=== modified file 'duplicity/backends/hubicbackend.py'
--- duplicity/backends/hubicbackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/hubicbackend.py 2018-11-29 19:03:43 +0000
@@ -18,6 +18,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from builtins import str
import os
from duplicity import log
=== modified file 'duplicity/backends/imapbackend.py'
--- duplicity/backends/imapbackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/imapbackend.py 2018-11-29 19:03:43 +0000
@@ -20,15 +20,23 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+import sys
+from future import standard_library
+standard_library.install_aliases()
+from builtins import input
import imaplib
import re
import os
import time
import socket
-import StringIO
-import rfc822
+import io
import getpass
import email
+from email.parser import Parser
+try:
+ from email.policy import default # pylint: disable=import-error
+except:
+ pass
import duplicity.backend
from duplicity import globals
@@ -48,7 +56,7 @@
# Set the username
if (parsed_url.username is None):
- username = raw_input(u'Enter account userid: ')
+ username = input(u'Enter account userid: ')
else:
username = parsed_url.username
@@ -218,10 +226,12 @@
for msg in list:
if (len(msg) == 1):
continue
- io = StringIO.StringIO(msg[1]) # pylint: disable=unsubscriptable-object
- m = rfc822.Message(io)
- subj = m.getheader(u"subject")
- header_from = m.getheader(u"from")
+ if sys.version_info.major >= 3:
+ headers = Parser(policy=default).parsestr(msg[1].decode(u'utf-8', u'replace')) # pylint: disable=unsubscriptable-object
+ else:
+ headers = Parser().parsestr(msg[1]) # pylint: disable=unsubscriptable-object
+ subj = headers[u"subject"]
+ header_from = headers[u"from"]
# Catch messages with empty headers which cause an exception.
if (not (header_from is None)):
=== modified file 'duplicity/backends/lftpbackend.py'
--- duplicity/backends/lftpbackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/lftpbackend.py 2018-11-29 19:03:43 +0000
@@ -24,10 +24,14 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from future import standard_library
+standard_library.install_aliases()
import os
import os.path
import re
-import urllib
+import urllib.request # pylint: disable=import-error
+import urllib.parse # pylint: disable=import-error
+import urllib.error # pylint: disable=import-error
try:
from shlex import quote as cmd_quote
except ImportError:
@@ -37,6 +41,7 @@
from duplicity import globals
from duplicity import log
from duplicity import tempdir
+from duplicity import util
class LFTPBackend(duplicity.backend.Backend):
@@ -104,34 +109,35 @@
break
# save config into a reusable temp file
- self.tempfile, self.tempname = tempdir.default().mkstemp()
- os.write(self.tempfile, u"set ssl:verify-certificate " +
- (u"false" if globals.ssl_no_check_certificate else u"true") + u"\n")
+ self.tempfd, self.tempname = tempdir.default().mkstemp()
+ self.tempfile = os.fdopen(self.tempfd, u"w")
+ self.tempfile.write(u"set ssl:verify-certificate " +
+ (u"false" if globals.ssl_no_check_certificate else u"true") + u"\n")
if self.cacert_file:
- os.write(self.tempfile, u"set ssl:ca-file " + cmd_quote(self.cacert_file) + u"\n")
+ self.tempfile.write(u"set ssl:ca-file " + cmd_quote(self.cacert_file) + u"\n")
if globals.ssl_cacert_path:
- os.write(self.tempfile, u"set ssl:ca-path " + cmd_quote(globals.ssl_cacert_path) + u"\n")
+ self.tempfile.write(u"set ssl:ca-path " + cmd_quote(globals.ssl_cacert_path) + u"\n")
if self.parsed_url.scheme == u'ftps':
- os.write(self.tempfile, u"set ftp:ssl-allow true\n")
- os.write(self.tempfile, u"set ftp:ssl-protect-data true\n")
- os.write(self.tempfile, u"set ftp:ssl-protect-list true\n")
+ self.tempfile.write(u"set ftp:ssl-allow true\n")
+ self.tempfile.write(u"set ftp:ssl-protect-data true\n")
+ self.tempfile.write(u"set ftp:ssl-protect-list true\n")
elif self.parsed_url.scheme == u'ftpes':
- os.write(self.tempfile, u"set ftp:ssl-force on\n")
- os.write(self.tempfile, u"set ftp:ssl-protect-data on\n")
- os.write(self.tempfile, u"set ftp:ssl-protect-list on\n")
+ self.tempfile.write(u"set ftp:ssl-force on\n")
+ self.tempfile.write(u"set ftp:ssl-protect-data on\n")
+ self.tempfile.write(u"set ftp:ssl-protect-list on\n")
else:
- os.write(self.tempfile, u"set ftp:ssl-allow false\n")
- os.write(self.tempfile, u"set http:use-propfind true\n")
- os.write(self.tempfile, u"set net:timeout %s\n" % globals.timeout)
- os.write(self.tempfile, u"set net:max-retries %s\n" % globals.num_retries)
- os.write(self.tempfile, u"set ftp:passive-mode %s\n" % self.conn_opt)
+ self.tempfile.write(u"set ftp:ssl-allow false\n")
+ self.tempfile.write(u"set http:use-propfind true\n")
+ self.tempfile.write(u"set net:timeout %s\n" % globals.timeout)
+ self.tempfile.write(u"set net:max-retries %s\n" % globals.num_retries)
+ self.tempfile.write(u"set ftp:passive-mode %s\n" % self.conn_opt)
if log.getverbosity() >= log.DEBUG:
- os.write(self.tempfile, u"debug\n")
+ self.tempfile.write(u"debug\n")
if self.parsed_url.scheme == u'ftpes':
- os.write(self.tempfile, u"open %s %s\n" % (self.authflag, self.url_string.replace(u'ftpes', u'ftp')))
+ self.tempfile.write(u"open %s %s\n" % (self.authflag, self.url_string.replace(u'ftpes', u'ftp')))
else:
- os.write(self.tempfile, u"open %s %s\n" % (self.authflag, self.url_string))
- os.close(self.tempfile)
+ self.tempfile.write(u"open %s %s\n" % (self.authflag, self.url_string))
+ os.close(self.tempfd)
# print settings in debug mode
if log.getverbosity() >= log.DEBUG:
f = open(self.tempname, u'r')
@@ -139,11 +145,13 @@
u"%s" % f.read())
def _put(self, source_path, remote_filename):
+ if isinstance(remote_filename, b"".__class__):
+ remote_filename = util.fsdecode(remote_filename)
commandline = u"lftp -c \"source %s; mkdir -p %s; put %s -o %s\"" % (
self.tempname,
cmd_quote(self.remote_path),
- cmd_quote(source_path.name),
- cmd_quote(self.remote_path) + remote_filename
+ cmd_quote(source_path.uc_name),
+ cmd_quote(self.remote_path) + util.fsdecode(remote_filename)
)
log.Debug(u"CMD: %s" % commandline)
s, l, e = self.subprocess_popen(commandline)
@@ -154,10 +162,12 @@
u"%s" % (l))
def _get(self, remote_filename, local_path):
+ if isinstance(remote_filename, b"".__class__):
+ remote_filename = util.fsdecode(remote_filename)
commandline = u"lftp -c \"source %s; get %s -o %s\"" % (
cmd_quote(self.tempname),
cmd_quote(self.remote_path) + remote_filename,
- cmd_quote(local_path.name)
+ cmd_quote(local_path.uc_name)
)
log.Debug(u"CMD: %s" % commandline)
_, l, e = self.subprocess_popen(commandline)
@@ -169,7 +179,7 @@
def _list(self):
# Do a long listing to avoid connection reset
# remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()
- remote_dir = urllib.unquote(self.parsed_url.path)
+ remote_dir = urllib.parse.unquote(self.parsed_url.path)
# print remote_dir
quoted_path = cmd_quote(self.remote_path)
# failing to cd into the folder might be because it was not created already
@@ -185,13 +195,13 @@
u"%s" % (l))
# Look for our files as the last element of a long list line
- return [x.split()[-1] for x in l.split(u'\n') if x]
+ return [x.split()[-1] for x in l.split(b'\n') if x]
def _delete(self, filename):
commandline = u"lftp -c \"source %s; cd %s; rm %s\"" % (
cmd_quote(self.tempname),
cmd_quote(self.remote_path),
- cmd_quote(filename)
+ cmd_quote(util.fsdecode(filename))
)
log.Debug(u"CMD: %s" % commandline)
_, l, e = self.subprocess_popen(commandline)
=== modified file 'duplicity/backends/mediafirebackend.py'
--- duplicity/backends/mediafirebackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/mediafirebackend.py 2018-11-29 19:03:43 +0000
@@ -20,6 +20,7 @@
u"""MediaFire Duplicity Backend"""
+from builtins import str
import os
import duplicity.backend
=== modified file 'duplicity/backends/megabackend.py'
--- duplicity/backends/megabackend.py 2018-10-11 21:54:47 +0000
+++ duplicity/backends/megabackend.py 2018-11-29 19:03:43 +0000
@@ -1,4 +1,3 @@
-from __future__ import print_function
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2017 Tomas Vondra (Launchpad id: tomas-v)
@@ -20,6 +19,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from __future__ import print_function
import duplicity.backend
from duplicity import log
from duplicity.errors import BackendException
@@ -80,7 +80,7 @@
def _makedir_recursive(self, path):
u'creates a remote directory (recursively the whole path), ingores errors'
- print (u"mkdir: %s" % (u'/'.join(path),))
+ print(u"mkdir: %s" % (u'/'.join(path),))
p = self._root
@@ -119,7 +119,7 @@
def folder_contents(self, files_only=False):
u'lists contents of a folder, optionally ignoring subdirectories'
- print (u"megals: %s" % (self._folder,))
+ print(u"megals: %s" % (self._folder,))
if self._megarc:
cmd = [u'megals', u'--config', self._megarc, self._folder]
@@ -140,7 +140,7 @@
def download(self, remote_file, local_file):
- print (u"megaget: %s" % (remote_file,))
+ print(u"megaget: %s" % (remote_file,))
if self._megarc:
cmd = [u'megaget', u'--config', self._megarc, u'--no-progress',
@@ -153,7 +153,7 @@
def upload(self, local_file, remote_file):
- print (u"megaput: %s" % (remote_file,))
+ print(u"megaput: %s" % (remote_file,))
if self._megarc:
cmd = [u'megaput', u'--config', self._megarc, u'--no-progress',
@@ -166,7 +166,7 @@
def delete(self, remote_file):
- print (u"megarm: %s" % (remote_file,))
+ print(u"megarm: %s" % (remote_file,))
if self._megarc:
cmd = [u'megarm', u'--config', self._megarc, self._folder + u'/' + remote_file]
=== modified file 'duplicity/backends/multibackend.py'
--- duplicity/backends/multibackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/multibackend.py 2018-11-29 19:03:43 +0000
@@ -23,11 +23,14 @@
#
+from future import standard_library
+standard_library.install_aliases()
import os
import os.path
import string
-import urllib
-import urlparse
+import urllib.request # pylint: disable=import-error
+import urllib.parse # pylint: disable=import-error
+import urllib.error # pylint: disable=import-error
import json
import duplicity.backend
@@ -77,11 +80,11 @@
@staticmethod
def get_query_params(parsed_url):
# Reparse so the query string is available
- reparsed_url = urlparse.urlparse(parsed_url.geturl())
+ reparsed_url = urllib.parse.urlparse(parsed_url.geturl())
if len(reparsed_url.query) == 0:
return dict()
try:
- queryMultiDict = urlparse.parse_qs(reparsed_url.query, strict_parsing=True)
+ queryMultiDict = urllib.parse.parse_qs(reparsed_url.query, strict_parsing=True)
except ValueError as e:
log.Log(_(u"MultiBackend: Could not parse query string %s: %s ")
% (reparsed_url.query, e),
@@ -90,7 +93,7 @@
queryDict = dict()
# Convert the multi-dict to a single dictionary
# while checking to make sure that no unrecognized values are found
- for name, valueList in queryMultiDict.items():
+ for name, valueList in list(queryMultiDict.items()):
if len(valueList) != 1:
log.Log(_(u"MultiBackend: Invalid query string %s: more than one value for %s")
% (reparsed_url.query, name),
@@ -205,7 +208,7 @@
def _eligible_stores(self, filename):
if self.__affinities:
- matching_prefixes = [k for k in self.__affinities.keys() if filename.startswith(k)]
+ matching_prefixes = [k for k in list(self.__affinities.keys()) if filename.startswith(k)]
matching_stores = {store for prefix in matching_prefixes for store in self.__affinities[prefix]}
if matching_stores:
# Distinct stores with matching prefix
=== modified file 'duplicity/backends/ncftpbackend.py'
--- duplicity/backends/ncftpbackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/ncftpbackend.py 2018-11-29 19:03:43 +0000
@@ -19,8 +19,12 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from future import standard_library
+standard_library.install_aliases()
import os.path
-import urllib
+import urllib.request # pylint: disable=import-error
+import urllib.parse # pylint: disable=import-error
+import urllib.error # pylint: disable=import-error
import re
import duplicity.backend
@@ -93,13 +97,15 @@
self.flags += u" -P '%s'" % (parsed_url.port)
def _put(self, source_path, remote_filename):
- remote_path = os.path.join(urllib.unquote(re.sub(u'^/', u'', self.parsed_url.path)), remote_filename).rstrip()
+ remote_path = os.path.join(urllib.parse.unquote(re.sub(u'^/', u'', self.parsed_url.path)),
+ remote_filename).rstrip()
commandline = u"ncftpput %s -m -V -C '%s' '%s'" % \
(self.flags, source_path.name, remote_path)
self.subprocess_popen(commandline)
def _get(self, remote_filename, local_path):
- remote_path = os.path.join(urllib.unquote(re.sub(u'^/', u'', self.parsed_url.path)), remote_filename).rstrip()
+ remote_path = os.path.join(urllib.parse.unquote(re.sub(u'^/', u'', self.parsed_url.path)),
+ remote_filename).rstrip()
commandline = u"ncftpget %s -V -C '%s' '%s' '%s'" % \
(self.flags, self.parsed_url.hostname, remote_path.lstrip(u'/'), local_path.name)
self.subprocess_popen(commandline)
=== modified file 'duplicity/backends/onedrivebackend.py'
--- duplicity/backends/onedrivebackend.py 2018-10-11 21:54:47 +0000
+++ duplicity/backends/onedrivebackend.py 2018-11-29 19:03:43 +0000
@@ -1,4 +1,3 @@
-from __future__ import print_function
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
# vim:tabstop=4:shiftwidth=4:expandtab
#
@@ -22,6 +21,9 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from __future__ import print_function
+from builtins import input
+from builtins import str
import time
import json
import os
@@ -150,7 +152,7 @@
u'page the dialog leads to.' % authorization_url)
print()
- redirected_to = raw_input(u'URL of the blank page: ').strip()
+ redirected_to = input(u'URL of the blank page: ').strip()
token = self.http_client.fetch_token(
self.OAUTH_TOKEN_URI,
@@ -198,7 +200,7 @@
u'Could not resolve/create directory "%s" on '
u'OneDrive: %s not in %s (files of folder %s)' % (
self.directory, component,
- names_to_ids.keys(), object_id)))
+ list(names_to_ids.keys()), object_id)))
break
object_id = names_to_ids[component]
self.directory_id = object_id
=== modified file 'duplicity/backends/par2backend.py'
--- duplicity/backends/par2backend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/par2backend.py 2018-11-29 19:03:43 +0000
@@ -16,7 +16,8 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-from future_builtins import filter
+from builtins import filter
+from future.builtins import filter
import os
import re
@@ -24,6 +25,7 @@
from duplicity.errors import BackendException
from duplicity import log
from duplicity import globals
+from duplicity import util
class Par2Backend(backend.Backend):
@@ -77,12 +79,13 @@
source_symlink = par2temp.append(remote_filename)
source_target = source_path.get_canonical()
if not os.path.isabs(source_target):
- source_target = os.path.join(os.getcwd(), source_target)
+ source_target = os.path.join(util.fsencode(os.getcwd()), source_target)
os.symlink(source_target, source_symlink.get_canonical())
source_symlink.setdata()
log.Info(u"Create Par2 recovery files")
- par2create = u'par2 c -r%d -n1 %s %s' % (self.redundancy, self.common_options, source_symlink.get_canonical())
+ par2create = u'par2 c -r%d -n1 %s %s' % (self.redundancy, self.common_options,
+ util.fsdecode(source_symlink.get_canonical()))
out, returncode = pexpect.run(par2create, None, True)
source_symlink.delete()
@@ -111,6 +114,7 @@
If "par2 verify" detect an error transfer the Par2-volumes into the
temp-dir and try to repair.
"""
+
par2temp = local_path.get_temp_in_same_dir()
par2temp.mkdir()
local_path_temp = par2temp.append(remote_filename)
@@ -118,7 +122,7 @@
self.wrapped_backend._get(remote_filename, local_path_temp)
try:
- par2file = par2temp.append(remote_filename + u'.par2')
+ par2file = par2temp.append(remote_filename + b'.par2')
self.wrapped_backend._get(par2file.get_filename(), par2file)
par2verify = u'par2 v %s %s %s' % (self.common_options,
@@ -128,8 +132,8 @@
if returncode:
log.Warn(u"File is corrupt. Try to repair %s" % remote_filename)
- par2volumes = filter(re.compile(r'%s\.vol[\d+]*\.par2' % remote_filename).match,
- self.wrapped_backend._list())
+ par2volumes = list(filter(re.compile(b'%s\\.vol[\\d+]*\\.par2' % remote_filename).match,
+ self.wrapped_backend._list()))
for filename in par2volumes:
file = par2temp.append(filename)
@@ -158,7 +162,7 @@
remote_list = self.unfiltered_list()
- c = re.compile(r'%s(?:\.vol[\d+]*)?\.par2' % filename)
+ c = re.compile(b'%s(?:\\.vol[\\d+]*)?\\.par2' % filename)
for remote_filename in remote_list:
if c.match(remote_filename):
self.wrapped_backend._delete(remote_filename)
@@ -169,7 +173,7 @@
remote_list = self.unfiltered_list()
for filename in filename_list[:]:
- c = re.compile(r'%s(?:\.vol[\d+]*)?\.par2' % filename)
+ c = re.compile(b'%s(?:\\.vol[\\d+]*)?\\.par2' % filename)
for remote_filename in remote_list:
if c.match(remote_filename):
# insert here to make sure par2 files will be removed first
@@ -189,7 +193,7 @@
"""
remote_list = self.wrapped_backend._list()
- c = re.compile(r'(?!.*\.par2$)')
+ c = re.compile(b'(?!.*\\.par2$)')
filtered_list = []
for filename in remote_list:
if c.match(filename):
=== modified file 'duplicity/backends/pcabackend.py'
--- duplicity/backends/pcabackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/pcabackend.py 2018-11-29 19:03:43 +0000
@@ -19,6 +19,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from builtins import str
import os
import duplicity.backend
=== modified file 'duplicity/backends/pydrivebackend.py'
--- duplicity/backends/pydrivebackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/pydrivebackend.py 2018-11-29 19:03:43 +0000
@@ -16,6 +16,8 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from builtins import next
+from builtins import str
import string
import os
@@ -185,7 +187,7 @@
# not yet appear in the listing.
# Note: do not use iterkeys() here, because file_by_name will modify
# the cache if it finds invalid entries.
- for filename in self.id_cache.keys():
+ for filename in list(self.id_cache.keys()):
if (filename not in filenames) and (self.file_by_name(filename) is not None):
filenames.add(filename)
return list(filenames)
=== modified file 'duplicity/backends/pyrax_identity/hubic.py'
--- duplicity/backends/pyrax_identity/hubic.py 2018-10-11 21:54:47 +0000
+++ duplicity/backends/pyrax_identity/hubic.py 2018-11-29 19:03:43 +0000
@@ -4,11 +4,14 @@
# Licensed under the MIT license
from __future__ import print_function
-import ConfigParser
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+import configparser
import os
import re
import time
-import urlparse
+import urllib.parse # pylint: disable=import-error
from requests.compat import quote, quote_plus
import requests
@@ -69,8 +72,8 @@
def _parse_error(self, resp):
if u'location' not in resp.headers:
return None
- query = urlparse.urlsplit(resp.headers[u'location']).query
- qs = dict(urlparse.parse_qsl(query))
+ query = urllib.parse.urlsplit(resp.headers[u'location']).query
+ qs = dict(urllib.parse.parse_qsl(query))
return {u'error': qs[u'error'], u'error_description': qs[u'error_description']}
def _get_access_token(self, code):
@@ -96,7 +99,7 @@
oauth_token = r.json()
- config = ConfigParser.ConfigParser()
+ config = configparser.ConfigParser()
config.read(TOKENS_FILE)
if not config.has_section(u"hubic"):
@@ -134,7 +137,7 @@
def _refresh_access_token(self):
- config = ConfigParser.ConfigParser()
+ config = configparser.ConfigParser()
config.read(TOKENS_FILE)
refresh_token = config.get(u"hubic", u"refresh_token")
@@ -190,7 +193,7 @@
raise exc.AuthenticationFailed(u"Unable to get oauth access token from json")
def authenticate(self):
- config = ConfigParser.ConfigParser()
+ config = configparser.ConfigParser()
config.read(TOKENS_FILE)
if config.has_option(u"hubic", u"refresh_token"):
@@ -245,8 +248,8 @@
)
try:
- query = urlparse.urlsplit(r.headers[u'location']).query
- code = dict(urlparse.parse_qsl(query))[u'code']
+ query = urllib.parse.urlsplit(r.headers[u'location']).query
+ code = dict(urllib.parse.parse_qsl(query))[u'code']
except:
raise exc.AuthenticationFailed(u"Unable to authorize client_id, "
u"invalid login/password ?")
=== modified file 'duplicity/backends/rsyncbackend.py'
--- duplicity/backends/rsyncbackend.py 2018-10-11 21:54:47 +0000
+++ duplicity/backends/rsyncbackend.py 2018-11-29 19:03:43 +0000
@@ -1,4 +1,3 @@
-from __future__ import print_function
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <ben@xxxxxxxxxxx>
@@ -20,6 +19,8 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from __future__ import print_function
+from builtins import map
import os
import re
import tempfile
=== modified file 'duplicity/backends/ssh_paramiko_backend.py'
--- duplicity/backends/ssh_paramiko_backend.py 2018-07-24 16:17:12 +0000
+++ duplicity/backends/ssh_paramiko_backend.py 2018-11-29 19:03:43 +0000
@@ -23,6 +23,11 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from __future__ import division
+from builtins import oct
+from builtins import input
+from builtins import zip
+from past.utils import old_div
import re
import string
import os
@@ -101,7 +106,7 @@
fingerprint)
while True:
sys.stdout.write(question)
- choice = raw_input().lower()
+ choice = input().lower()
if choice in [u'yes', u'y']:
paramiko.AutoAddPolicy.missing_host_key(self, client,
hostname, key)
@@ -236,7 +241,7 @@
self.config[u'user'],
self.config[u'hostname'],
self.config[u'port'], e))
- self.client.get_transport().set_keepalive((int)(globals.timeout / 2))
+ self.client.get_transport().set_keepalive((int)(old_div(globals.timeout, 2)))
self.scheme = duplicity.backend.strip_prefix(parsed_url.scheme,
u'paramiko')
=== modified file 'duplicity/backends/ssh_pexpect_backend.py'
--- duplicity/backends/ssh_pexpect_backend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/ssh_pexpect_backend.py 2018-11-29 19:03:43 +0000
@@ -24,7 +24,12 @@
# have the same syntax. Also these strings will be executed by the
# shell, so shouldn't have strange characters in them.
-from future_builtins import map
+from __future__ import division
+from future import standard_library
+standard_library.install_aliases()
+from builtins import map
+from past.utils import old_div
+from future.builtins import map
import re
import string
@@ -78,7 +83,7 @@
globals.ssh_options = globals.ssh_options + u" -oPort=%s" % parsed_url.port
# set some defaults if user has not specified already.
if u"ServerAliveInterval" not in globals.ssh_options:
- globals.ssh_options += u" -oServerAliveInterval=%d" % ((int)(globals.timeout / 2))
+ globals.ssh_options += u" -oServerAliveInterval=%d" % ((int)(old_div(globals.timeout, 2)))
if u"ServerAliveCountMax" not in globals.ssh_options:
globals.ssh_options += u" -oServerAliveCountMax=2"
=== modified file 'duplicity/backends/swiftbackend.py'
--- duplicity/backends/swiftbackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/swiftbackend.py 2018-11-29 19:03:43 +0000
@@ -18,6 +18,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from builtins import str
import os
import duplicity.backend
=== modified file 'duplicity/backends/tahoebackend.py'
--- duplicity/backends/tahoebackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/tahoebackend.py 2018-11-29 19:03:43 +0000
@@ -20,6 +20,7 @@
import duplicity.backend
from duplicity import log
+from duplicity import util
from duplicity.errors import BackendException
@@ -49,6 +50,8 @@
else:
return u"%s:" % self.alias
+ if isinstance(filename, b"".__class__):
+ filename = util.fsdecode(filename)
if self.directory != u"":
return u"%s:%s/%s" % (self.alias, self.directory, filename)
else:
@@ -60,14 +63,14 @@
return output
def _put(self, source_path, remote_filename):
- self.run(u"tahoe", u"cp", source_path.name, self.get_remote_path(remote_filename))
+ self.run(u"tahoe", u"cp", source_path.uc_name, self.get_remote_path(remote_filename))
def _get(self, remote_filename, local_path):
- self.run(u"tahoe", u"cp", self.get_remote_path(remote_filename), local_path.name)
+ self.run(u"tahoe", u"cp", self.get_remote_path(remote_filename), local_path.uc_name)
def _list(self):
output = self.run(u"tahoe", u"ls", self.get_remote_path())
- return output.split(u'\n') if output else []
+ return output.split(b'\n') if output else []
def _delete(self, filename):
self.run(u"tahoe", u"rm", self.get_remote_path(filename))
=== modified file 'duplicity/backends/webdavbackend.py'
--- duplicity/backends/webdavbackend.py 2018-07-23 14:55:39 +0000
+++ duplicity/backends/webdavbackend.py 2018-11-29 19:03:43 +0000
@@ -21,13 +21,17 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from future import standard_library
+standard_library.install_aliases()
+from builtins import str
+from builtins import range
import base64
-import httplib
+import http.client
import os
import re
-import urllib
-import urllib2
-import urlparse
+import urllib.request # pylint: disable=import-error
+import urllib.parse # pylint: disable=import-error
+import urllib.error # pylint: disable=import-error
import xml.dom.minidom
import duplicity.backend
@@ -37,7 +41,7 @@
from duplicity.errors import BackendException, FatalBackendException
-class CustomMethodRequest(urllib2.Request):
+class CustomMethodRequest(urllib.request.Request):
u"""
This request subclass allows explicit specification of
the HTTP request method. Basic urllib2.Request class
@@ -45,13 +49,13 @@
"""
def __init__(self, method, *args, **kwargs):
self.method = method
- urllib2.Request.__init__(self, *args, **kwargs)
+ urllib.request.Request.__init__(self, *args, **kwargs)
def get_method(self):
return self.method
-class VerifiedHTTPSConnection(httplib.HTTPSConnection):
+class VerifiedHTTPSConnection(http.client.HTTPSConnection):
def __init__(self, *args, **kwargs):
try:
global socket, ssl
@@ -60,7 +64,7 @@
except ImportError:
raise FatalBackendException(_(u"Missing socket or ssl python modules."))
- httplib.HTTPSConnection.__init__(self, *args, **kwargs)
+ http.client.HTTPSConnection.__init__(self, *args, **kwargs)
self.cacert_file = globals.ssl_cacert_file
self.cacert_candidates = [u"~/.duplicity/cacert.pem",
@@ -117,7 +121,7 @@
def request(self, *args, **kwargs): # pylint: disable=method-hidden
try:
- return httplib.HTTPSConnection.request(self, *args, **kwargs)
+ return http.client.HTTPSConnection.request(self, *args, **kwargs)
except ssl.SSLError as e:
# encapsulate ssl errors
raise BackendException(u"SSL failed: %s" % util.uexc(e),
@@ -185,10 +189,10 @@
self._close()
# http schemes needed for redirect urls from servers
if self.parsed_url.scheme in [u'webdav', u'http']:
- self.conn = httplib.HTTPConnection(self.parsed_url.hostname, self.parsed_url.port)
+ self.conn = http.client.HTTPConnection(self.parsed_url.hostname, self.parsed_url.port)
elif self.parsed_url.scheme in [u'webdavs', u'https']:
if globals.ssl_no_check_certificate:
- self.conn = httplib.HTTPSConnection(self.parsed_url.hostname, self.parsed_url.port)
+ self.conn = http.client.HTTPSConnection(self.parsed_url.hostname, self.parsed_url.port)
else:
self.conn = VerifiedHTTPSConnection(self.parsed_url.hostname, self.parsed_url.port)
else:
@@ -206,7 +210,7 @@
self._close() # or we get previous request's data or exception
self.connect()
- quoted_path = urllib.quote(path, u"/:~")
+ quoted_path = urllib.parse.quote(path, u"/:~")
if self.digest_challenge is not None:
self.headers[u'Authorization'] = self.get_digest_authorization(path)
@@ -221,7 +225,7 @@
redirect_url = response.getheader(u'location', None)
response.close()
if redirect_url:
- log.Notice(_(u"WebDAV redirect to: %s ") % urllib.unquote(redirect_url))
+ log.Notice(_(u"WebDAV redirect to: %s ") % urllib.parse.unquote(redirect_url))
if redirected > 10:
raise FatalBackendException(_(u"WebDAV redirected 10 times. Giving up."))
self.parsed_url = duplicity.backend.ParsedUrl(redirect_url)
@@ -291,9 +295,9 @@
"""
u = self.parsed_url
if self.digest_auth_handler is None:
- pw_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
+ pw_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
pw_manager.add_password(None, self.conn.host, self.username, self.password)
- self.digest_auth_handler = urllib2.HTTPDigestAuthHandler(pw_manager)
+ self.digest_auth_handler = urllib.request.HTTPDigestAuthHandler(pw_manager)
# building a dummy request that gets never sent,
# needed for call to auth_handler.get_authorization
@@ -372,7 +376,7 @@
@return: A matching filename, or None if the href did not match.
"""
raw_filename = self.getText(href.childNodes).strip()
- parsed_url = urlparse.urlparse(urllib.unquote(raw_filename))
+ parsed_url = urllib.parse.urlparse(urllib.parse.unquote(raw_filename))
filename = parsed_url.path
log.Debug(_(u"WebDAV path decoding and translation: "
u"%s -> %s") % (raw_filename, filename))
=== modified file 'duplicity/cached_ops.py'
--- duplicity/cached_ops.py 2018-09-11 21:35:37 +0000
+++ duplicity/cached_ops.py 2018-11-29 19:03:43 +0000
@@ -20,6 +20,7 @@
u"""Cache-wrapped functions for grp and pwd lookups."""
+from builtins import object
import grp
import pwd
=== modified file 'duplicity/collections.py'
--- duplicity/collections.py 2018-10-11 21:54:47 +0000
+++ duplicity/collections.py 2018-11-29 19:03:43 +0000
@@ -21,7 +21,14 @@
u"""Classes and functions on collections of backup volumes"""
-from future_builtins import filter, map
+from past.builtins import cmp
+from builtins import filter
+from builtins import str
+from builtins import zip
+from builtins import map
+from builtins import range
+from builtins import object
+from future.builtins import filter, map
import types
import gettext
@@ -48,7 +55,7 @@
pass
-class BackupSet:
+class BackupSet(object):
u"""
Backup set - the backup information produced by one session
"""
@@ -193,7 +200,7 @@
filelist = []
if self.remote_manifest_name:
filelist.append(self.remote_manifest_name)
- filelist.extend(self.volume_name_dict.values())
+ filelist.extend(list(self.volume_name_dict.values()))
return u"[%s]" % u", ".join(map(util.fsdecode, filelist))
def get_timestr(self):
@@ -268,7 +275,7 @@
Return sorted list of (remote) filenames of files in set
"""
assert self.info_set
- volume_num_list = self.volume_name_dict.keys()
+ volume_num_list = list(self.volume_name_dict.keys())
volume_num_list.sort()
volume_filenames = [self.volume_name_dict[x] for x in volume_num_list]
if self.remote_manifest_name:
@@ -298,7 +305,7 @@
u"""
Return the number of volumes in the set
"""
- return len(self.volume_name_dict.keys())
+ return len(list(self.volume_name_dict.keys()))
def __eq__(self, other):
u"""
@@ -311,7 +318,7 @@
len(self) == len(other)
-class BackupChain:
+class BackupChain(object):
u"""
BackupChain - a number of linked BackupSets
@@ -463,7 +470,7 @@
return self.incset_list
-class SignatureChain:
+class SignatureChain(object):
u"""
A number of linked SignatureSets
@@ -593,14 +600,13 @@
inclist = self.inclist
if time:
- inclist = filter(lambda n: file_naming.parse(n).end_time <= time,
- inclist)
+ inclist = [n for n in inclist if file_naming.parse(n).end_time <= time]
l.extend(inclist)
return l
-class CollectionsStatus:
+class CollectionsStatus(object):
u"""
Hold information about available chains and sets
"""
@@ -669,13 +675,13 @@
for i in range(len(self.other_backup_chains)):
l.append(_(u"Secondary chain %d of %d:") %
(i + 1, len(self.other_backup_chains)))
- l.append(unicode(self.other_backup_chains[i]))
+ l.append(str(self.other_backup_chains[i]))
l.append(u"")
if self.matched_chain_pair:
l.append(u"\n" + _(u"Found primary backup chain with matching "
u"signature chain:"))
- l.append(unicode(self.matched_chain_pair[1]))
+ l.append(str(self.matched_chain_pair[1]))
else:
l.append(_(u"No backup chains with active signatures found"))
@@ -825,7 +831,7 @@
u"Warning, found the following orphaned "
u"backup files:",
len(self.orphaned_backup_sets)) + u"\n" +
- u"\n".join(map(unicode, self.orphaned_backup_sets)),
+ u"\n".join(map(str, self.orphaned_backup_sets)),
log.WarningCode.orphaned_backup)
def get_backup_chains(self, filename_list):
@@ -942,13 +948,9 @@
elif pr.type == u"new-sig":
new_sig_filenames.append(filename)
- # compare by file time
- def by_start_time(a, b):
- return int(file_naming.parse(a).start_time) - int(file_naming.parse(b).start_time)
-
# Try adding new signatures to existing chains
orphaned_filenames = []
- new_sig_filenames.sort(by_start_time)
+ new_sig_filenames.sort(key=lambda x: int(file_naming.parse(x).start_time))
for sig_filename in new_sig_filenames:
for chain in chains:
if chain.add_filename(sig_filename):
@@ -970,7 +972,7 @@
endtime_chain_dict[chain.end_time] = [chain]
# Use dictionary to build final sorted list
- sorted_end_times = endtime_chain_dict.keys()
+ sorted_end_times = list(endtime_chain_dict.keys())
sorted_end_times.sort()
sorted_chain_list = []
for end_time in sorted_end_times:
@@ -1152,9 +1154,6 @@
a valid input). Thus the second-to-last is obtained with n=2
rather than n=1.
"""
- def mycmp(x, y):
- return cmp(x.get_first().time, y.get_first().time)
-
assert self.values_set
assert n > 0
@@ -1162,7 +1161,7 @@
return None
sorted = self.all_backup_chains[:]
- sorted.sort(mycmp)
+ sorted.sort(key=lambda x: x.get_first().time)
sorted.reverse()
return sorted[n - 1]
@@ -1191,10 +1190,10 @@
where the newer end of the chain is newer than t.
"""
assert self.values_set
- new_chains = filter(lambda c: c.end_time >= t, self.all_backup_chains)
+ new_chains = [c for c in self.all_backup_chains if c.end_time >= t]
result_sets = []
for chain in new_chains:
- old_sets = filter(lambda s: s.get_time() < t, chain.get_all_sets())
+ old_sets = [s for s in chain.get_all_sets() if s.get_time() < t]
result_sets.extend(old_sets)
return self.sort_sets(result_sets)
@@ -1224,7 +1223,7 @@
return FileChangedStatus(filepath, list(zip(specified_file_backup_type, specified_file_backup_set)))
-class FileChangedStatus:
+class FileChangedStatus(object):
def __init__(self, filepath, fileinfo_list):
self.filepath = filepath
self.fileinfo_list = fileinfo_list
=== modified file 'duplicity/commandline.py'
--- duplicity/commandline.py 2018-09-11 21:35:37 +0000
+++ duplicity/commandline.py 2018-11-29 19:03:43 +0000
@@ -22,7 +22,10 @@
u"""Parse command line, check for consistency, and set globals"""
from __future__ import print_function
-from future_builtins import filter
+from builtins import filter
+from builtins import str
+from builtins import range
+from future.builtins import filter
from copy import copy
import optparse
@@ -127,7 +130,7 @@
# (but duplicity is run from a different directory or similar),
# then it is simply up to the user to set --archive-dir properly.
burlhash = md5()
- burlhash.update(backend_url)
+ burlhash.update(backend_url.encode())
return burlhash.hexdigest()
@@ -231,9 +234,12 @@
encoding = self._get_encoding(file)
help = self.format_help()
# The help is in unicode or bytes depending on the user's locale
- if not isinstance(help, unicode):
- help = self.format_help().decode(u'utf-8')
- file.write(help.encode(encoding, u"replace"))
+ if sys.version_info.major == 2:
+ if not isinstance(help, unicode):
+ help = self.format_help().decode(u'utf-8')
+ file.write(help.encode(encoding, u"replace"))
+ else:
+ file.write(help)
def parse_cmdline_options(arglist):
@@ -387,7 +393,7 @@
# --archive-dir <path>
parser.add_option(u"--file-to-restore", u"-r", action=u"callback", type=u"file",
metavar=_(u"path"), dest=u"restore_dir",
- callback=lambda o, s, v, p: setattr(p.values, u"restore_dir", v.strip(u'/')))
+ callback=lambda o, s, v, p: setattr(p.values, u"restore_dir", util.fsencode(v.strip(u'/'))))
# Used to confirm certain destructive operations like deleting old files.
parser.add_option(u"--force", action=u"store_true")
@@ -665,7 +671,7 @@
# attributes that are 'hidden' (start with an underscore) or whose name is
# the empty string (used for arguments that don't directly store a value
# by using dest="")
- for f in filter(lambda x: x and not x.startswith(u"_"), dir(options)):
+ for f in [x for x in dir(options) if x and not x.startswith(u"_")]:
v = getattr(options, f)
# Only set if v is not None because None is the default for all the
# variables. If user didn't set it, we'll use defaults in globals.py
=== modified file 'duplicity/diffdir.py'
--- duplicity/diffdir.py 2018-10-16 20:56:54 +0000
+++ duplicity/diffdir.py 2018-11-29 19:03:43 +0000
@@ -26,10 +26,19 @@
first, the signature or delta is constructed of a ROPath iterator. In
the second, the ROPath iterator is put into tar block form.
"""
-
-from future_builtins import map
-
-import cStringIO
+from __future__ import division
+
+from future import standard_library
+standard_library.install_aliases()
+from builtins import map
+from builtins import next
+from builtins import str
+from builtins import range
+from past.utils import old_div
+from builtins import object
+from future.builtins import map
+
+import io
import types
import math
from duplicity import statistics
@@ -64,14 +73,14 @@
will be easy to split up the tar and make the volumes the same
sizes).
"""
- return DirDelta(path_iter, cStringIO.StringIO(u""))
+ return DirDelta(path_iter, io.StringIO(u""))
def DirFull_WriteSig(path_iter, sig_outfp):
u"""
Return full backup like above, but also write signature to sig_outfp
"""
- return DirDelta_WriteSig(path_iter, cStringIO.StringIO(u""), sig_outfp)
+ return DirDelta_WriteSig(path_iter, io.StringIO(u""), sig_outfp)
def DirDelta(path_iter, dirsig_fileobj_list):
@@ -126,8 +135,11 @@
Callback activated when FileWithSignature read to end
"""
ti.size = len(sig_string)
- ti.name = b"signature/" + b"/".join(index)
- sigTarFile.addfile(ti, cStringIO.StringIO(sig_string))
+ if sys.version_info.major >= 3:
+ ti.name = u"signature/" + util.fsdecode(b"/".join(index))
+ else:
+ ti.name = b"signature/" + b"/".join(index)
+ sigTarFile.addfile(ti, io.BytesIO(sig_string))
if new_path.isreg() and sig_path and sig_path.isreg() and sig_path.difftype == u"signature":
delta_path.difftype = u"diff"
@@ -140,7 +152,10 @@
else:
delta_path.difftype = u"snapshot"
if sigTarFile:
- ti.name = b"snapshot/" + b"/".join(index)
+ if sys.version_info.major >= 3:
+ ti.name = u"snapshot/" + util.fsdecode(b"/".join(index))
+ else:
+ ti.name = b"snapshot/" + b"/".join(index)
if not new_path.isreg():
if sigTarFile:
sigTarFile.addfile(ti)
@@ -207,7 +222,10 @@
util.escape(sig_path.get_relative_path()))
if sigTarFile:
ti = ROPath(sig_path.index).get_tarinfo()
- ti.name = u"deleted/" + u"/".join(sig_path.index)
+ if sys.version_info.major >= 3:
+ ti.name = u"deleted/" + util.uindex(sig_path.index)
+ else:
+ ti.name = b"deleted/" + b"/".join(sig_path.index)
sigTarFile.addfile(ti)
stats.add_deleted_file(sig_path)
yield ROPath(sig_path.index)
@@ -238,7 +256,7 @@
tf.debug = 1
for tarinfo in tf:
tiname = util.get_tarinfo_name(tarinfo)
- for prefix in [b"signature/", b"snapshot/", b"deleted/"]:
+ for prefix in [r"signature/", r"snapshot/", r"deleted/"]:
if tiname.startswith(prefix):
# strip prefix and '/' from name and set it to difftype
name, difftype = tiname[len(prefix):], prefix[:-1]
@@ -246,7 +264,10 @@
else:
raise DiffDirException(u"Bad tarinfo name %s" % (tiname,))
- index = tuple(name.split(b"/"))
+ if sys.version_info.major >= 3:
+ index = tuple(util.fsencode(name).split(b"/"))
+ else:
+ index = tuple(name.split(b"/"))
if not index[-1]:
index = index[:-1] # deal with trailing /, ""
@@ -347,7 +368,7 @@
else:
break # assumed triple_list sorted, so can exit now
- triple_list = [x for x in map(get_triple, range(len(path_iter_list))) if x]
+ triple_list = [x for x in map(get_triple, list(range(len(path_iter_list)))) if x]
while triple_list:
triple_list.sort()
yield triple_list[0][2]
@@ -382,7 +403,7 @@
return combine_path_iters([sigtar2path_iter(x) for x in sig_infp_list])
-class FileWithReadCounter:
+class FileWithReadCounter(object):
u"""
File-like object which also computes amount read as it is read
"""
@@ -404,7 +425,7 @@
return self.infile.close()
-class FileWithSignature:
+class FileWithSignature(object):
u"""
File-like object which also computes signature as it is read
"""
@@ -441,7 +462,7 @@
return self.infile.close()
-class TarBlock:
+class TarBlock(object):
u"""
Contain information to add next file to tar
"""
@@ -453,7 +474,7 @@
self.data = data
-class TarBlockIter:
+class TarBlockIter(object):
u"""
A bit like an iterator, yield tar blocks given input iterator
@@ -481,7 +502,7 @@
Make tarblock out of tarinfo and file data
"""
tarinfo.size = len(file_data)
- headers = tarinfo.tobuf(errors=u'replace')
+ headers = tarinfo.tobuf(errors=u'replace', encoding=globals.fsencoding)
blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE) # @UnusedVariable
if remainder > 0:
filler_data = b"\0" * (tarfile.BLOCKSIZE - remainder)
@@ -506,7 +527,7 @@
assert self.process_waiting
XXX # Override in subclass @UndefinedVariable
- def next(self):
+ def __next__(self):
u"""
Return next block and update offset
"""
@@ -517,10 +538,10 @@
return result
if self.process_waiting:
- result = self.process_continued()
+ result = self.process_continued() # pylint: disable=assignment-from-no-return
else:
# Below a StopIteration exception will just be passed upwards
- result = self.process(next(self.input_iter))
+ result = self.process(next(self.input_iter)) # pylint: disable=assignment-from-no-return
block_number = self.process_next_vol_number
self.offset += len(result.data)
self.previous_index = result.index
@@ -570,9 +591,9 @@
"""
blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) # @UnusedVariable
self.offset = 0
- return u'\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0
+ return b'\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0
- def __iter__(self):
+ def __iter__(self): # pylint: disable=non-iterator-returned
return self
@@ -614,10 +635,14 @@
get_block_size(path.getsize()))
sigbuf = sfp.read()
sfp.close()
- ti.name = u"signature/" + u"/".join(path.index)
+ ti.name = b"signature/" + b"/".join(path.index)
+ if sys.version_info.major >= 3:
+ ti.name = util.fsdecode(ti.name)
return self.tarinfo2tarblock(path.index, ti, sigbuf)
else:
- ti.name = u"snapshot/" + u"/".join(path.index)
+ ti.name = b"snapshot/" + b"/".join(path.index)
+ if sys.version_info.major >= 3:
+ ti.name = util.fsdecode(ti.name)
return self.tarinfo2tarblock(path.index, ti)
@@ -635,10 +660,10 @@
"""
def add_prefix(tarinfo, prefix):
u"""Add prefix to the name of a tarinfo file"""
- if tarinfo.name == b".":
- tarinfo.name = prefix + b"/"
+ if tarinfo.name == r".":
+ tarinfo.name = prefix + r"/"
else:
- tarinfo.name = b"%s/%s" % (prefix, tarinfo.name)
+ tarinfo.name = r"%s/%s" % (prefix, tarinfo.name)
ti = delta_ropath.get_tarinfo()
index = delta_ropath.index
@@ -646,10 +671,10 @@
# Return blocks of deleted files or fileless snapshots
if not delta_ropath.type or not delta_ropath.fileobj:
if not delta_ropath.type:
- add_prefix(ti, u"deleted")
+ add_prefix(ti, r"deleted")
else:
assert delta_ropath.difftype == u"snapshot"
- add_prefix(ti, b"snapshot")
+ add_prefix(ti, r"snapshot")
return self.tarinfo2tarblock(index, ti)
# Now handle single volume block case
@@ -659,16 +684,16 @@
stats.RawDeltaSize += len(data)
if last_block:
if delta_ropath.difftype == u"snapshot":
- add_prefix(ti, b"snapshot")
+ add_prefix(ti, r"snapshot")
elif delta_ropath.difftype == u"diff":
- add_prefix(ti, b"diff")
+ add_prefix(ti, r"diff")
else:
assert 0, u"Unknown difftype"
return self.tarinfo2tarblock(index, ti, data)
# Finally, do multivol snapshot or diff case
- full_name = u"multivol_%s/%s" % (delta_ropath.difftype, ti.name)
- ti.name = full_name + u"/1"
+ full_name = r"multivol_%s/%s" % (delta_ropath.difftype, ti.name)
+ ti.name = full_name + r"/1"
self.process_prefix = full_name
self.process_fp = fp
self.process_ropath = delta_ropath
@@ -741,5 +766,5 @@
return 512 # set minimum of 512 bytes
else:
# Split file into about 2000 pieces, rounding to 512
- file_blocksize = int((file_len / (2000 * 512)) * 512)
+ file_blocksize = int((old_div(file_len, (2000 * 512))) * 512)
return min(file_blocksize, globals.max_blocksize)
=== modified file 'duplicity/dup_temp.py'
--- duplicity/dup_temp.py 2018-09-11 21:35:37 +0000
+++ duplicity/dup_temp.py 2018-11-29 19:03:43 +0000
@@ -21,6 +21,7 @@
u"""Manage temporary files"""
+from builtins import object
import os
import sys
import shutil
@@ -131,7 +132,7 @@
return fh
-class FileobjHooked:
+class FileobjHooked(object):
u"""
Simulate a file, but add hook on close
"""
@@ -243,7 +244,7 @@
name = property(get_name)
-class Block:
+class Block(object):
u"""
Data block to return from SrcIter
"""
@@ -251,7 +252,7 @@
self.data = data
-class SrcIter:
+class SrcIter(object):
u"""
Iterate over source and return Block of data.
"""
@@ -259,7 +260,7 @@
self.src = src
self.fp = src.open(u"rb")
- def next(self):
+ def __next__(self):
try:
res = Block(self.fp.read(self.get_read_size()))
except Exception:
@@ -275,4 +276,4 @@
return 128 * 1024
def get_footer(self):
- return u""
+ return b""
=== modified file 'duplicity/dup_threading.py'
--- duplicity/dup_threading.py 2018-09-11 21:35:37 +0000
+++ duplicity/dup_threading.py 2018-11-29 19:03:43 +0000
@@ -28,16 +28,18 @@
at least python 2.5.)
"""
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
import sys
from duplicity import errors
-
_threading_supported = True
try:
- import thread
+ import _thread
except ImportError:
- import dummy_thread as thread
+ import _dummy_thread as _thread
_threading_supported = False
try:
@@ -77,7 +79,7 @@
Returns the thread module, or dummy_thread if threading is not
supported.
"""
- return thread
+ return _thread
def threading_module():
@@ -226,7 +228,7 @@
return (waiter, caller)
-class Value:
+class Value(object):
u"""
A thread-safe container of a reference to an object (but not the
object itself).
=== modified file 'duplicity/dup_time.py'
--- duplicity/dup_time.py 2018-10-11 21:54:47 +0000
+++ duplicity/dup_time.py 2018-11-29 19:03:43 +0000
@@ -21,7 +21,8 @@
u"""Provide time related exceptions and functions"""
-from future_builtins import map
+from builtins import map
+from future.builtins import map
import time
import types
@@ -116,15 +117,15 @@
date, daytime = timestring[:19].split(u"T")
if len(timestring) == 16:
# new format for filename time
- year, month, day = map(int,
- [date[0:4], date[4:6], date[6:8]])
- hour, minute, second = map(int,
- [daytime[0:2], daytime[2:4], daytime[4:6]])
+ year, month, day = list(map(int,
+ [date[0:4], date[4:6], date[6:8]]))
+ hour, minute, second = list(map(int,
+ [daytime[0:2], daytime[2:4], daytime[4:6]]))
else:
# old format for filename time
- year, month, day = map(int, date.split(u"-"))
- hour, minute, second = map(int,
- daytime.split(globals.time_separator))
+ year, month, day = list(map(int, date.split(u"-")))
+ hour, minute, second = list(map(int,
+ daytime.split(globals.time_separator)))
assert 1900 < year < 2100, year
assert 1 <= month <= 12
assert 1 <= day <= 31
@@ -242,7 +243,7 @@
else:
return u"Z" # time is already in UTC
- hours, minutes = map(abs, divmod(offset, 60))
+ hours, minutes = list(map(abs, divmod(offset, 60)))
assert 0 <= hours <= 23
assert 0 <= minutes <= 59
return u"%s%02d%s%02d" % (prefix, hours, globals.time_separator, minutes)
=== modified file 'duplicity/file_naming.py'
--- duplicity/file_naming.py 2018-09-11 21:35:37 +0000
+++ duplicity/file_naming.py 2018-11-29 19:03:43 +0000
@@ -21,9 +21,13 @@
u"""Produce and parse the names of duplicity's backup files"""
+from builtins import str
+from builtins import range
+from builtins import object
import re
from duplicity import dup_time
from duplicity import globals
+import sys
full_vol_re = None
full_vol_re_short = None
@@ -57,87 +61,87 @@
if full_vol_re and not force:
return
- full_vol_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_archive + u"duplicity-full"
- u"\\.(?P<time>.*?)"
- u"\\.vol(?P<num>[0-9]+)"
- u"\\.difftar"
- u"(?P<partial>(\\.part))?"
- u"($|\\.)")
-
- full_vol_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_archive + u"df"
- u"\\.(?P<time>[0-9a-z]+?)"
- u"\\.(?P<num>[0-9a-z]+)"
- u"\\.dt"
- u"(?P<partial>(\\.p))?"
- u"($|\\.)")
-
- full_manifest_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_manifest + u"duplicity-full"
- u"\\.(?P<time>.*?)"
- u"\\.manifest"
- u"(?P<partial>(\\.part))?"
- u"($|\\.)")
-
- full_manifest_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_manifest + u"df"
- u"\\.(?P<time>[0-9a-z]+?)"
- u"\\.m"
- u"(?P<partial>(\\.p))?"
- u"($|\\.)")
-
- inc_vol_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_archive + u"duplicity-inc"
- u"\\.(?P<start_time>.*?)"
- u"\\.to\\.(?P<end_time>.*?)"
- u"\\.vol(?P<num>[0-9]+)"
- u"\\.difftar"
- u"($|\\.)")
-
- inc_vol_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_archive + u"di"
- u"\\.(?P<start_time>[0-9a-z]+?)"
- u"\\.(?P<end_time>[0-9a-z]+?)"
- u"\\.(?P<num>[0-9a-z]+)"
- u"\\.dt"
- u"($|\\.)")
-
- inc_manifest_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_manifest + u"duplicity-inc"
- u"\\.(?P<start_time>.*?)"
- u"\\.to"
- u"\\.(?P<end_time>.*?)"
- u"\\.manifest"
- u"(?P<partial>(\\.part))?"
- u"(\\.|$)")
-
- inc_manifest_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_manifest + u"di"
- u"\\.(?P<start_time>[0-9a-z]+?)"
- u"\\.(?P<end_time>[0-9a-z]+?)"
- u"\\.m"
- u"(?P<partial>(\\.p))?"
- u"(\\.|$)")
-
- full_sig_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_signature + u"duplicity-full-signatures"
- u"\\.(?P<time>.*?)"
- u"\\.sigtar"
- u"(?P<partial>(\\.part))?"
- u"(\\.|$)")
-
- full_sig_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_signature + u"dfs"
- u"\\.(?P<time>[0-9a-z]+?)"
- u"\\.st"
- u"(?P<partial>(\\.p))?"
- u"(\\.|$)")
-
- new_sig_re = re.compile(u"^" + globals.file_prefix + globals.file_prefix_signature + u"duplicity-new-signatures"
- u"\\.(?P<start_time>.*?)"
- u"\\.to"
- u"\\.(?P<end_time>.*?)"
- u"\\.sigtar"
- u"(?P<partial>(\\.part))?"
- u"(\\.|$)")
-
- new_sig_re_short = re.compile(u"^" + globals.file_prefix + globals.file_prefix_signature + u"dns"
- u"\\.(?P<start_time>[0-9a-z]+?)"
- u"\\.(?P<end_time>[0-9a-z]+?)"
- u"\\.st"
- u"(?P<partial>(\\.p))?"
- u"(\\.|$)")
+ full_vol_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"duplicity-full"
+ b"\\.(?P<time>.*?)"
+ b"\\.vol(?P<num>[0-9]+)"
+ b"\\.difftar"
+ b"(?P<partial>(\\.part))?"
+ b"($|\\.)")
+
+ full_vol_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"df"
+ b"\\.(?P<time>[0-9a-z]+?)"
+ b"\\.(?P<num>[0-9a-z]+)"
+ b"\\.dt"
+ b"(?P<partial>(\\.p))?"
+ b"($|\\.)")
+
+ full_manifest_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"duplicity-full"
+ b"\\.(?P<time>.*?)"
+ b"\\.manifest"
+ b"(?P<partial>(\\.part))?"
+ b"($|\\.)")
+
+ full_manifest_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"df"
+ b"\\.(?P<time>[0-9a-z]+?)"
+ b"\\.m"
+ b"(?P<partial>(\\.p))?"
+ b"($|\\.)")
+
+ inc_vol_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"duplicity-inc"
+ b"\\.(?P<start_time>.*?)"
+ b"\\.to\\.(?P<end_time>.*?)"
+ b"\\.vol(?P<num>[0-9]+)"
+ b"\\.difftar"
+ b"($|\\.)")
+
+ inc_vol_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"di"
+ b"\\.(?P<start_time>[0-9a-z]+?)"
+ b"\\.(?P<end_time>[0-9a-z]+?)"
+ b"\\.(?P<num>[0-9a-z]+)"
+ b"\\.dt"
+ b"($|\\.)")
+
+ inc_manifest_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"duplicity-inc"
+ b"\\.(?P<start_time>.*?)"
+ b"\\.to"
+ b"\\.(?P<end_time>.*?)"
+ b"\\.manifest"
+ b"(?P<partial>(\\.part))?"
+ b"(\\.|$)")
+
+ inc_manifest_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"di"
+ b"\\.(?P<start_time>[0-9a-z]+?)"
+ b"\\.(?P<end_time>[0-9a-z]+?)"
+ b"\\.m"
+ b"(?P<partial>(\\.p))?"
+ b"(\\.|$)")
+
+ full_sig_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"duplicity-full-signatures"
+ b"\\.(?P<time>.*?)"
+ b"\\.sigtar"
+ b"(?P<partial>(\\.part))?"
+ b"(\\.|$)")
+
+ full_sig_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"dfs"
+ b"\\.(?P<time>[0-9a-z]+?)"
+ b"\\.st"
+ b"(?P<partial>(\\.p))?"
+ b"(\\.|$)")
+
+ new_sig_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"duplicity-new-signatures"
+ b"\\.(?P<start_time>.*?)"
+ b"\\.to"
+ b"\\.(?P<end_time>.*?)"
+ b"\\.sigtar"
+ b"(?P<partial>(\\.part))?"
+ b"(\\.|$)")
+
+ new_sig_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"dns"
+ b"\\.(?P<start_time>[0-9a-z]+?)"
+ b"\\.(?P<end_time>[0-9a-z]+?)"
+ b"\\.st"
+ b"(?P<partial>(\\.p))?"
+ b"(\\.|$)")
def to_base36(n):
@@ -149,6 +153,8 @@
last_digit = str(mod)
else:
last_digit = chr(ord(u'a') + mod - 10)
+ if sys.version_info.major >= 3:
+ last_digit = last_digit.encode()
if n == mod:
return last_digit
else:
@@ -162,7 +168,10 @@
total = 0
for i in range(len(s)):
total *= 36
- digit_ord = ord(s[i])
+ if sys.version_info.major >= 3 and isinstance(s, bytes):
+ digit_ord = s[i]
+ else:
+ digit_ord = ord(s[i])
if ord(u'0') <= digit_ord <= ord(u'9'):
total += digit_ord - ord(u'0')
elif ord(u'a') <= digit_ord <= ord(u'z'):
@@ -181,16 +190,16 @@
gzipped = False
if encrypted:
if globals.short_filenames:
- suffix = u'.g'
+ suffix = b'.g'
else:
- suffix = u".gpg"
+ suffix = b".gpg"
elif gzipped:
if globals.short_filenames:
- suffix = u".z"
+ suffix = b".z"
else:
- suffix = u'.gz'
+ suffix = b'.gz'
else:
- suffix = u""
+ suffix = b""
return suffix
@@ -207,13 +216,13 @@
if encrypted:
gzipped = False
suffix = get_suffix(encrypted, gzipped)
- part_string = u""
+ part_string = b""
if globals.short_filenames:
if partial:
- part_string = u".p"
+ part_string = b".p"
else:
if partial:
- part_string = u".part"
+ part_string = b".part"
if type == u"full-sig" or type == u"new-sig":
assert not volume_number and not manifest
@@ -221,23 +230,23 @@
if type == u"full-sig":
if globals.short_filenames:
return (globals.file_prefix + globals.file_prefix_signature +
- u"dfs.%s.st%s%s" %
+ b"dfs.%s.st%s%s" %
(to_base36(dup_time.curtime), part_string, suffix))
else:
return (globals.file_prefix + globals.file_prefix_signature +
- u"duplicity-full-signatures.%s.sigtar%s%s" %
- (dup_time.curtimestr, part_string, suffix))
+ b"duplicity-full-signatures.%s.sigtar%s%s" %
+ (dup_time.curtimestr.encode(), part_string, suffix))
elif type == u"new-sig":
if globals.short_filenames:
return (globals.file_prefix + globals.file_prefix_signature +
- u"dns.%s.%s.st%s%s" %
+ b"dns.%s.%s.st%s%s" %
(to_base36(dup_time.prevtime),
to_base36(dup_time.curtime),
part_string, suffix))
else:
return (globals.file_prefix + globals.file_prefix_signature +
- u"duplicity-new-signatures.%s.to.%s.sigtar%s%s" %
- (dup_time.prevtimestr, dup_time.curtimestr,
+ b"duplicity-new-signatures.%s.to.%s.sigtar%s%s" %
+ (dup_time.prevtimestr.encode(), dup_time.curtimestr.encode(),
part_string, suffix))
else:
assert volume_number or manifest
@@ -247,32 +256,32 @@
if volume_number:
if globals.short_filenames:
- vol_string = u"%s.dt" % to_base36(volume_number)
+ vol_string = b"%s.dt" % to_base36(volume_number)
else:
- vol_string = u"vol%d.difftar" % volume_number
+ vol_string = b"vol%d.difftar" % volume_number
prefix += globals.file_prefix_archive
else:
if globals.short_filenames:
- vol_string = u"m"
+ vol_string = b"m"
else:
- vol_string = u"manifest"
+ vol_string = b"manifest"
prefix += globals.file_prefix_manifest
if type == u"full":
if globals.short_filenames:
- return (u"%sdf.%s.%s%s%s" % (prefix, to_base36(dup_time.curtime),
+ return (b"%sdf.%s.%s%s%s" % (prefix, to_base36(dup_time.curtime),
vol_string, part_string, suffix))
else:
- return (u"%sduplicity-full.%s.%s%s%s" % (prefix, dup_time.curtimestr,
+ return (b"%sduplicity-full.%s.%s%s%s" % (prefix, dup_time.curtimestr.encode(),
vol_string, part_string, suffix))
elif type == u"inc":
if globals.short_filenames:
- return (u"%sdi.%s.%s.%s%s%s" % (prefix, to_base36(dup_time.prevtime),
+ return (b"%sdi.%s.%s.%s%s%s" % (prefix, to_base36(dup_time.prevtime),
to_base36(dup_time.curtime),
vol_string, part_string, suffix))
else:
- return (u"%sduplicity-inc.%s.to.%s.%s%s%s" % (prefix, dup_time.prevtimestr,
- dup_time.curtimestr,
+ return (b"%sduplicity-inc.%s.to.%s.%s%s%s" % (prefix, dup_time.prevtimestr.encode(),
+ dup_time.curtimestr.encode(),
vol_string, part_string, suffix))
else:
assert 0
@@ -286,6 +295,9 @@
u"""
Return time in seconds if string can be converted, None otherwise
"""
+ if isinstance(timestr, bytes):
+ timestr = timestr.decode()
+
if short:
t = from_base36(timestr)
else:
@@ -386,14 +398,14 @@
u"""
Set encryption and compression flags in ParseResults pr
"""
- if (filename.endswith(u'.z') or
- not globals.short_filenames and filename.endswith(u'gz')):
+ if (filename.endswith(b'.z') or
+ not globals.short_filenames and filename.endswith(b'gz')):
pr.compressed = 1
else:
pr.compressed = None
- if (filename.endswith(u'.g') or
- not globals.short_filenames and filename.endswith(u'.gpg')):
+ if (filename.endswith(b'.g') or
+ not globals.short_filenames and filename.endswith(b'.gpg')):
pr.encrypted = 1
else:
pr.encrypted = None
@@ -409,7 +421,7 @@
return pr
-class ParseResults:
+class ParseResults(object):
u"""
Hold information taken from a duplicity filename
"""
=== modified file 'duplicity/globals.py'
--- duplicity/globals.py 2018-09-11 21:35:37 +0000
+++ duplicity/globals.py 2018-11-29 19:03:43 +0000
@@ -30,16 +30,16 @@
version = u"$version"
# Prefix for all files (appended before type-specific prefixes)
-file_prefix = u""
+file_prefix = b""
# Prefix for manifest files only
-file_prefix_manifest = u""
+file_prefix_manifest = b""
# Prefix for archive files only
-file_prefix_archive = u""
+file_prefix_archive = b""
# Prefix for sig files only
-file_prefix_signature = u""
+file_prefix_signature = b""
# The name of the current host, or None if it cannot be set
hostname = socket.getfqdn()
=== modified file 'duplicity/globmatch.py'
--- duplicity/globmatch.py 2018-06-10 17:28:55 +0000
+++ duplicity/globmatch.py 2018-11-29 19:03:43 +0000
@@ -27,6 +27,7 @@
from builtins import map
from builtins import range
+from builtins import str
class GlobbingError(Exception):
@@ -68,7 +69,7 @@
Note: including a folder implicitly includes everything within it.
"""
- assert isinstance(glob_str, unicode)
+ assert isinstance(glob_str, str)
glob_ends_w_slash = False
if glob_str == u"/":
@@ -159,7 +160,7 @@
"""
# Internal. Used by glob_get_normal_sf, glob_get_prefix_res and unit tests.
- assert isinstance(pat, unicode)
+ assert isinstance(pat, str)
i, n, res = 0, len(pat), u''
while i < n:
=== modified file 'duplicity/gpg.py'
--- duplicity/gpg.py 2018-10-11 21:54:47 +0000
+++ duplicity/gpg.py 2018-11-29 19:03:43 +0000
@@ -25,6 +25,9 @@
see duplicity's README for details
"""
+from builtins import next
+from builtins import str
+from builtins import object
import os
import sys
import types
@@ -57,7 +60,7 @@
pass
-class GPGProfile:
+class GPGProfile(object):
u"""
Just hold some GPG settings, avoid passing tons of arguments
"""
@@ -91,7 +94,8 @@
self.gpg_version = self.get_gpg_version(globals.gpg_binary)
- _version_re = re.compile(r'^gpg.*\(GnuPG(?:/MacGPG2)?\) (?P<maj>[0-9]+)\.(?P<min>[0-9]+)\.(?P<bug>[0-9]+)(-.+)?$')
+ rc = re.compile
+ _version_re = rc(b'^gpg.*\\(GnuPG(?:/MacGPG2)?\\) (?P<maj>[0-9]+)\\.(?P<min>[0-9]+)\\.(?P<bug>[0-9]+)(-.+)?$')
def get_gpg_version(self, binary):
gnupg = gpginterface.GnuPG()
@@ -112,7 +116,7 @@
raise GPGError(u"failed to determine gnupg version of %s from %s" % (binary, line))
-class GPGFile:
+class GPGFile(object):
u"""
File-like object that encrypts decrypts another file on the fly
"""
@@ -243,7 +247,10 @@
try:
res = self.gpg_input.write(buf)
if res is not None:
- self.byte_count += len(res)
+ if sys.version_info.major >= 3:
+ self.byte_count += res
+ else:
+ self.byte_count += len(res)
except Exception:
self.gpg_failed()
return res
@@ -264,7 +271,7 @@
fp.seek(0)
for line in fp:
try:
- msg += unicode(line.strip(), locale.getpreferredencoding(), u'replace') + u"\n"
+ msg += str(line.strip(), locale.getpreferredencoding(), u'replace') + u"\n"
except Exception as e:
msg += line.strip() + u"\n"
msg += u"===== End GnuPG log =====\n"
@@ -316,13 +323,13 @@
"""
self.status_fp.seek(0)
status_buf = self.status_fp.read()
- match = re.search(u"^\\[GNUPG:\\] GOODSIG ([0-9A-F]*)",
+ match = re.search(b"^\\[GNUPG:\\] GOODSIG ([0-9A-F]*)",
status_buf, re.M)
if not match:
self.signature = None
else:
assert len(match.group(1)) >= 8
- self.signature = match.group(1)
+ self.signature = match.group(1).decode()
def get_signature(self):
u"""
@@ -383,7 +390,7 @@
if bytes_to_go < block_iter.get_read_size():
break
try:
- data = block_iter.next().data
+ data = block_iter.__next__().data
except StopIteration:
at_end_of_blockiter = 1
break
@@ -415,7 +422,7 @@
The input requirements on block_iter and the output is the same as
GPGWriteFile (returns true if wrote until end of block_iter).
"""
- class FileCounted:
+ class FileCounted(object):
u"""
Wrapper around file object that counts number of bytes written
"""
=== modified file 'duplicity/gpginterface.py'
--- duplicity/gpginterface.py 2018-09-11 21:35:37 +0000
+++ duplicity/gpginterface.py 2018-11-29 19:03:43 +0000
@@ -224,6 +224,7 @@
or see http://www.gnu.org/copyleft/lesser.html
"""
+from builtins import object
import os
import sys
import fcntl
@@ -244,8 +245,8 @@
_stds = [u'stdin', u'stdout', u'stderr']
# the permissions each type of fh needs to be opened with
-_fd_modes = {u'stdin': u'w',
- u'stdout': u'r',
+_fd_modes = {u'stdin': u'wb',
+ u'stdout': u'rb',
u'stderr': u'r',
u'passphrase': u'w',
u'command': u'w',
@@ -261,7 +262,7 @@
}
-class GnuPG:
+class GnuPG(object):
u"""Class instances represent GnuPG.
Instance attributes of a GnuPG object are:
@@ -387,10 +388,10 @@
process = Process()
- for fh_name in create_fhs + attach_fhs.keys():
+ for fh_name in create_fhs + list(attach_fhs.keys()):
if fh_name not in _fd_modes:
raise KeyError(u"unrecognized filehandle name '%s'; must be one of %s"
- % (fh_name, _fd_modes.keys()))
+ % (fh_name, list(_fd_modes.keys())))
for fh_name in create_fhs:
# make sure the user doesn't specify a filehandle
@@ -404,11 +405,14 @@
# that since pipes are unidirectional on some systems,
# so we have to 'turn the pipe around'
# if we are writing
- if _fd_modes[fh_name] == u'w':
+ if _fd_modes[fh_name] == u'w' or _fd_modes[fh_name] == u'wb':
pipe = (pipe[1], pipe[0])
+ if sys.version_info.major >= 3:
+ os.set_inheritable(pipe[0], True)
+ os.set_inheritable(pipe[1], True)
process._pipes[fh_name] = Pipe(pipe[0], pipe[1], 0)
- for fh_name, fh in attach_fhs.items():
+ for fh_name, fh in list(attach_fhs.items()):
process._pipes[fh_name] = Pipe(fh.fileno(), fh.fileno(), 1)
process.pid = os.fork()
@@ -425,7 +429,7 @@
def _as_parent(self, process):
u"""Stuff run after forking in parent"""
- for k, p in process._pipes.items():
+ for k, p in list(process._pipes.items()):
if not p.direct:
os.close(p.child)
process.handles[k] = os.fdopen(p.parent, _fd_modes[k])
@@ -442,14 +446,14 @@
p = process._pipes[std]
os.dup2(p.child, getattr(sys, u"__%s__" % std).fileno())
- for k, p in process._pipes.items():
+ for k, p in list(process._pipes.items()):
if p.direct and k not in _stds:
# we want the fh to stay open after execing
fcntl.fcntl(p.child, fcntl.F_SETFD, 0)
fd_args = []
- for k, p in process._pipes.items():
+ for k, p in list(process._pipes.items()):
# set command-line options for non-standard fds
if k not in _stds:
fd_args.extend([_fd_options[k], u"%d" % p.child])
@@ -462,7 +466,7 @@
os.execvp(command[0], command)
-class Pipe:
+class Pipe(object):
u"""simple struct holding stuff about pipes we use"""
def __init__(self, parent, child, direct):
self.parent = parent
@@ -470,7 +474,7 @@
self.direct = direct
-class Options:
+class Options(object):
u"""Objects of this class encompass options passed to GnuPG.
This class is responsible for determining command-line arguments
which are based on options. It can be said that a GnuPG
@@ -645,7 +649,7 @@
return args
-class Process:
+class Process(object):
u"""Objects of this class encompass properties of a GnuPG
process spawned by GnuPG.run().
=== modified file 'duplicity/lazy.py'
--- duplicity/lazy.py 2018-09-11 21:35:37 +0000
+++ duplicity/lazy.py 2018-11-29 19:03:43 +0000
@@ -22,13 +22,17 @@
u"""Define some lazy data structures and functions acting on them"""
from __future__ import print_function
+from builtins import map
+from builtins import next
+from builtins import range
+from builtins import object
import os
from duplicity import log
from duplicity import robust
from duplicity import util
-class Iter:
+class Iter(object):
u"""Hold static methods for the manipulation of lazy iterators"""
@staticmethod
@@ -204,10 +208,10 @@
while(1):
yield get_next(fork_num)
- return tuple(map(make_iterator, range(num_of_forks)))
-
-
-class IterMultiplex2:
+ return tuple(map(make_iterator, list(range(num_of_forks))))
+
+
+class IterMultiplex2(object):
u"""Multiplex an iterator into 2 parts
This is a special optimized case of the Iter.multiplex function,
@@ -249,7 +253,7 @@
yield elem
-class IterTreeReducer:
+class IterTreeReducer(object):
u"""Tree style reducer object for iterator - stolen from rdiff-backup
The indicies of a RORPIter form a tree type structure. This class
@@ -356,7 +360,7 @@
return 1
-class ITRBranch:
+class ITRBranch(object):
u"""Helper class for IterTreeReducer above
There are five stub functions below: start_process, end_process,
=== modified file 'duplicity/librsync.py'
--- duplicity/librsync.py 2018-10-11 21:54:47 +0000
+++ duplicity/librsync.py 2018-11-29 19:03:43 +0000
@@ -26,13 +26,16 @@
"""
+from builtins import str
+from builtins import object
import os
+import sys
from . import _librsync
import types
import array
if os.environ.get(u'READTHEDOCS') == u'True':
- import mock
+ import mock # pylint: disable=import-error
import duplicity
duplicity._librsync = mock.MagicMock()
@@ -51,7 +54,7 @@
pass
-class LikeFile:
+class LikeFile(object):
u"""File-like object used by SigFile, DeltaFile, and PatchFile"""
mode = u"rb"
@@ -65,7 +68,7 @@
self.infile = infile
self.closed = self.infile_closed = None
self.inbuf = b""
- self.outbuf = array.array(b'c')
+ self.outbuf = array.array(u'b')
self.eof = self.infile_eof = None
def check_file(self, file, need_seek=None):
@@ -88,7 +91,10 @@
self._add_to_outbuf_once()
real_len = min(length, len(self.outbuf))
- return_val = self.outbuf[:real_len].tostring()
+ if sys.version_info.major >= 3:
+ return_val = self.outbuf[:real_len].tobytes()
+ else:
+ return_val = self.outbuf[:real_len].tostring()
del self.outbuf[:real_len]
return return_val
@@ -101,7 +107,10 @@
except _librsync.librsyncError as e:
raise librsyncError(str(e))
self.inbuf = self.inbuf[len_inbuf_read:]
- self.outbuf.fromstring(cycle_out)
+ if sys.version_info.major >= 3:
+ self.outbuf.frombytes(cycle_out)
+ else:
+ self.outbuf.fromstring(cycle_out)
def _add_to_inbuf(self):
u"""Make sure len(self.inbuf) >= blocksize"""
@@ -172,13 +181,15 @@
"""
LikeFile.__init__(self, delta_file)
- if not isinstance(basis_file, types.FileType):
+ try:
+ basis_file.fileno()
+ except:
u""" tempfile.TemporaryFile() only guarantees a true file
object on posix platforms. on cygwin/windows a file-like
object whose file attribute is the underlying true file
object is returned.
"""
- if hasattr(basis_file, u'file') and isinstance(basis_file.file, types.FileType):
+ if hasattr(basis_file, u'file') and hasattr(basis_file.file, u'fileno'):
basis_file = basis_file.file
else:
raise TypeError(_(u"basis_file must be a (true) file or an object whose "
@@ -189,7 +200,7 @@
raise librsyncError(str(e))
-class SigGenerator:
+class SigGenerator(object):
u"""Calculate signature.
Input and output is same as SigFile, but the interface is like md5
=== modified file 'duplicity/log.py'
--- duplicity/log.py 2018-09-11 21:35:37 +0000
+++ duplicity/log.py 2018-11-29 19:03:43 +0000
@@ -22,7 +22,11 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
u"""Log various messages depending on verbosity level"""
+from __future__ import division
+from builtins import str
+from past.utils import old_div
+from builtins import object
import os
import sys
import logging
@@ -82,7 +86,7 @@
# assert line. As it is, we'll attempt to convert s to unicode if we
# are handed bytes. One day we should update the backends.
# assert isinstance(s, unicode)
- if not isinstance(s, unicode):
+ if not isinstance(s, str):
s = s.decode(u"utf8", u"replace")
_logger.log(DupToLoggerLevel(verb_level), s,
@@ -98,7 +102,7 @@
Log(s, DEBUG)
-class InfoCode:
+class InfoCode(object):
u"""Enumeration class to hold info code values.
These values should never change, as frontends rely upon them.
Don't use 0 or negative numbers."""
@@ -180,7 +184,7 @@
def TransferProgress(progress, eta, changed_bytes, elapsed, speed, stalled):
u"""Shortcut used for upload progress messages (verbosity 5)."""
dots = int(0.4 * progress) # int(40.0 * progress / 100.0) -- for 40 chars
- data_amount = float(changed_bytes) / 1024.0
+ data_amount = old_div(float(changed_bytes), 1024.0)
data_scale = u"KB"
if data_amount > 1000.0:
data_amount /= 1024.0
@@ -194,7 +198,7 @@
speed_scale = u"B"
else:
eta_str = _RemainingSecs2Str(eta)
- speed_amount = float(speed) / 1024.0
+ speed_amount = old_div(float(speed), 1024.0)
speed_scale = u"KB"
if speed_amount > 1000.0:
speed_amount /= 1024.0
@@ -216,13 +220,13 @@
def PrintCollectionStatus(col_stats, force_print=False):
u"""Prints a collection status to the log"""
- Log(unicode(col_stats), 8, InfoCode.collection_status,
+ Log(str(col_stats), 8, InfoCode.collection_status,
u'\n' + u'\n'.join(col_stats.to_log_info()), force_print)
def PrintCollectionFileChangedStatus(col_stats, filepath, force_print=False):
u"""Prints a collection status to the log"""
- Log(unicode(col_stats.get_file_changed_record(filepath)), 8, InfoCode.collection_status, None, force_print)
+ Log(str(col_stats.get_file_changed_record(filepath)), 8, InfoCode.collection_status, None, force_print)
def Notice(s):
@@ -230,7 +234,7 @@
Log(s, NOTICE)
-class WarningCode:
+class WarningCode(object):
u"""Enumeration class to hold warning code values.
These values should never change, as frontends rely upon them.
Don't use 0 or negative numbers."""
@@ -254,7 +258,7 @@
Log(s, WARNING, code, extra)
-class ErrorCode:
+class ErrorCode(object):
u"""Enumeration class to hold error code values.
These values should never change, as frontends rely upon them.
Don't use 0 or negative numbers. This code is returned by duplicity
=== modified file 'duplicity/manifest.py'
--- duplicity/manifest.py 2018-09-24 17:02:42 +0000
+++ duplicity/manifest.py 2018-11-29 19:03:43 +0000
@@ -21,9 +21,14 @@
u"""Create and edit manifest for session contents"""
-from future_builtins import filter
+from builtins import filter
+from builtins import map
+from builtins import range
+from builtins import object
+from future.builtins import filter
import re
+import sys
from duplicity import globals
from duplicity import log
@@ -38,7 +43,7 @@
pass
-class Manifest:
+class Manifest(object):
u"""
List of volumes and information about each one
"""
@@ -70,9 +75,9 @@
self.local_dirname = globals.local_path.name # @UndefinedVariable
if self.fh:
if self.hostname:
- self.fh.write(u"Hostname %s\n" % self.hostname)
+ self.fh.write(b"Hostname %s\n" % self.hostname.encode())
if self.local_dirname:
- self.fh.write(u"Localdir %s\n" % Quote(self.local_dirname))
+ self.fh.write(b"Localdir %s\n" % Quote(self.local_dirname))
return self
def check_dirinfo(self):
@@ -118,7 +123,7 @@
self.files_changed = files_changed
if self.fh:
- self.fh.write(u"Filelist %d\n" % len(self.files_changed))
+ self.fh.write(b"Filelist %d\n" % len(self.files_changed))
for fileinfo in self.files_changed:
self.fh.write(b" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0])))
@@ -157,9 +162,9 @@
@rtype: string
@return: self in string form
"""
- result = u""
+ result = b""
if self.hostname:
- result += b"Hostname %s\n" % self.hostname
+ result += b"Hostname %s\n" % self.hostname.encode()
if self.local_dirname:
result += b"Localdir %s\n" % Quote(self.local_dirname)
@@ -167,7 +172,7 @@
for fileinfo in self.files_changed:
result += b" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0]))
- vol_num_list = self.volume_info_dict.keys()
+ vol_num_list = list(self.volume_info_dict.keys())
vol_num_list.sort()
def vol_num_to_string(vol_num):
@@ -182,21 +187,26 @@
u"""
Initialize self from string s, return self
"""
+
def get_field(fieldname):
u"""
Return the value of a field by parsing s, or None if no field
"""
- m = re.search(u"(^|\\n)%s\\s(.*?)\n" % fieldname, s, re.I)
+ if not isinstance(fieldname, bytes):
+ fieldname = fieldname.encode()
+ m = re.search(b"(^|\\n)%s\\s(.*?)\n" % fieldname, s, re.I)
if not m:
return None
else:
return Unquote(m.group(2))
self.hostname = get_field(u"hostname")
+ if self.hostname is not None:
+ self.hostname = self.hostname.decode()
self.local_dirname = get_field(u"localdir")
highest_vol = 0
latest_vol = 0
- vi_regexp = re.compile(u"(?:^|\\n)(volume\\s.*(?:\\n.*)*?)(?=\\nvolume\\s|$)", re.I)
+ vi_regexp = re.compile(b"(?:^|\\n)(volume\\s.*(?:\\n.*)*?)(?=\\nvolume\\s|$)", re.I)
vi_iterator = vi_regexp.finditer(s)
for match in vi_iterator:
vi = VolumeInfo().from_string(match.group(1))
@@ -215,16 +225,16 @@
# Get file changed list - not needed if --file-changed not present
filecount = 0
if globals.file_changed is not None:
- filelist_regexp = re.compile(u"(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S)
+ filelist_regexp = re.compile(b"(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S)
match = filelist_regexp.search(s)
if match:
filecount = int(match.group(2))
if filecount > 0:
def parse_fileinfo(line):
fileinfo = line.strip().split()
- return (fileinfo[0], u''.join(fileinfo[1:]))
+ return (fileinfo[0], b''.join(fileinfo[1:]))
- self.files_changed = list(map(parse_fileinfo, match.group(3).split(u'\n')))
+ self.files_changed = list(map(parse_fileinfo, match.group(3).split(b'\n')))
if filecount != len(self.files_changed):
log.Error(_(u"Manifest file '%s' is corrupt: File count says %d, File list contains %d" %
@@ -240,9 +250,9 @@
u"""
Two manifests are equal if they contain the same volume infos
"""
- vi_list1 = self.volume_info_dict.keys()
+ vi_list1 = list(self.volume_info_dict.keys())
vi_list1.sort()
- vi_list2 = other.volume_info_dict.keys()
+ vi_list2 = list(other.volume_info_dict.keys())
vi_list2.sort()
if vi_list1 != vi_list2:
@@ -281,9 +291,10 @@
u"""
Return list of volume numbers that may contain index_prefix
"""
- return filter(lambda vol_num:
- self.volume_info_dict[vol_num].contains(index_prefix),
- self.volume_info_dict.keys())
+ if len(index_prefix) == 1 and isinstance(index_prefix[0], u"".__class__):
+ index_prefix = (index_prefix[0].encode(),)
+ return [vol_num for vol_num in list(self.volume_info_dict.keys()) if
+ self.volume_info_dict[vol_num].contains(index_prefix)]
class VolumeInfoError(Exception):
@@ -293,7 +304,7 @@
pass
-class VolumeInfo:
+class VolumeInfo(object):
u"""
Information about a single volume
"""
@@ -328,6 +339,10 @@
u"""
Set the value of hash hash_name (e.g. "MD5") to data
"""
+ if isinstance(hash_name, bytes):
+ hash_name = hash_name.decode()
+ if isinstance(data, bytes):
+ data = data.decode()
self.hashes[hash_name] = data
def get_best_hash(self):
@@ -347,7 +362,7 @@
return (u"MD5", self.hashes[u'MD5'])
except KeyError:
pass
- return self.hashes.items()[0]
+ return list(self.hashes.items())[0]
def to_string(self):
u"""
@@ -361,15 +376,20 @@
else:
return b"."
+ def bfmt(x):
+ if x is None:
+ return b" "
+ return str(x).encode()
+
slist = [b"Volume %d:" % self.volume_number]
whitespace = b" "
slist.append(b"%sStartingPath %s %s" %
- (whitespace, index_to_string(self.start_index), (self.start_block or b" ")))
+ (whitespace, index_to_string(self.start_index), bfmt(self.start_block)))
slist.append(b"%sEndingPath %s %s" %
- (whitespace, index_to_string(self.end_index), (self.end_block or b" ")))
+ (whitespace, index_to_string(self.end_index), bfmt(self.end_block)))
for key in self.hashes:
slist.append(b"%sHash %s %s" %
- (whitespace, key.encode(), self.hashes[key]))
+ (whitespace, key.encode(), self.hashes[key].encode()))
return b"\n".join(slist)
__str__ = to_string
@@ -390,7 +410,7 @@
linelist = s.strip().split(b"\n")
# Set volume number
- m = re.search(u"^Volume ([0-9]+):", linelist[0], re.I)
+ m = re.search(b"^Volume ([0-9]+):", linelist[0], re.I)
if not m:
raise VolumeInfoError(u"Bad first line '%s'" % (linelist[0],))
self.volume_number = int(m.group(1))
@@ -402,22 +422,22 @@
line_split = line.strip().split()
field_name = line_split[0].lower()
other_fields = line_split[1:]
- if field_name == u"Volume":
+        if field_name == b"volume":
log.Warn(_(u"Warning, found extra Volume identifier"))
break
- elif field_name == u"startingpath":
+ elif field_name == b"startingpath":
self.start_index = string_to_index(other_fields[0])
if len(other_fields) > 1:
self.start_block = int(other_fields[1])
else:
self.start_block = None
- elif field_name == u"endingpath":
+ elif field_name == b"endingpath":
self.end_index = string_to_index(other_fields[0])
if len(other_fields) > 1:
self.end_block = int(other_fields[1])
else:
self.end_block = None
- elif field_name == u"hash":
+ elif field_name == b"hash":
self.set_hash(other_fields[0], other_fields[1])
if self.start_index is None or self.end_index is None:
@@ -440,9 +460,9 @@
if self.end_index != other.end_index:
log.Notice(_(u"end_index don't match"))
return None
- hash_list1 = self.hashes.items()
+ hash_list1 = list(self.hashes.items())
hash_list1.sort()
- hash_list2 = other.hashes.items()
+ hash_list2 = list(other.hashes.items())
hash_list2.sort()
if hash_list1 != hash_list2:
log.Notice(_(u"Hashes don't match"))
@@ -471,7 +491,7 @@
return self.start_index <= index_prefix <= self.end_index
-nonnormal_char_re = re.compile(u"(\\s|[\\\\\"'])")
+nonnormal_char_re = re.compile(b"(\\s|[\\\\\"'])")
def Quote(s):
@@ -481,29 +501,40 @@
if not nonnormal_char_re.search(s):
return s # no quoting necessary
slist = []
- for char in s:
+ for i in range(0, len(s)):
+ char = s[i:i + 1]
if nonnormal_char_re.search(char):
slist.append(b"\\x%02x" % ord(char))
else:
slist.append(char)
- return b'"%s"' % u"".join(slist)
+ return b'"%s"' % b"".join(slist)
+
+
+def maybe_chr(ch):
+ if sys.version_info.major >= 3:
+ return chr(ch)
+ else:
+ return ch
def Unquote(quoted_string):
u"""
Return original string from quoted_string produced by above
"""
- if not quoted_string[0] == b'"' or quoted_string[0] == b"'":
+    if not (maybe_chr(quoted_string[0]) == u'"' or maybe_chr(quoted_string[0]) == u"'"):
return quoted_string
assert quoted_string[0] == quoted_string[-1]
return_list = []
i = 1 # skip initial char
while i < len(quoted_string) - 1:
- char = quoted_string[i]
+ char = quoted_string[i:i + 1]
if char == b"\\":
# quoted section
- assert quoted_string[i + 1] == b"x"
- return_list.append(chr(int(quoted_string[i + 2:i + 4], 16)))
+ assert maybe_chr(quoted_string[i + 1]) == u"x"
+ if sys.version_info.major >= 3:
+ return_list.append(int(quoted_string[i + 2:i + 4].decode(), 16).to_bytes(1, byteorder=u'big'))
+ else:
+ return_list.append(chr(int(quoted_string[i + 2:i + 4], 16)))
i += 4
else:
return_list.append(char)
=== modified file 'duplicity/patchdir.py'
--- duplicity/patchdir.py 2018-09-24 17:02:42 +0000
+++ duplicity/patchdir.py 2018-11-29 19:03:43 +0000
@@ -19,7 +19,12 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-from future_builtins import filter, map
+from builtins import map
+from builtins import filter
+from builtins import next
+from builtins import range
+from builtins import object
+from future.builtins import filter, map
import re # @UnusedImport
import types
@@ -149,9 +154,11 @@
def get_index_from_tarinfo(tarinfo):
u"""Return (index, difftype, multivol) pair from tarinfo object"""
- for prefix in [b"snapshot/", b"diff/", b"deleted/",
- b"multivol_diff/", b"multivol_snapshot/"]:
+ for prefix in [u"snapshot/", u"diff/", u"deleted/",
+ u"multivol_diff/", u"multivol_snapshot/"]:
tiname = util.get_tarinfo_name(tarinfo)
+ if sys.version_info.major == 2 and isinstance(prefix, unicode):
+ prefix = prefix.encode()
if tiname.startswith(prefix):
name = tiname[len(prefix):] # strip prefix
if prefix.startswith(u"multivol"):
@@ -161,32 +168,35 @@
difftype = u"snapshot"
multivol = 1
name, num_subs = \
- re.subn(b"(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$",
- b"\\2", tiname)
+ re.subn(u"(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$",
+ u"\\2", tiname)
if num_subs != 1:
raise PatchDirException(u"Unrecognized diff entry %s" %
- util.fsdecode(tiname))
+ tiname)
else:
difftype = prefix[:-1] # strip trailing /
name = tiname[len(prefix):]
- if name.endswith(b"/"):
+ if name.endswith(r"/"):
name = name[:-1] # strip trailing /'s
multivol = 0
break
else:
raise PatchDirException(u"Unrecognized diff entry %s" %
- util.fsdecode(tiname))
- if name == b"." or name == b"":
+ tiname)
+ if name == r"." or name == r"":
index = ()
else:
- index = tuple(name.split(b"/"))
+ if sys.version_info.major >= 3:
+ index = tuple(util.fsencode(name).split(b"/"))
+ else:
+ index = tuple(name.split(b"/"))
if b'..' in index:
raise PatchDirException(u"Tar entry %s contains '..'. Security "
u"violation" % util.fsdecode(tiname))
return (index, difftype, multivol)
-class Multivol_Filelike:
+class Multivol_Filelike(object):
u"""Emulate a file like object from multivols
Maintains a buffer about the size of a volume. When it is read()
@@ -310,7 +320,7 @@
basis_path.patch_with_attribs(diff_ropath)
-class TarFile_FromFileobjs:
+class TarFile_FromFileobjs(object):
u"""Like a tarfile.TarFile iterator, but read from multiple fileobjs"""
def __init__(self, fileobj_iter):
u"""Make new tarinfo iterator
@@ -323,7 +333,7 @@
self.tarfile, self.tar_iter = None, None
self.current_fp = None
- def __iter__(self):
+ def __iter__(self): # pylint: disable=non-iterator-returned
return self
def set_tarfile(self):
@@ -334,7 +344,7 @@
self.tarfile = util.make_tarfile(u"r", self.current_fp)
self.tar_iter = iter(self.tarfile)
- def next(self):
+ def __next__(self):
if not self.tarfile:
self.set_tarfile()
try:
@@ -382,7 +392,7 @@
def getleastindex(elems):
u"""Return the first index in elems, assuming elems isn't empty"""
- return min(map(lambda elem: elem.index, filter(lambda x: x, elems)))
+ return min([elem.index for elem in [x for x in elems if x]])
def yield_tuples(iter_num, overflow, elems):
while 1:
@@ -402,7 +412,7 @@
return yield_tuples(iter_num, overflow, elems)
-class IndexedTuple:
+class IndexedTuple(object):
u"""Like a tuple, but has .index (used previously by collate_iters)"""
def __init__(self, index, sequence):
self.index = index
@@ -488,7 +498,9 @@
for delta_ropath in patch_seq[1:]:
assert delta_ropath.difftype == u"diff", delta_ropath.difftype
- if not isinstance(current_file, file):
+        try:
+            cur_file.fileno()
+        except:
u"""
librsync insists on a real file object, which we create manually
by using the duplicity.tempdir to tell us where.
=== modified file 'duplicity/path.py'
--- duplicity/path.py 2018-09-24 17:02:42 +0000
+++ duplicity/path.py 2018-11-29 19:03:43 +0000
@@ -26,7 +26,10 @@
"""
-from future_builtins import filter
+from builtins import filter
+from builtins import str
+from builtins import object
+from future.builtins import filter
import stat
import errno
@@ -52,7 +55,7 @@
_tmp_path_counter = 1
-class StatResult:
+class StatResult(object):
u"""Used to emulate the output of os.stat() and related"""
# st_mode is required by the TarInfo class, but it's unclear how
# to generate it from file permissions.
@@ -63,7 +66,7 @@
pass
-class ROPath:
+class ROPath(object):
u"""Read only Path
Objects of this class doesn't represent real files, so they don't
@@ -199,6 +202,8 @@
elif type == tarfile.SYMTYPE:
self.type = u"sym"
self.symtext = tarinfo.linkname
+ if isinstance(self.symtext, u"".__class__):
+ self.symtext = util.fsencode(self.symtext)
elif type == tarfile.CHRTYPE:
self.type = u"chr"
self.devnums = (tarinfo.devmajor, tarinfo.devminor)
@@ -262,11 +267,11 @@
"""
ti = tarfile.TarInfo()
if self.index:
- ti.name = b"/".join(self.index)
+ ti.name = util.fsdecode(b"/".join(self.index))
else:
- ti.name = b"."
+ ti.name = u"."
if self.isdir():
- ti.name += b"/" # tar dir naming convention
+ ti.name += u"/" # tar dir naming convention
ti.size = 0
if self.type:
@@ -282,6 +287,8 @@
elif self.issym():
ti.type = tarfile.SYMTYPE
ti.linkname = self.symtext
+ if isinstance(ti.linkname, bytes):
+ ti.linkname = util.fsdecode(ti.linkname)
elif self.isdev():
if self.type == u"chr":
ti.type = tarfile.CHRTYPE
@@ -512,7 +519,7 @@
# self.opened should be true if the file has been opened, and
# self.fileobj can override returned fileobj
self.opened, self.fileobj = None, None
- if isinstance(base, unicode):
+ if isinstance(base, str):
# For now (Python 2), it is helpful to know that all paths
# are starting with bytes -- see note above util.fsencode definition
base = util.fsencode(base)
@@ -539,7 +546,7 @@
else:
self.stat = os.lstat(self.name)
except OSError as e:
- err_string = errno.errorcode[e[0]]
+ err_string = errno.errorcode[e.errno]
if err_string in [u"ENOENT", u"ENOTDIR", u"ELOOP", u"ENOTCONN"]:
self.stat, self.type = None, None # file doesn't exist
self.mode = None
@@ -552,6 +559,8 @@
def append(self, ext):
u"""Return new Path with ext added to index"""
+ if isinstance(ext, u"".__class__):
+ ext = util.fsencode(ext)
return self.__class__(self.base, self.index + (ext,))
def new_index(self, index):
@@ -627,11 +636,11 @@
if self.index:
return Path(self.base, self.index[:-1])
else:
- components = self.base.split(u"/")
+ components = self.base.split(b"/")
if len(components) == 2 and not components[0]:
- return Path(u"/") # already in root directory
+ return Path(b"/") # already in root directory
else:
- return Path(u"/".join(components[:-1]))
+ return Path(b"/".join(components[:-1]))
def writefileobj(self, fin):
u"""Copy file object fin to self. Close both when done."""
@@ -706,7 +715,7 @@
used with os.system.
"""
if not s:
- s = self.name
+ s = self.uc_name
return u'"%s"' % self.regex_chars_to_quote.sub(lambda m: u"\\" + m.group(0), s)
def unquote(self, s):
@@ -725,7 +734,7 @@
def get_filename(self):
u"""Return filename of last component"""
- components = self.name.split(u"/")
+ components = self.name.split(b"/")
assert components and components[-1]
return components[-1]
@@ -737,14 +746,13 @@
it's harder to remove "..", as "foo/bar/.." is not necessarily
"foo", so we can't use path.normpath()
"""
- newpath = u"/".join(filter(lambda x: x and x != u".",
- self.name.split(u"/")))
- if self.name[0] == u"/":
- return u"/" + newpath
+ newpath = b"/".join([x for x in self.name.split(b"/") if x and x != b"."])
+ if self.uc_name[0] == u"/":
+ return b"/" + newpath
elif newpath:
return newpath
else:
- return u"."
+ return b"."
class DupPath(Path):
=== modified file 'duplicity/progress.py'
--- duplicity/progress.py 2018-09-11 21:35:37 +0000
+++ duplicity/progress.py 2018-11-29 19:03:43 +0000
@@ -33,7 +33,10 @@
"""
from __future__ import absolute_import
+from __future__ import division
+from builtins import object
+from past.utils import old_div
import collections as sys_collections
import math
import threading
@@ -77,7 +80,7 @@
u"""
Serializes object to cache
"""
- progressfd = open(u'%s/progress' % globals.archive_dir_path.name, u'w+')
+ progressfd = open(b'%s/progress' % globals.archive_dir_path.name, u'wb+')
pickle.dump(self, progressfd)
progressfd.close()
@@ -103,7 +106,7 @@
self.last_vol = 0
-class ProgressTracker():
+class ProgressTracker(object):
def __init__(self):
self.total_stats = None
@@ -194,14 +197,14 @@
if self.is_full:
# Compute mean ratio of data transfer, assuming 1:1 data density
- self.current_estimation = float(self.total_bytecount) / float(total_changes)
+ self.current_estimation = old_div(float(self.total_bytecount), float(total_changes))
else:
# Compute mean ratio of data transfer, estimating unknown progress
- change_ratio = float(self.total_bytecount) / float(diffdir.stats.RawDeltaSize)
+ change_ratio = old_div(float(self.total_bytecount), float(diffdir.stats.RawDeltaSize))
change_delta = change_ratio - self.change_mean_ratio
- self.change_mean_ratio += change_delta / float(self.nsteps) # mean cumulated ratio
+ self.change_mean_ratio += old_div(change_delta, float(self.nsteps)) # mean cumulated ratio
self.change_r_estimation += change_delta * (change_ratio - self.change_mean_ratio)
- change_sigma = math.sqrt(math.fabs(self.change_r_estimation / float(self.nsteps)))
+ change_sigma = math.sqrt(math.fabs(old_div(self.change_r_estimation, float(self.nsteps))))
u"""
Combine variables for progress estimation
@@ -249,7 +252,7 @@
self.elapsed_sum += elapsed
projection = 1.0
if self.progress_estimation > 0:
- projection = (1.0 - self.progress_estimation) / self.progress_estimation
+ projection = old_div((1.0 - self.progress_estimation), self.progress_estimation)
self.time_estimation = int(projection * float(self.elapsed_sum.total_seconds()))
# Apply values only when monotonic, so the estimates look more consistent to the human eye
@@ -260,8 +263,8 @@
Compute Exponential Moving Average of speed as bytes/sec of the last 30 probes
"""
if elapsed.total_seconds() > 0:
- self.transfers.append(float(self.total_bytecount - self.last_total_bytecount) /
- float(elapsed.total_seconds()))
+ self.transfers.append(old_div(float(self.total_bytecount - self.last_total_bytecount),
+ float(elapsed.total_seconds())))
self.last_total_bytecount = self.total_bytecount
if len(self.transfers) > 30:
self.transfers.popleft()
=== modified file 'duplicity/robust.py'
--- duplicity/robust.py 2018-09-11 21:35:37 +0000
+++ duplicity/robust.py 2018-11-29 19:03:43 +0000
@@ -45,11 +45,11 @@
# TracebackArchive.add()
except (IOError, EnvironmentError, librsync.librsyncError, path.PathException) as exc:
if (not isinstance(exc, EnvironmentError) or
- ((exc[0] in errno.errorcode) and
- errno.errorcode[exc[0]] in
- [u'EPERM', u'ENOENT', u'EACCES', u'EBUSY', u'EEXIST',
- u'ENOTDIR', u'ENAMETOOLONG', u'EINTR', u'ENOTEMPTY',
- u'EIO', u'ETXTBSY', u'ESRCH', u'EINVAL'])):
+            hasattr(exc, u"errno") and exc.errno in errno.errorcode and
+ errno.errorcode[exc.errno] in
+ [u'EPERM', u'ENOENT', u'EACCES', u'EBUSY', u'EEXIST',
+ u'ENOTDIR', u'ENAMETOOLONG', u'EINTR', u'ENOTEMPTY',
+ u'EIO', u'ETXTBSY', u'ESRCH', u'EINVAL']):
# Log.exception()
if error_handler:
return error_handler(exc, *args)
=== modified file 'duplicity/selection.py'
--- duplicity/selection.py 2018-10-07 12:01:41 +0000
+++ duplicity/selection.py 2018-11-29 19:03:43 +0000
@@ -20,7 +20,10 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-from future_builtins import filter, map
+from builtins import next
+from builtins import str
+from builtins import object
+from builtins import filter, map
import os # @UnusedImport
import stat # @UnusedImport
@@ -43,7 +46,7 @@
"""
-class Select:
+class Select(object):
u"""Iterate appropriate Paths in given directory
This class acts as an iterator on account of its next() method.
@@ -87,13 +90,17 @@
self.rootpath = path
self.prefix = self.rootpath.uc_name
+ def __iter__(self): # pylint: disable=non-iterator-returned
+ return self
+
+ def __next__(self):
+ return next(self.iter)
+
def set_iter(self):
u"""Initialize generator, prepare to iterate."""
# Externally-accessed method
self.rootpath.setdata() # this may have changed since Select init
self.iter = self.Iterate(self.rootpath)
- self.next = self.iter.next
- self.__iter__ = lambda: self
return self
def Iterate(self, path):
@@ -241,10 +248,10 @@
filelists_index = 0
try:
for opt, arg in argtuples:
- assert isinstance(opt, unicode), u"option " + opt.decode(sys.getfilesystemencoding(), u"ignore") + \
- u" is not unicode"
- assert isinstance(arg, unicode), u"option " + arg.decode(sys.getfilesystemencoding(), u"ignore") + \
- u" is not unicode"
+ assert isinstance(opt, str), u"option " + opt.decode(sys.getfilesystemencoding(), u"ignore") + \
+ u" is not unicode"
+ assert isinstance(arg, str), u"option " + arg.decode(sys.getfilesystemencoding(), u"ignore") + \
+ u" is not unicode"
if opt == u"--exclude":
self.add_selection_func(self.glob_get_sf(arg, 0))
@@ -426,7 +433,7 @@
u"""Return selection function given by glob string"""
# Internal. Used by ParseArgs, filelist_globbing_get_sfs and unit tests.
assert include == 0 or include == 1
- assert isinstance(glob_str, unicode)
+ assert isinstance(glob_str, str)
if glob_str == u"**":
sel_func = lambda path: include
else:
@@ -488,7 +495,7 @@
things similar to this.
"""
- assert isinstance(glob_str, unicode), \
+ assert isinstance(glob_str, str), \
u"The glob string " + glob_str.decode(sys.getfilesystemencoding(), u"ignore") + u" is not unicode"
ignore_case = False
=== modified file 'duplicity/statistics.py'
--- duplicity/statistics.py 2018-10-10 20:25:04 +0000
+++ duplicity/statistics.py 2018-11-29 19:03:43 +0000
@@ -20,8 +20,14 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
u"""Generate and process backup statistics"""
+from __future__ import division
-from future_builtins import map
+from builtins import zip
+from builtins import map
+from builtins import str
+from builtins import object
+from past.utils import old_div
+from future.builtins import map
import re
import time
@@ -34,7 +40,7 @@
pass
-class StatsObj:
+class StatsObj(object):
u"""Contains various statistics, provide string conversion functions"""
# used when quoting files in get_stats_line
space_regex = re.compile(u" ")
@@ -150,10 +156,10 @@
u"""Return portion of statistics string dealing with time"""
timelist = []
if self.StartTime is not None:
- timelist.append(u"StartTime %.2f (%s)\n" %
+ timelist.append(u"StartTime %.2f (%s)\n" % # pylint: disable=bad-string-format-type
(self.StartTime, dup_time.timetopretty(self.StartTime)))
if self.EndTime is not None:
- timelist.append(u"EndTime %.2f (%s)\n" %
+ timelist.append(u"EndTime %.2f (%s)\n" % # pylint: disable=bad-string-format-type
(self.EndTime, dup_time.timetopretty(self.EndTime)))
if self.ElapsedTime or (self.StartTime is not None and
self.EndTime is not None):
@@ -201,7 +207,7 @@
for abbrev_bytes, abbrev_string in self.byte_abbrev_list:
if byte_count >= abbrev_bytes:
# Now get 3 significant figures
- abbrev_count = float(byte_count) / abbrev_bytes
+ abbrev_count = old_div(float(byte_count), abbrev_bytes)
if abbrev_count >= 100:
precision = 0
elif abbrev_count >= 10:
@@ -290,7 +296,7 @@
for attr in self.stat_attrs:
if self.get_stat(attr) is not None:
self.set_stat(attr,
- self.get_stat(attr) / float(len(statobj_list)))
+ old_div(self.get_stat(attr), float(len(statobj_list))))
return self
def get_statsobj_copy(self):
=== modified file 'duplicity/tempdir.py'
--- duplicity/tempdir.py 2018-09-11 21:35:37 +0000
+++ duplicity/tempdir.py 2018-11-29 19:03:43 +0000
@@ -26,6 +26,7 @@
The public interface of this module is thread-safe.
"""
+from builtins import object
import os
import threading
import tempfile
@@ -67,7 +68,7 @@
_defaultLock.release()
-class TemporaryDirectory:
+class TemporaryDirectory(object):
u"""
A temporary directory.
@@ -127,9 +128,11 @@
else:
global _initialSystemTempRoot
temproot = _initialSystemTempRoot
+ if isinstance(temproot, b"".__class__):
+ temproot = util.fsdecode(temproot)
self.__dir = tempfile.mkdtemp(u"-tempdir", u"duplicity-", temproot)
- log.Info(_(u"Using temporary directory %s") % util.fsdecode(self.__dir))
+ log.Info(_(u"Using temporary directory %s") % self.__dir)
# number of mktemp()/mkstemp() calls served so far
self.__tempcount = 0
@@ -171,7 +174,7 @@
try:
self.__tempcount = self.__tempcount + 1
suffix = u"-%d" % (self.__tempcount,)
- filename = tempfile.mktemp(suffix, u"mktemp-", self.__dir)
+ filename = tempfile.mktemp(suffix, u"mktemp-", util.fsdecode(self.__dir))
log.Debug(_(u"Registering (mktemp) temporary file %s") % util.fsdecode(filename))
self.__pending[filename] = None
@@ -193,9 +196,9 @@
try:
self.__tempcount = self.__tempcount + 1
suffix = u"-%d" % (self.__tempcount,)
- fd, filename = tempfile.mkstemp(suffix, u"mkstemp-", self.__dir)
+ fd, filename = tempfile.mkstemp(suffix, u"mkstemp-", self.__dir,)
- log.Debug(_(u"Registering (mkstemp) temporary file %s") % util.fsdecode(filename))
+ log.Debug(_(u"Registering (mkstemp) temporary file %s") % filename)
self.__pending[filename] = None
finally:
self.__lock.release()
@@ -247,7 +250,7 @@
self.__lock.acquire()
try:
if self.__dir is not None:
- for file in self.__pending.keys():
+ for file in list(self.__pending.keys()):
try:
log.Debug(_(u"Removing still remembered temporary file %s") % util.fsdecode(file))
util.ignore_missing(os.unlink, file)
=== modified file 'duplicity/util.py'
--- duplicity/util.py 2018-09-24 17:02:42 +0000
+++ duplicity/util.py 2018-11-29 19:03:43 +0000
@@ -23,6 +23,9 @@
Miscellaneous utilities.
"""
+from builtins import str
+from builtins import map
+from builtins import object
import errno
import os
import string
@@ -77,9 +80,14 @@
lines.extend(traceback.format_exception_only(type, value))
msg = u"Traceback (innermost last):\n"
- msg = msg + u"%-20s %s" % (string.join(lines[:-1], u""), lines[-1])
+ if sys.version_info.major >= 3:
+ msg = msg + u"%-20s %s" % (str.join(u"", lines[:-1]), lines[-1])
+ else:
+ msg = msg + u"%-20s %s" % (string.join(lines[:-1], u""), lines[-1])
- return msg.decode(u'unicode-escape', u'replace')
+ if sys.version_info.major < 3:
+ return msg.decode(u'unicode-escape', u'replace')
+ return msg
def escape(string):
@@ -101,7 +109,7 @@
# non-ascii will cause a UnicodeDecodeError when implicitly decoding to
# unicode. So we decode manually, using the filesystem encoding.
# 99.99% of the time, this will be a fine encoding to use.
- return fsdecode(unicode(e).encode(u'utf-8'))
+ return fsdecode(str(e).encode(u'utf-8'))
def maybe_ignore_errors(fn):
@@ -130,7 +138,7 @@
pass
-class FakeTarFile:
+class FakeTarFile(object):
debug = 0
def __iter__(self):
@@ -159,8 +167,8 @@
# Python versions before 2.6 ensure that directories end with /, but 2.6
# and later ensure they they *don't* have /. ::shrug:: Internally, we
# continue to use pre-2.6 method.
- if ti.isdir() and not ti.name.endswith(b"/"):
- return ti.name + b"/"
+ if ti.isdir() and not ti.name.endswith(r"/"):
+ return ti.name + r"/"
else:
return ti.name
=== modified file 'po/duplicity.pot'
--- po/duplicity.pot 2018-10-16 20:56:54 +0000
+++ po/duplicity.pot 2018-11-29 19:03:43 +0000
@@ -8,7 +8,7 @@
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: Kenneth Loafman <kenneth@xxxxxxxxxxx>\n"
-"POT-Creation-Date: 2018-10-16 15:41-0500\n"
+"POT-Creation-Date: 2018-11-29 11:12-0600\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@xxxxxx>\n"
@@ -18,313 +18,313 @@
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\n"
-#: ../bin/duplicity:122
+#: ../bin/duplicity:128
msgid "Reuse configured PASSPHRASE as SIGN_PASSPHRASE"
msgstr ""
-#: ../bin/duplicity:129
+#: ../bin/duplicity:135
msgid "Reuse configured SIGN_PASSPHRASE as PASSPHRASE"
msgstr ""
-#: ../bin/duplicity:168
+#: ../bin/duplicity:174
msgid "PASSPHRASE variable not set, asking user."
msgstr ""
-#: ../bin/duplicity:183
+#: ../bin/duplicity:189
msgid "GnuPG passphrase for signing key:"
msgstr ""
-#: ../bin/duplicity:188
+#: ../bin/duplicity:194
msgid "GnuPG passphrase:"
msgstr ""
-#: ../bin/duplicity:193
+#: ../bin/duplicity:199
msgid "Retype passphrase for signing key to confirm: "
msgstr ""
-#: ../bin/duplicity:195
+#: ../bin/duplicity:201
msgid "Retype passphrase to confirm: "
msgstr ""
-#: ../bin/duplicity:198
+#: ../bin/duplicity:204
msgid "First and second passphrases do not match! Please try again."
msgstr ""
-#: ../bin/duplicity:205
+#: ../bin/duplicity:211
msgid ""
"Cannot use empty passphrase with symmetric encryption! Please try again."
msgstr ""
-#: ../bin/duplicity:262
+#: ../bin/duplicity:268
#, python-format
msgid ""
"File %s complete in backup set.\n"
"Continuing restart on file %s."
msgstr ""
-#: ../bin/duplicity:271
+#: ../bin/duplicity:277
#, python-format
msgid ""
"File %s missing in backup set.\n"
"Continuing restart on file %s."
msgstr ""
-#: ../bin/duplicity:320
+#: ../bin/duplicity:326
#, python-format
msgid "File %s was corrupted during upload."
msgstr ""
-#: ../bin/duplicity:353
+#: ../bin/duplicity:359
msgid ""
"Restarting backup, but current encryption settings do not match original "
"settings"
msgstr ""
-#: ../bin/duplicity:376
+#: ../bin/duplicity:382
#, python-format
msgid "Restarting after volume %s, file %s, block %s"
msgstr ""
-#: ../bin/duplicity:446
+#: ../bin/duplicity:452
#, python-format
msgid "Processed volume %d"
msgstr ""
-#: ../bin/duplicity:596
+#: ../bin/duplicity:602
msgid ""
"Fatal Error: Unable to start incremental backup. Old signatures not found "
"and incremental specified"
msgstr ""
-#: ../bin/duplicity:600
+#: ../bin/duplicity:606
msgid "No signatures found, switching to full backup."
msgstr ""
-#: ../bin/duplicity:614
+#: ../bin/duplicity:620
msgid "Backup Statistics"
msgstr ""
-#: ../bin/duplicity:720
+#: ../bin/duplicity:726
#, python-format
msgid "%s not found in archive - no files restored."
msgstr ""
-#: ../bin/duplicity:724
+#: ../bin/duplicity:730
msgid "No files found in archive - nothing restored."
msgstr ""
-#: ../bin/duplicity:757
+#: ../bin/duplicity:763
#, python-format
msgid "Processed volume %d of %d"
msgstr ""
-#: ../bin/duplicity:791
+#: ../bin/duplicity:797
#, python-format
msgid "Invalid data - %s hash mismatch for file:"
msgstr ""
-#: ../bin/duplicity:794
+#: ../bin/duplicity:800
#, python-format
msgid "Calculated hash: %s"
msgstr ""
-#: ../bin/duplicity:795
+#: ../bin/duplicity:801
#, python-format
msgid "Manifest hash: %s"
msgstr ""
-#: ../bin/duplicity:838
+#: ../bin/duplicity:844
#, python-format
msgid "Volume was signed by key %s, not %s"
msgstr ""
-#: ../bin/duplicity:870
+#: ../bin/duplicity:876
#, python-format
msgid "Verify complete: %s, %s."
msgstr ""
-#: ../bin/duplicity:871
+#: ../bin/duplicity:877
#, python-format
msgid "%d file compared"
msgid_plural "%d files compared"
msgstr[0] ""
msgstr[1] ""
-#: ../bin/duplicity:873
+#: ../bin/duplicity:879
#, python-format
msgid "%d difference found"
msgid_plural "%d differences found"
msgstr[0] ""
msgstr[1] ""
-#: ../bin/duplicity:892
+#: ../bin/duplicity:898
msgid "No extraneous files found, nothing deleted in cleanup."
msgstr ""
-#: ../bin/duplicity:897
+#: ../bin/duplicity:903
msgid "Deleting this file from backend:"
msgid_plural "Deleting these files from backend:"
msgstr[0] ""
msgstr[1] ""
-#: ../bin/duplicity:908
+#: ../bin/duplicity:914
msgid "Found the following file to delete:"
msgid_plural "Found the following files to delete:"
msgstr[0] ""
msgstr[1] ""
-#: ../bin/duplicity:911
+#: ../bin/duplicity:917
msgid "Run duplicity again with the --force option to actually delete."
msgstr ""
-#: ../bin/duplicity:954
+#: ../bin/duplicity:960
msgid "There are backup set(s) at time(s):"
msgstr ""
-#: ../bin/duplicity:956
+#: ../bin/duplicity:962
msgid "Which can't be deleted because newer sets depend on them."
msgstr ""
-#: ../bin/duplicity:960
+#: ../bin/duplicity:966
msgid ""
"Current active backup chain is older than specified time. However, it will "
"not be deleted. To remove all your backups, manually purge the repository."
msgstr ""
-#: ../bin/duplicity:973
+#: ../bin/duplicity:979
msgid "No old backup sets found, nothing deleted."
msgstr ""
-#: ../bin/duplicity:976
+#: ../bin/duplicity:982
msgid "Deleting backup chain at time:"
msgid_plural "Deleting backup chains at times:"
msgstr[0] ""
msgstr[1] ""
-#: ../bin/duplicity:988
+#: ../bin/duplicity:994
#, python-format
msgid "Deleting any incremental signature chain rooted at %s"
msgstr ""
-#: ../bin/duplicity:990
+#: ../bin/duplicity:996
#, python-format
msgid "Deleting any incremental backup chain rooted at %s"
msgstr ""
-#: ../bin/duplicity:993
+#: ../bin/duplicity:999
#, python-format
msgid "Deleting complete signature chain %s"
msgstr ""
-#: ../bin/duplicity:995
+#: ../bin/duplicity:1001
#, python-format
msgid "Deleting complete backup chain %s"
msgstr ""
-#: ../bin/duplicity:1001
+#: ../bin/duplicity:1007
msgid "Found old backup chain at the following time:"
msgid_plural "Found old backup chains at the following times:"
msgstr[0] ""
msgstr[1] ""
-#: ../bin/duplicity:1005
+#: ../bin/duplicity:1011
msgid "Rerun command with --force option to actually delete."
msgstr ""
-#: ../bin/duplicity:1028
+#: ../bin/duplicity:1034
msgid "No old backup sets found."
msgstr ""
-#: ../bin/duplicity:1043
+#: ../bin/duplicity:1049
#, python-format
msgid "Signature %s already replicated"
msgstr ""
-#: ../bin/duplicity:1050 ../bin/duplicity:1089
+#: ../bin/duplicity:1056 ../bin/duplicity:1095
#, python-format
msgid "Replicating %s."
msgstr ""
-#: ../bin/duplicity:1077
+#: ../bin/duplicity:1083
#, python-format
msgid "Backupset %s already replicated"
msgstr ""
-#: ../bin/duplicity:1198
+#: ../bin/duplicity:1204
#, python-format
msgid "Deleting local %s (not authoritative at backend)."
msgstr ""
-#: ../bin/duplicity:1203
+#: ../bin/duplicity:1209
#, python-format
msgid "Unable to delete %s: %s"
msgstr ""
-#: ../bin/duplicity:1234 ../duplicity/dup_temp.py:266
+#: ../bin/duplicity:1243 ../duplicity/dup_temp.py:267
#, python-format
msgid "Failed to read %s: %s"
msgstr ""
-#: ../bin/duplicity:1248
+#: ../bin/duplicity:1257
#, python-format
msgid "Copying %s to local cache."
msgstr ""
-#: ../bin/duplicity:1296
+#: ../bin/duplicity:1305
msgid "Local and Remote metadata are synchronized, no sync needed."
msgstr ""
-#: ../bin/duplicity:1301
+#: ../bin/duplicity:1310
msgid "Synchronizing remote metadata to local cache..."
msgstr ""
-#: ../bin/duplicity:1313
+#: ../bin/duplicity:1322
msgid "Sync would copy the following from remote to local:"
msgstr ""
-#: ../bin/duplicity:1316
+#: ../bin/duplicity:1325
msgid "Sync would remove the following spurious local files:"
msgstr ""
-#: ../bin/duplicity:1360
+#: ../bin/duplicity:1369
msgid "Unable to get free space on temp."
msgstr ""
-#: ../bin/duplicity:1368
+#: ../bin/duplicity:1377
#, python-format
msgid "Temp space has %d available, backup needs approx %d."
msgstr ""
-#: ../bin/duplicity:1371
+#: ../bin/duplicity:1380
#, python-format
msgid "Temp has %d available, backup will use approx %d."
msgstr ""
-#: ../bin/duplicity:1379
+#: ../bin/duplicity:1388
msgid "Unable to get max open files."
msgstr ""
-#: ../bin/duplicity:1383
+#: ../bin/duplicity:1392
#, python-format
msgid ""
"Max open files of %s is too low, should be >= 1024.\n"
"Use 'ulimit -n 1024' or higher to correct.\n"
msgstr ""
-#: ../bin/duplicity:1435
+#: ../bin/duplicity:1444
msgid ""
"RESTART: The first volume failed to upload before termination.\n"
" Restart is impossible...starting backup from beginning."
msgstr ""
-#: ../bin/duplicity:1441
+#: ../bin/duplicity:1450
#, python-format
msgid ""
"RESTART: Volumes %d to %d failed to upload before termination.\n"
" Restarting backup at volume %d."
msgstr ""
-#: ../bin/duplicity:1448
+#: ../bin/duplicity:1457
#, python-format
msgid ""
"RESTART: Impossible backup state: manifest has %d vols, remote has %d vols.\n"
@@ -333,7 +333,7 @@
" backup then restart the backup from the beginning."
msgstr ""
-#: ../bin/duplicity:1469
+#: ../bin/duplicity:1478
msgid ""
"\n"
"PYTHONOPTIMIZE in the environment causes duplicity to fail to\n"
@@ -343,96 +343,96 @@
"See https://bugs.launchpad.net/duplicity/+bug/931175\n"
msgstr ""
-#: ../bin/duplicity:1492
+#: ../bin/duplicity:1501
#, python-format
msgid "Acquiring lockfile %s"
msgstr ""
-#: ../bin/duplicity:1550
+#: ../bin/duplicity:1559
#, python-format
msgid "Last %s backup left a partial set, restarting."
msgstr ""
-#: ../bin/duplicity:1554
+#: ../bin/duplicity:1563
#, python-format
msgid "Cleaning up previous partial %s backup set, restarting."
msgstr ""
-#: ../bin/duplicity:1566
+#: ../bin/duplicity:1575
msgid "Last full backup date:"
msgstr ""
-#: ../bin/duplicity:1568
+#: ../bin/duplicity:1577
msgid "Last full backup date: none"
msgstr ""
-#: ../bin/duplicity:1570
+#: ../bin/duplicity:1580
msgid "Last full backup is too old, forcing full backup"
msgstr ""
-#: ../bin/duplicity:1619
+#: ../bin/duplicity:1629
msgid ""
"When using symmetric encryption, the signing passphrase must equal the "
"encryption passphrase."
msgstr ""
-#: ../bin/duplicity:1688
+#: ../bin/duplicity:1698
msgid "INT intercepted...exiting."
msgstr ""
-#: ../bin/duplicity:1696
+#: ../bin/duplicity:1706
#, python-format
msgid "GPG error detail: %s"
msgstr ""
-#: ../bin/duplicity:1706
+#: ../bin/duplicity:1716
#, python-format
msgid "User error detail: %s"
msgstr ""
-#: ../bin/duplicity:1716
+#: ../bin/duplicity:1726
#, python-format
msgid "Backend error detail: %s"
msgstr ""
-#: ../bin/rdiffdir:62 ../duplicity/commandline.py:263
+#: ../bin/rdiffdir:63 ../duplicity/commandline.py:269
#, python-format
msgid "Error opening file %s"
msgstr ""
-#: ../bin/rdiffdir:129
+#: ../bin/rdiffdir:130
#, python-format
msgid "File %s already exists, will not overwrite."
msgstr ""
-#: ../duplicity/selection.py:114
+#: ../duplicity/selection.py:121
#, python-format
msgid "Skipping socket %s"
msgstr ""
-#: ../duplicity/selection.py:118
+#: ../duplicity/selection.py:125
#, python-format
msgid "Error initializing file %s"
msgstr ""
-#: ../duplicity/selection.py:122 ../duplicity/selection.py:147
-#: ../duplicity/selection.py:456
+#: ../duplicity/selection.py:129 ../duplicity/selection.py:154
+#: ../duplicity/selection.py:463
#, python-format
msgid "Error accessing possibly locked file %s"
msgstr ""
-#: ../duplicity/selection.py:162
+#: ../duplicity/selection.py:169
#, python-format
msgid "Warning: base %s doesn't exist, continuing"
msgstr ""
-#: ../duplicity/selection.py:165 ../duplicity/selection.py:183
-#: ../duplicity/selection.py:186
+#: ../duplicity/selection.py:172 ../duplicity/selection.py:190
+#: ../duplicity/selection.py:193
#, python-format
msgid "Selecting %s"
msgstr ""
-#: ../duplicity/selection.py:288
+#: ../duplicity/selection.py:295
#, python-format
msgid ""
"Fatal Error: The file specification\n"
@@ -443,14 +443,14 @@
"pattern (such as '**') which matches the base directory."
msgstr ""
-#: ../duplicity/selection.py:297
+#: ../duplicity/selection.py:304
#, python-format
msgid ""
"Fatal Error while processing expression\n"
"%s"
msgstr ""
-#: ../duplicity/selection.py:306
+#: ../duplicity/selection.py:313
#, python-format
msgid ""
"Last selection expression:\n"
@@ -460,23 +460,23 @@
"probably isn't what you meant."
msgstr ""
-#: ../duplicity/selection.py:362
+#: ../duplicity/selection.py:369
#, python-format
msgid "Reading globbing filelist %s"
msgstr ""
-#: ../duplicity/selection.py:395
+#: ../duplicity/selection.py:402
#, python-format
msgid "Error compiling regular expression %s"
msgstr ""
-#: ../duplicity/selection.py:412
+#: ../duplicity/selection.py:419
msgid ""
"Warning: exclude-device-files is not the first selector.\n"
"This may not be what you intended"
msgstr ""
-#: ../duplicity/commandline.py:75
+#: ../duplicity/commandline.py:78
#, python-format
msgid ""
"Warning: Option %s is pending deprecation and will be removed in a future "
@@ -484,7 +484,7 @@
"Use of default filenames is strongly suggested."
msgstr ""
-#: ../duplicity/commandline.py:82
+#: ../duplicity/commandline.py:85
#, python-format
msgid ""
"Warning: Option %s is pending deprecation and will be removed in a future "
@@ -493,7 +493,7 @@
"should be used instead."
msgstr ""
-#: ../duplicity/commandline.py:92
+#: ../duplicity/commandline.py:95
#, python-format
msgid ""
"Warning: Option %s is pending deprecation and will be removed in a future "
@@ -505,10 +505,10 @@
#. Used in usage help to represent a Unix-style path name. Example:
#. --archive-dir <path>
-#: ../duplicity/commandline.py:284 ../duplicity/commandline.py:294
-#: ../duplicity/commandline.py:315 ../duplicity/commandline.py:389
-#: ../duplicity/commandline.py:407 ../duplicity/commandline.py:620
-#: ../duplicity/commandline.py:653 ../duplicity/commandline.py:852
+#: ../duplicity/commandline.py:290 ../duplicity/commandline.py:300
+#: ../duplicity/commandline.py:321 ../duplicity/commandline.py:395
+#: ../duplicity/commandline.py:413 ../duplicity/commandline.py:626
+#: ../duplicity/commandline.py:659 ../duplicity/commandline.py:858
msgid "path"
msgstr ""
@@ -518,9 +518,9 @@
#. --hidden-encrypt-key <gpg_key_id>
#. Used in usage help to represent an ID for a GnuPG key. Example:
#. --encrypt-key <gpg_key_id>
-#: ../duplicity/commandline.py:310 ../duplicity/commandline.py:317
-#: ../duplicity/commandline.py:413 ../duplicity/commandline.py:604
-#: ../duplicity/commandline.py:825
+#: ../duplicity/commandline.py:316 ../duplicity/commandline.py:323
+#: ../duplicity/commandline.py:419 ../duplicity/commandline.py:610
+#: ../duplicity/commandline.py:831
msgid "gpg-key-id"
msgstr ""
@@ -528,43 +528,43 @@
#. matching one or more files, as described in the documentation.
#. Example:
#. --exclude <shell_pattern>
-#: ../duplicity/commandline.py:325 ../duplicity/commandline.py:438
-#: ../duplicity/commandline.py:875
+#: ../duplicity/commandline.py:331 ../duplicity/commandline.py:444
+#: ../duplicity/commandline.py:881
msgid "shell_pattern"
msgstr ""
#. Used in usage help to represent the name of a file. Example:
#. --log-file <filename>
-#: ../duplicity/commandline.py:331 ../duplicity/commandline.py:340
-#: ../duplicity/commandline.py:347 ../duplicity/commandline.py:440
-#: ../duplicity/commandline.py:447 ../duplicity/commandline.py:460
-#: ../duplicity/commandline.py:821
+#: ../duplicity/commandline.py:337 ../duplicity/commandline.py:346
+#: ../duplicity/commandline.py:353 ../duplicity/commandline.py:446
+#: ../duplicity/commandline.py:453 ../duplicity/commandline.py:466
+#: ../duplicity/commandline.py:827
msgid "filename"
msgstr ""
#. Used in usage help to represent a regular expression (regexp).
-#: ../duplicity/commandline.py:354 ../duplicity/commandline.py:451
+#: ../duplicity/commandline.py:360 ../duplicity/commandline.py:457
msgid "regular_expression"
msgstr ""
#. Used in usage help to represent a time spec for a previous
#. point in time, as described in the documentation. Example:
#. duplicity remove-older-than time [options] target_url
-#: ../duplicity/commandline.py:358 ../duplicity/commandline.py:401
-#: ../duplicity/commandline.py:522 ../duplicity/commandline.py:907
+#: ../duplicity/commandline.py:364 ../duplicity/commandline.py:407
+#: ../duplicity/commandline.py:528 ../duplicity/commandline.py:913
msgid "time"
msgstr ""
#. Used in usage help. (Should be consistent with the "Options:"
#. header.) Example:
#. duplicity [full|incremental] [options] source_dir target_url
-#: ../duplicity/commandline.py:409 ../duplicity/commandline.py:502
-#: ../duplicity/commandline.py:525 ../duplicity/commandline.py:612
-#: ../duplicity/commandline.py:840
+#: ../duplicity/commandline.py:415 ../duplicity/commandline.py:508
+#: ../duplicity/commandline.py:531 ../duplicity/commandline.py:618
+#: ../duplicity/commandline.py:846
msgid "options"
msgstr ""
-#: ../duplicity/commandline.py:424
+#: ../duplicity/commandline.py:430
#, python-format
msgid ""
"Running in 'ignore errors' mode due to %s; please re-consider if this was "
@@ -572,50 +572,50 @@
msgstr ""
#. Used in usage help to represent an imap mailbox
-#: ../duplicity/commandline.py:436
+#: ../duplicity/commandline.py:442
msgid "imap_mailbox"
msgstr ""
-#: ../duplicity/commandline.py:454
+#: ../duplicity/commandline.py:460
msgid "file_descriptor"
msgstr ""
#. Used in usage help to represent a desired number of
#. something. Example:
#. --num-retries <number>
-#: ../duplicity/commandline.py:465 ../duplicity/commandline.py:487
-#: ../duplicity/commandline.py:499 ../duplicity/commandline.py:508
-#: ../duplicity/commandline.py:552 ../duplicity/commandline.py:557
-#: ../duplicity/commandline.py:561 ../duplicity/commandline.py:575
-#: ../duplicity/commandline.py:581 ../duplicity/commandline.py:585
-#: ../duplicity/commandline.py:648 ../duplicity/commandline.py:835
+#: ../duplicity/commandline.py:471 ../duplicity/commandline.py:493
+#: ../duplicity/commandline.py:505 ../duplicity/commandline.py:514
+#: ../duplicity/commandline.py:558 ../duplicity/commandline.py:563
+#: ../duplicity/commandline.py:567 ../duplicity/commandline.py:581
+#: ../duplicity/commandline.py:587 ../duplicity/commandline.py:591
+#: ../duplicity/commandline.py:654 ../duplicity/commandline.py:841
msgid "number"
msgstr ""
#. Used in usage help (noun)
-#: ../duplicity/commandline.py:468
+#: ../duplicity/commandline.py:474
msgid "backup name"
msgstr ""
-#: ../duplicity/commandline.py:570
+#: ../duplicity/commandline.py:576
msgid "policy"
msgstr ""
#. noun
-#: ../duplicity/commandline.py:588 ../duplicity/commandline.py:591
-#: ../duplicity/commandline.py:806
+#: ../duplicity/commandline.py:594 ../duplicity/commandline.py:597
+#: ../duplicity/commandline.py:812
msgid "command"
msgstr ""
-#: ../duplicity/commandline.py:594
+#: ../duplicity/commandline.py:600
msgid "pyrax|cloudfiles"
msgstr ""
-#: ../duplicity/commandline.py:615
+#: ../duplicity/commandline.py:621
msgid "pem formatted bundle of certificate authorities"
msgstr ""
-#: ../duplicity/commandline.py:616
+#: ../duplicity/commandline.py:622
msgid "path to a folder with certificate authority files"
msgstr ""
@@ -625,113 +625,113 @@
#. --backend-retry-delay <seconds>
#. Used in usage help. Example:
#. --timeout <seconds>
-#: ../duplicity/commandline.py:625 ../duplicity/commandline.py:659
-#: ../duplicity/commandline.py:869
+#: ../duplicity/commandline.py:631 ../duplicity/commandline.py:665
+#: ../duplicity/commandline.py:875
msgid "seconds"
msgstr ""
#. abbreviation for "character" (noun)
-#: ../duplicity/commandline.py:631 ../duplicity/commandline.py:803
+#: ../duplicity/commandline.py:637 ../duplicity/commandline.py:809
msgid "char"
msgstr ""
-#: ../duplicity/commandline.py:769
+#: ../duplicity/commandline.py:775
#, python-format
msgid "Using archive dir: %s"
msgstr ""
-#: ../duplicity/commandline.py:770
+#: ../duplicity/commandline.py:776
#, python-format
msgid "Using backup name: %s"
msgstr ""
-#: ../duplicity/commandline.py:777
+#: ../duplicity/commandline.py:783
#, python-format
msgid "Command line error: %s"
msgstr ""
-#: ../duplicity/commandline.py:778
+#: ../duplicity/commandline.py:784
msgid "Enter 'duplicity --help' for help screen."
msgstr ""
#. Used in usage help to represent a Unix-style path name. Example:
#. rsync://user[:password]@other_host[:port]//absolute_path
-#: ../duplicity/commandline.py:791
+#: ../duplicity/commandline.py:797
msgid "absolute_path"
msgstr ""
#. Used in usage help. Example:
#. tahoe://alias/some_dir
-#: ../duplicity/commandline.py:795
+#: ../duplicity/commandline.py:801
msgid "alias"
msgstr ""
#. Used in help to represent a "bucket name" for Amazon Web
#. Services' Simple Storage Service (S3). Example:
#. s3://other.host/bucket_name[/prefix]
-#: ../duplicity/commandline.py:800
+#: ../duplicity/commandline.py:806
msgid "bucket_name"
msgstr ""
#. Used in usage help to represent the name of a container in
#. Amazon Web Services' Cloudfront. Example:
#. cf+http://container_name
-#: ../duplicity/commandline.py:811
+#: ../duplicity/commandline.py:817
msgid "container_name"
msgstr ""
#. noun
-#: ../duplicity/commandline.py:814
+#: ../duplicity/commandline.py:820
msgid "count"
msgstr ""
#. Used in usage help to represent the name of a file directory
-#: ../duplicity/commandline.py:817
+#: ../duplicity/commandline.py:823
msgid "directory"
msgstr ""
#. Used in usage help, e.g. to represent the name of a code
#. module. Example:
#. rsync://user[:password]@other.host[:port]::/module/some_dir
-#: ../duplicity/commandline.py:830
+#: ../duplicity/commandline.py:836
msgid "module"
msgstr ""
#. Used in usage help to represent an internet hostname. Example:
#. ftp://user[:password]@other.host[:port]/some_dir
-#: ../duplicity/commandline.py:844
+#: ../duplicity/commandline.py:850
msgid "other.host"
msgstr ""
#. Used in usage help. Example:
#. ftp://user[:password]@other.host[:port]/some_dir
-#: ../duplicity/commandline.py:848
+#: ../duplicity/commandline.py:854
msgid "password"
msgstr ""
#. Used in usage help to represent a TCP port number. Example:
#. ftp://user[:password]@other.host[:port]/some_dir
-#: ../duplicity/commandline.py:856
+#: ../duplicity/commandline.py:862
msgid "port"
msgstr ""
#. Used in usage help. This represents a string to be used as a
#. prefix to names for backup files created by Duplicity. Example:
#. s3://other.host/bucket_name[/prefix]
-#: ../duplicity/commandline.py:861
+#: ../duplicity/commandline.py:867
msgid "prefix"
msgstr ""
#. Used in usage help to represent a Unix-style path name. Example:
#. rsync://user[:password]@other.host[:port]/relative_path
-#: ../duplicity/commandline.py:865
+#: ../duplicity/commandline.py:871
msgid "relative_path"
msgstr ""
#. Used in usage help to represent the name of a single file
#. directory or a Unix-style path to a directory. Example:
#. file:///some_dir
-#: ../duplicity/commandline.py:880
+#: ../duplicity/commandline.py:886
msgid "some_dir"
msgstr ""
@@ -739,14 +739,14 @@
#. directory or a Unix-style path to a directory where files will be
#. coming FROM. Example:
#. duplicity [full|incremental] [options] source_dir target_url
-#: ../duplicity/commandline.py:886
+#: ../duplicity/commandline.py:892
msgid "source_dir"
msgstr ""
#. Used in usage help to represent a URL files will be coming
#. FROM. Example:
#. duplicity [restore] [options] source_url target_dir
-#: ../duplicity/commandline.py:891
+#: ../duplicity/commandline.py:897
msgid "source_url"
msgstr ""
@@ -754,91 +754,91 @@
#. directory or a Unix-style path to a directory. where files will be
#. going TO. Example:
#. duplicity [restore] [options] source_url target_dir
-#: ../duplicity/commandline.py:897
+#: ../duplicity/commandline.py:903
msgid "target_dir"
msgstr ""
#. Used in usage help to represent a URL files will be going TO.
#. Example:
#. duplicity [full|incremental] [options] source_dir target_url
-#: ../duplicity/commandline.py:902
+#: ../duplicity/commandline.py:908
msgid "target_url"
msgstr ""
#. Used in usage help to represent a user name (i.e. login).
#. Example:
#. ftp://user[:password]@other.host[:port]/some_dir
-#: ../duplicity/commandline.py:912
+#: ../duplicity/commandline.py:918
msgid "user"
msgstr ""
#. account id for b2. Example: b2://account_id@bucket/
-#: ../duplicity/commandline.py:915
+#: ../duplicity/commandline.py:921
msgid "account_id"
msgstr ""
#. application_key for b2.
#. Example: b2://account_id:application_key@bucket/
-#: ../duplicity/commandline.py:919
+#: ../duplicity/commandline.py:925
msgid "application_key"
msgstr ""
#. Header in usage help
-#: ../duplicity/commandline.py:938
+#: ../duplicity/commandline.py:944
msgid "Backends and their URL formats:"
msgstr ""
#. Header in usage help
-#: ../duplicity/commandline.py:970
+#: ../duplicity/commandline.py:976
msgid "Commands:"
msgstr ""
-#: ../duplicity/commandline.py:995
+#: ../duplicity/commandline.py:1001
#, python-format
msgid "Specified archive directory '%s' does not exist, or is not a directory"
msgstr ""
-#: ../duplicity/commandline.py:1004
+#: ../duplicity/commandline.py:1010
#, python-format
msgid ""
"Sign key should be an 8, 16 alt. 40 character hex string, like 'AA0E73D2'.\n"
"Received '%s' instead."
msgstr ""
-#: ../duplicity/commandline.py:1064
+#: ../duplicity/commandline.py:1070
#, python-format
msgid ""
"Restore destination directory %s already exists.\n"
"Will not overwrite."
msgstr ""
-#: ../duplicity/commandline.py:1069
+#: ../duplicity/commandline.py:1075
#, python-format
msgid "Verify directory %s does not exist"
msgstr ""
-#: ../duplicity/commandline.py:1075
+#: ../duplicity/commandline.py:1081
#, python-format
msgid "Backup source directory %s does not exist."
msgstr ""
-#: ../duplicity/commandline.py:1106
+#: ../duplicity/commandline.py:1112
#, python-format
msgid "Command line warning: %s"
msgstr ""
-#: ../duplicity/commandline.py:1106
+#: ../duplicity/commandline.py:1112
msgid ""
"Selection options --exclude/--include\n"
"currently work only when backing up,not restoring."
msgstr ""
-#: ../duplicity/commandline.py:1142
+#: ../duplicity/commandline.py:1148
#, python-format
msgid "GPG binary is %s, version %s"
msgstr ""
-#: ../duplicity/commandline.py:1169
+#: ../duplicity/commandline.py:1175
#, python-format
msgid ""
"Bad URL '%s'.\n"
@@ -846,237 +846,237 @@
"\"file:///usr/local\". See the man page for more information."
msgstr ""
-#: ../duplicity/commandline.py:1199
+#: ../duplicity/commandline.py:1205
msgid "Main action: "
msgstr ""
-#: ../duplicity/backend.py:102
+#: ../duplicity/backend.py:107
#, python-format
msgid "Import of %s %s"
msgstr ""
-#: ../duplicity/backend.py:211
+#: ../duplicity/backend.py:216
#, python-format
msgid "Could not initialize backend: %s"
msgstr ""
-#: ../duplicity/backend.py:375
+#: ../duplicity/backend.py:380
#, python-format
msgid "Backtrace of previous error: %s"
msgstr ""
-#: ../duplicity/backend.py:390
+#: ../duplicity/backend.py:395
#, python-format
msgid "Giving up after %s attempts. %s: %s"
msgstr ""
-#: ../duplicity/backend.py:394
+#: ../duplicity/backend.py:399
#, python-format
msgid "Attempt %s failed. %s: %s"
msgstr ""
-#: ../duplicity/backend.py:488
+#: ../duplicity/backend.py:493
#, python-format
msgid "Reading results of '%s'"
msgstr ""
-#: ../duplicity/backend.py:514
+#: ../duplicity/backend.py:519
#, python-format
msgid "Writing %s"
msgstr ""
-#: ../duplicity/backend.py:555
+#: ../duplicity/backend.py:560
#, python-format
msgid "File %s not found locally after get from backend"
msgstr ""
-#: ../duplicity/asyncscheduler.py:67
+#: ../duplicity/asyncscheduler.py:71
#, python-format
msgid "instantiating at concurrency %d"
msgstr ""
-#: ../duplicity/asyncscheduler.py:94
+#: ../duplicity/asyncscheduler.py:98
msgid "inserting barrier"
msgstr ""
-#: ../duplicity/asyncscheduler.py:143
+#: ../duplicity/asyncscheduler.py:147
msgid "running task synchronously (asynchronicity disabled)"
msgstr ""
-#: ../duplicity/asyncscheduler.py:149
+#: ../duplicity/asyncscheduler.py:153
msgid "scheduling task for asynchronous execution"
msgstr ""
-#: ../duplicity/asyncscheduler.py:178
+#: ../duplicity/asyncscheduler.py:182
msgid "task completed successfully"
msgstr ""
-#: ../duplicity/asyncscheduler.py:189
+#: ../duplicity/asyncscheduler.py:193
msgid ""
"a previously scheduled task has failed; propagating the result immediately"
msgstr ""
-#: ../duplicity/asyncscheduler.py:212 ../duplicity/asyncscheduler.py:233
+#: ../duplicity/asyncscheduler.py:216 ../duplicity/asyncscheduler.py:237
#, python-format
msgid "active workers = %d"
msgstr ""
-#: ../duplicity/asyncscheduler.py:253
+#: ../duplicity/asyncscheduler.py:257
#, python-format
msgid "task execution done (success: %s)"
msgstr ""
-#: ../duplicity/patchdir.py:80 ../duplicity/patchdir.py:85
+#: ../duplicity/patchdir.py:85 ../duplicity/patchdir.py:90
#, python-format
msgid "Patching %s"
msgstr ""
-#: ../duplicity/patchdir.py:530
+#: ../duplicity/patchdir.py:542
#, python-format
msgid "Error '%s' patching %s"
msgstr ""
-#: ../duplicity/patchdir.py:605
+#: ../duplicity/patchdir.py:617
#, python-format
msgid "Writing %s of type %s"
msgstr ""
-#: ../duplicity/collections.py:171 ../duplicity/collections.py:185
+#: ../duplicity/collections.py:178 ../duplicity/collections.py:192
#, python-format
msgid "BackupSet.delete: missing %s"
msgstr ""
-#: ../duplicity/collections.py:210
+#: ../duplicity/collections.py:217
msgid "Fatal Error: No manifests found for most recent backup"
msgstr ""
-#: ../duplicity/collections.py:219
+#: ../duplicity/collections.py:226
msgid ""
"Fatal Error: Remote manifest does not match local one. Either the remote "
"backup set or the local archive directory has been corrupted."
msgstr ""
-#: ../duplicity/collections.py:227
+#: ../duplicity/collections.py:234
msgid "Fatal Error: Neither remote nor local manifest is readable."
msgstr ""
-#: ../duplicity/collections.py:238
+#: ../duplicity/collections.py:245
#, python-format
msgid "Processing local manifest %s (%s)"
msgstr ""
-#: ../duplicity/collections.py:250
+#: ../duplicity/collections.py:257
#, python-format
msgid "Error processing remote manifest (%s): %s"
msgstr ""
-#: ../duplicity/collections.py:253
+#: ../duplicity/collections.py:260
#, python-format
msgid "Processing remote manifest %s (%s)"
msgstr ""
-#: ../duplicity/collections.py:349
+#: ../duplicity/collections.py:356
msgid "Preferring Backupset over previous one!"
msgstr ""
-#: ../duplicity/collections.py:352
+#: ../duplicity/collections.py:359
#, python-format
msgid "Ignoring incremental Backupset (start_time: %s; needed: %s)"
msgstr ""
-#: ../duplicity/collections.py:357
+#: ../duplicity/collections.py:364
#, python-format
msgid "Added incremental Backupset (start_time: %s / end_time: %s)"
msgstr ""
-#: ../duplicity/collections.py:427
+#: ../duplicity/collections.py:434
msgid "Chain start time: "
msgstr ""
-#: ../duplicity/collections.py:428
+#: ../duplicity/collections.py:435
msgid "Chain end time: "
msgstr ""
-#: ../duplicity/collections.py:429
+#: ../duplicity/collections.py:436
#, python-format
msgid "Number of contained backup sets: %d"
msgstr ""
-#: ../duplicity/collections.py:431
+#: ../duplicity/collections.py:438
#, python-format
msgid "Total number of contained volumes: %d"
msgstr ""
-#: ../duplicity/collections.py:433 ../duplicity/collections.py:1237
+#: ../duplicity/collections.py:440 ../duplicity/collections.py:1236
msgid "Type of backup set:"
msgstr ""
-#: ../duplicity/collections.py:433 ../duplicity/collections.py:1237
+#: ../duplicity/collections.py:440 ../duplicity/collections.py:1236
msgid "Time:"
msgstr ""
-#: ../duplicity/collections.py:433
+#: ../duplicity/collections.py:440
msgid "Num volumes:"
msgstr ""
-#: ../duplicity/collections.py:437 ../duplicity/collections.py:1243
+#: ../duplicity/collections.py:444 ../duplicity/collections.py:1242
msgid "Full"
msgstr ""
-#: ../duplicity/collections.py:440 ../duplicity/collections.py:1245
+#: ../duplicity/collections.py:447 ../duplicity/collections.py:1244
msgid "Incremental"
msgstr ""
-#: ../duplicity/collections.py:500
+#: ../duplicity/collections.py:507
msgid "local"
msgstr ""
-#: ../duplicity/collections.py:502
+#: ../duplicity/collections.py:509
msgid "remote"
msgstr ""
-#: ../duplicity/collections.py:658
+#: ../duplicity/collections.py:664
msgid "Collection Status"
msgstr ""
-#: ../duplicity/collections.py:660
+#: ../duplicity/collections.py:666
#, python-format
msgid "Connecting with backend: %s"
msgstr ""
-#: ../duplicity/collections.py:662
+#: ../duplicity/collections.py:668
#, python-format
msgid "Archive dir: %s"
msgstr ""
-#: ../duplicity/collections.py:665
+#: ../duplicity/collections.py:671
#, python-format
msgid "Found %d secondary backup chain."
msgid_plural "Found %d secondary backup chains."
msgstr[0] ""
msgstr[1] ""
-#: ../duplicity/collections.py:670
+#: ../duplicity/collections.py:676
#, python-format
msgid "Secondary chain %d of %d:"
msgstr ""
-#: ../duplicity/collections.py:676
+#: ../duplicity/collections.py:682
msgid "Found primary backup chain with matching signature chain:"
msgstr ""
-#: ../duplicity/collections.py:680
+#: ../duplicity/collections.py:686
msgid "No backup chains with active signatures found"
msgstr ""
-#: ../duplicity/collections.py:683
+#: ../duplicity/collections.py:689
#, python-format
msgid "Also found %d backup set not part of any chain,"
msgid_plural "Also found %d backup sets not part of any chain,"
msgstr[0] ""
msgstr[1] ""
-#: ../duplicity/collections.py:687
+#: ../duplicity/collections.py:693
#, python-format
msgid "and %d incomplete backup set."
msgid_plural "and %d incomplete backup sets."
@@ -1084,112 +1084,112 @@
msgstr[1] ""
#. "cleanup" is a hard-coded command, so do not translate it
-#: ../duplicity/collections.py:692
+#: ../duplicity/collections.py:698
msgid "These may be deleted by running duplicity with the \"cleanup\" command."
msgstr ""
-#: ../duplicity/collections.py:695
+#: ../duplicity/collections.py:701
msgid "No orphaned or incomplete backup sets found."
msgstr ""
-#: ../duplicity/collections.py:711
+#: ../duplicity/collections.py:717
#, python-format
msgid "%d file exists on backend"
msgid_plural "%d files exist on backend"
msgstr[0] ""
msgstr[1] ""
-#: ../duplicity/collections.py:721
+#: ../duplicity/collections.py:727
#, python-format
msgid "%d file exists in cache"
msgid_plural "%d files exist in cache"
msgstr[0] ""
msgstr[1] ""
-#: ../duplicity/collections.py:774
+#: ../duplicity/collections.py:780
msgid "Warning, discarding last backup set, because of missing signature file."
msgstr ""
-#: ../duplicity/collections.py:797
+#: ../duplicity/collections.py:803
msgid "Warning, found the following local orphaned signature file:"
msgid_plural "Warning, found the following local orphaned signature files:"
msgstr[0] ""
msgstr[1] ""
-#: ../duplicity/collections.py:806
+#: ../duplicity/collections.py:812
msgid "Warning, found the following remote orphaned signature file:"
msgid_plural "Warning, found the following remote orphaned signature files:"
msgstr[0] ""
msgstr[1] ""
-#: ../duplicity/collections.py:815
+#: ../duplicity/collections.py:821
msgid "Warning, found signatures but no corresponding backup files"
msgstr ""
-#: ../duplicity/collections.py:819
+#: ../duplicity/collections.py:825
msgid ""
"Warning, found incomplete backup sets, probably left from aborted session"
msgstr ""
-#: ../duplicity/collections.py:823
+#: ../duplicity/collections.py:829
msgid "Warning, found the following orphaned backup file:"
msgid_plural "Warning, found the following orphaned backup files:"
msgstr[0] ""
msgstr[1] ""
-#: ../duplicity/collections.py:840
+#: ../duplicity/collections.py:846
#, python-format
msgid "Extracting backup chains from list of files: %s"
msgstr ""
-#: ../duplicity/collections.py:851
+#: ../duplicity/collections.py:857
#, python-format
msgid "File %s is part of known set"
msgstr ""
-#: ../duplicity/collections.py:854
+#: ../duplicity/collections.py:860
#, python-format
msgid "File %s is not part of a known set; creating new set"
msgstr ""
-#: ../duplicity/collections.py:859
+#: ../duplicity/collections.py:865
#, python-format
msgid "Ignoring file (rejected by backup set) '%s'"
msgstr ""
-#: ../duplicity/collections.py:875
+#: ../duplicity/collections.py:881
#, python-format
msgid "Found backup chain %s"
msgstr ""
-#: ../duplicity/collections.py:880
+#: ../duplicity/collections.py:886
#, python-format
msgid "Added set %s to pre-existing chain %s"
msgstr ""
-#: ../duplicity/collections.py:884
+#: ../duplicity/collections.py:890
#, python-format
msgid "Found orphaned set %s"
msgstr ""
-#: ../duplicity/collections.py:1038
+#: ../duplicity/collections.py:1040
#, python-format
msgid ""
"No signature chain for the requested time. Using oldest available chain, "
"starting at time %s."
msgstr ""
+#: ../duplicity/collections.py:1234
+#, python-format
+msgid "File: %s"
+msgstr ""
+
#: ../duplicity/collections.py:1235
#, python-format
-msgid "File: %s"
+msgid "Total number of backup: %d"
msgstr ""
#: ../duplicity/collections.py:1236
-#, python-format
-msgid "Total number of backup: %d"
-msgstr ""
-
-#: ../duplicity/collections.py:1237
msgid "Type of file change:"
msgstr ""
@@ -1198,47 +1198,47 @@
msgid "Error listing directory %s"
msgstr ""
-#: ../duplicity/diffdir.py:108 ../duplicity/diffdir.py:398
+#: ../duplicity/diffdir.py:117 ../duplicity/diffdir.py:419
#, python-format
msgid "Error %s getting delta for %s"
msgstr ""
-#: ../duplicity/diffdir.py:122
+#: ../duplicity/diffdir.py:131
#, python-format
msgid "Getting delta of %s and %s"
msgstr ""
-#: ../duplicity/diffdir.py:167
+#: ../duplicity/diffdir.py:182
#, python-format
msgid "A %s"
msgstr ""
-#: ../duplicity/diffdir.py:174
+#: ../duplicity/diffdir.py:189
#, python-format
msgid "M %s"
msgstr ""
-#: ../duplicity/diffdir.py:196
+#: ../duplicity/diffdir.py:211
#, python-format
msgid "Comparing %s and %s"
msgstr ""
-#: ../duplicity/diffdir.py:204
+#: ../duplicity/diffdir.py:219
#, python-format
msgid "D %s"
msgstr ""
-#: ../duplicity/lazy.py:338
+#: ../duplicity/lazy.py:342
#, python-format
msgid "Warning: oldindex %s >= newindex %s"
msgstr ""
-#: ../duplicity/lazy.py:413
+#: ../duplicity/lazy.py:417
#, python-format
msgid "Error '%s' processing %s"
msgstr ""
-#: ../duplicity/lazy.py:423
+#: ../duplicity/lazy.py:427
#, python-format
msgid "Skipping %s because of previous error"
msgstr ""
@@ -1248,126 +1248,126 @@
msgid "Connection failed, please check your password: %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:86
+#: ../duplicity/backends/multibackend.py:89
#, python-format
msgid "MultiBackend: Could not parse query string %s: %s "
msgstr ""
-#: ../duplicity/backends/multibackend.py:95
+#: ../duplicity/backends/multibackend.py:98
#, python-format
msgid "MultiBackend: Invalid query string %s: more than one value for %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:100
+#: ../duplicity/backends/multibackend.py:103
#, python-format
msgid "MultiBackend: Invalid query string %s: unknown parameter %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:150
-#: ../duplicity/backends/multibackend.py:155
+#: ../duplicity/backends/multibackend.py:153
+#: ../duplicity/backends/multibackend.py:158
#, python-format
msgid "MultiBackend: illegal value for %s: %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:163
+#: ../duplicity/backends/multibackend.py:166
#, python-format
msgid "MultiBackend: Url %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:167
+#: ../duplicity/backends/multibackend.py:170
#, python-format
msgid "MultiBackend: Could not load config file %s: %s "
msgstr ""
-#: ../duplicity/backends/multibackend.py:176
+#: ../duplicity/backends/multibackend.py:179
#, python-format
msgid "MultiBackend: use store %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:181
+#: ../duplicity/backends/multibackend.py:184
#, python-format
msgid "MultiBackend: set env %s = %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:194
+#: ../duplicity/backends/multibackend.py:197
#, python-format
msgid "Multibackend: register affinity for prefix %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:235
+#: ../duplicity/backends/multibackend.py:238
#, python-format
msgid "MultiBackend: _put: write to store #%s (%s)"
msgstr ""
-#: ../duplicity/backends/multibackend.py:248
+#: ../duplicity/backends/multibackend.py:251
#, python-format
msgid "MultiBackend: failed to write to store #%s (%s), try #%s, Exception: %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:255
+#: ../duplicity/backends/multibackend.py:258
#, python-format
msgid "MultiBackend: failed to write %s. Aborting process."
msgstr ""
-#: ../duplicity/backends/multibackend.py:262
+#: ../duplicity/backends/multibackend.py:265
#, python-format
msgid ""
"MultiBackend: failed to write %s. Tried all backing stores and none succeeded"
msgstr ""
-#: ../duplicity/backends/multibackend.py:281
+#: ../duplicity/backends/multibackend.py:284
#, python-format
msgid "MultiBackend: failed to get %s to %s from %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:284
+#: ../duplicity/backends/multibackend.py:287
#, python-format
msgid ""
"MultiBackend: failed to get %s. Tried all backing stores and none succeeded"
msgstr ""
-#: ../duplicity/backends/multibackend.py:293
+#: ../duplicity/backends/multibackend.py:296
#, python-format
msgid "MultiBackend: list from %s: %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:299
+#: ../duplicity/backends/multibackend.py:302
#, python-format
msgid "MultiBackend: combined list: %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:324
+#: ../duplicity/backends/multibackend.py:327
#, python-format
msgid "MultiBackend: failed to delete %s from %s"
msgstr ""
-#: ../duplicity/backends/multibackend.py:328
+#: ../duplicity/backends/multibackend.py:331
#, python-format
msgid ""
"MultiBackend: failed to delete %s. Tried all backing stores and none "
"succeeded"
msgstr ""
-#: ../duplicity/backends/pydrivebackend.py:142
+#: ../duplicity/backends/pydrivebackend.py:144
#, python-format
msgid "PyDrive backend: multiple files called '%s'."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:61
+#: ../duplicity/backends/webdavbackend.py:65
msgid "Missing socket or ssl python modules."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:79
+#: ../duplicity/backends/webdavbackend.py:83
#, python-format
msgid "Cacert database file '%s' is not readable."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:100
+#: ../duplicity/backends/webdavbackend.py:104
msgid ""
"Option '--ssl-cacert-path' is not supported with python 2.7.8 and below."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:104
+#: ../duplicity/backends/webdavbackend.py:108
#, python-format
msgid ""
"For certificate verification with python 2.7.8 or earlier a cacert database\n"
@@ -1377,124 +1377,124 @@
" Consider using the options --ssl-cacert-file, --ssl-no-check-certificate ."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:150
+#: ../duplicity/backends/webdavbackend.py:154
#, python-format
msgid "Using WebDAV protocol %s"
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:151
+#: ../duplicity/backends/webdavbackend.py:155
#, python-format
msgid "Using WebDAV host %s port %s"
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:153
+#: ../duplicity/backends/webdavbackend.py:157
#, python-format
msgid "Using WebDAV directory %s"
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:184
+#: ../duplicity/backends/webdavbackend.py:188
#, python-format
msgid "WebDAV create connection on '%s'"
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:195
+#: ../duplicity/backends/webdavbackend.py:199
#, python-format
msgid "WebDAV Unknown URI scheme: %s"
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:214
+#: ../duplicity/backends/webdavbackend.py:218
#, python-format
msgid "WebDAV %s %s request with headers: %s "
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:215
-#: ../duplicity/backends/webdavbackend.py:238
+#: ../duplicity/backends/webdavbackend.py:219
+#: ../duplicity/backends/webdavbackend.py:242
#, python-format
msgid "WebDAV data length: %s "
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:218
+#: ../duplicity/backends/webdavbackend.py:222
#, python-format
msgid "WebDAV response status %s with reason '%s'."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:224
+#: ../duplicity/backends/webdavbackend.py:228
#, python-format
msgid "WebDAV redirect to: %s "
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:226
+#: ../duplicity/backends/webdavbackend.py:230
msgid "WebDAV redirected 10 times. Giving up."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:231
+#: ../duplicity/backends/webdavbackend.py:235
msgid "WebDAV missing location header in redirect response."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:236
+#: ../duplicity/backends/webdavbackend.py:240
msgid "WebDAV retry request with authentification headers."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:237
+#: ../duplicity/backends/webdavbackend.py:241
#, python-format
msgid "WebDAV %s %s request2 with headers: %s "
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:241
+#: ../duplicity/backends/webdavbackend.py:245
#, python-format
msgid "WebDAV response2 status %s with reason '%s'."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:258
+#: ../duplicity/backends/webdavbackend.py:262
msgid ""
"python-kerberos needed to use kerberos "
"authorization, falling back to basic auth."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:262
+#: ../duplicity/backends/webdavbackend.py:266
#, python-format
msgid ""
"Kerberos authorization failed: %s. Falling back to "
"basic auth."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:360
+#: ../duplicity/backends/webdavbackend.py:364
#, python-format
msgid "Creating missing directory %s"
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:364
+#: ../duplicity/backends/webdavbackend.py:368
#, python-format
msgid "WebDAV MKCOL %s failed: %s %s"
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:377
+#: ../duplicity/backends/webdavbackend.py:381
#, python-format
msgid "WebDAV path decoding and translation: %s -> %s"
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:422
+#: ../duplicity/backends/webdavbackend.py:426
#, python-format
msgid "WebDAV GET Bad status code %s reason %s."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:444
+#: ../duplicity/backends/webdavbackend.py:448
#, python-format
msgid "WebDAV PUT Bad status code %s reason %s."
msgstr ""
-#: ../duplicity/backends/webdavbackend.py:464
+#: ../duplicity/backends/webdavbackend.py:468
#, python-format
msgid "WebDAV DEL Bad status code %s reason %s."
msgstr ""
-#: ../duplicity/librsync.py:184
+#: ../duplicity/librsync.py:195
msgid ""
"basis_file must be a (true) file or an object whose file attribute is the "
"underlying true file object"
msgstr ""
-#: ../duplicity/manifest.py:92
+#: ../duplicity/manifest.py:97
#, python-format
msgid ""
"Fatal Error: Backup source host has changed.\n"
@@ -1502,7 +1502,7 @@
"Previous hostname: %s"
msgstr ""
-#: ../duplicity/manifest.py:99
+#: ../duplicity/manifest.py:104
#, python-format
msgid ""
"Fatal Error: Backup source directory has changed.\n"
@@ -1510,7 +1510,7 @@
"Previous directory: %s"
msgstr ""
-#: ../duplicity/manifest.py:109
+#: ../duplicity/manifest.py:114
msgid ""
"Aborting because you may have accidentally tried to backup two different "
"data sets to the same remote location, or using the same archive directory. "
@@ -1518,142 +1518,142 @@
"seeing this message"
msgstr ""
-#: ../duplicity/manifest.py:206
+#: ../duplicity/manifest.py:216
#, python-format
msgid "Found manifest volume %s"
msgstr ""
-#: ../duplicity/manifest.py:213
+#: ../duplicity/manifest.py:223
#, python-format
msgid "Found %s volumes in manifest"
msgstr ""
-#: ../duplicity/manifest.py:230
+#: ../duplicity/manifest.py:240
#, python-format
msgid ""
"Manifest file '%s' is corrupt: File count says %d, File list contains %d"
msgstr ""
-#: ../duplicity/manifest.py:249
+#: ../duplicity/manifest.py:259
msgid "Manifests not equal because different volume numbers"
msgstr ""
-#: ../duplicity/manifest.py:254
+#: ../duplicity/manifest.py:264
msgid "Manifests not equal because volume lists differ"
msgstr ""
-#: ../duplicity/manifest.py:259
+#: ../duplicity/manifest.py:269
msgid "Manifests not equal because hosts or directories differ"
msgstr ""
-#: ../duplicity/manifest.py:406
+#: ../duplicity/manifest.py:426
msgid "Warning, found extra Volume identifier"
msgstr ""
-#: ../duplicity/manifest.py:432
+#: ../duplicity/manifest.py:452
msgid "Other is not VolumeInfo"
msgstr ""
-#: ../duplicity/manifest.py:435
+#: ../duplicity/manifest.py:455
msgid "Volume numbers don't match"
msgstr ""
-#: ../duplicity/manifest.py:438
+#: ../duplicity/manifest.py:458
msgid "start_indicies don't match"
msgstr ""
-#: ../duplicity/manifest.py:441
+#: ../duplicity/manifest.py:461
msgid "end_index don't match"
msgstr ""
-#: ../duplicity/manifest.py:448
+#: ../duplicity/manifest.py:468
msgid "Hashes don't match"
msgstr ""
-#: ../duplicity/path.py:111
+#: ../duplicity/path.py:114
#, python-format
msgid "Warning: %s invalid devnums (0x%X), treating as (0, 0)."
msgstr ""
-#: ../duplicity/path.py:238 ../duplicity/path.py:297
+#: ../duplicity/path.py:243 ../duplicity/path.py:304
#, python-format
msgid "Warning: %s has negative mtime, treating as 0."
msgstr ""
-#: ../duplicity/path.py:361
+#: ../duplicity/path.py:368
msgid "Difference found:"
msgstr ""
-#: ../duplicity/path.py:370
+#: ../duplicity/path.py:377
#, python-format
msgid "New file %s"
msgstr ""
-#: ../duplicity/path.py:373
+#: ../duplicity/path.py:380
#, python-format
msgid "File %s is missing"
msgstr ""
-#: ../duplicity/path.py:376
+#: ../duplicity/path.py:383
#, python-format
msgid "File %%s has type %s, expected %s"
msgstr ""
-#: ../duplicity/path.py:382 ../duplicity/path.py:408
+#: ../duplicity/path.py:389 ../duplicity/path.py:415
#, python-format
msgid "File %%s has permissions %s, expected %s"
msgstr ""
-#: ../duplicity/path.py:387
+#: ../duplicity/path.py:394
#, python-format
msgid "File %%s has mtime %s, expected %s"
msgstr ""
-#: ../duplicity/path.py:395
+#: ../duplicity/path.py:402
#, python-format
msgid "Data for file %s is different"
msgstr ""
-#: ../duplicity/path.py:403
+#: ../duplicity/path.py:410
#, python-format
msgid "Symlink %%s points to %s, expected %s"
msgstr ""
-#: ../duplicity/path.py:412
+#: ../duplicity/path.py:419
#, python-format
msgid "Device file %%s has numbers %s, expected %s"
msgstr ""
-#: ../duplicity/path.py:592
+#: ../duplicity/path.py:601
#, python-format
msgid "Making directory %s"
msgstr ""
-#: ../duplicity/path.py:602
+#: ../duplicity/path.py:611
#, python-format
msgid "Deleting %s"
msgstr ""
-#: ../duplicity/path.py:611
+#: ../duplicity/path.py:620
#, python-format
msgid "Touching %s"
msgstr ""
-#: ../duplicity/path.py:618
+#: ../duplicity/path.py:627
#, python-format
msgid "Deleting tree %s"
msgstr ""
-#: ../duplicity/gpginterface.py:237
+#: ../duplicity/gpginterface.py:238
msgid "Threading not available -- zombie processes may appear"
msgstr ""
-#: ../duplicity/gpginterface.py:701
+#: ../duplicity/gpginterface.py:705
#, python-format
msgid "GPG process %d terminated before wait()"
msgstr ""
-#: ../duplicity/dup_time.py:61
+#: ../duplicity/dup_time.py:62
#, python-format
msgid ""
"Bad interval string \"%s\"\n"
@@ -1663,7 +1663,7 @@
"page for more information."
msgstr ""
-#: ../duplicity/dup_time.py:67
+#: ../duplicity/dup_time.py:68
#, python-format
msgid ""
"Bad time string \"%s\"\n"
@@ -1676,52 +1676,52 @@
"the day)."
msgstr ""
-#: ../duplicity/tempdir.py:132
+#: ../duplicity/tempdir.py:135
#, python-format
msgid "Using temporary directory %s"
msgstr ""
-#: ../duplicity/tempdir.py:176
+#: ../duplicity/tempdir.py:179
#, python-format
msgid "Registering (mktemp) temporary file %s"
msgstr ""
-#: ../duplicity/tempdir.py:198
+#: ../duplicity/tempdir.py:201
#, python-format
msgid "Registering (mkstemp) temporary file %s"
msgstr ""
-#: ../duplicity/tempdir.py:230
+#: ../duplicity/tempdir.py:233
#, python-format
msgid "Forgetting temporary file %s"
msgstr ""
-#: ../duplicity/tempdir.py:233
+#: ../duplicity/tempdir.py:236
#, python-format
msgid "Attempt to forget unknown tempfile %s - this is probably a bug."
msgstr ""
-#: ../duplicity/tempdir.py:252
+#: ../duplicity/tempdir.py:255
#, python-format
msgid "Removing still remembered temporary file %s"
msgstr ""
-#: ../duplicity/tempdir.py:255
+#: ../duplicity/tempdir.py:258
#, python-format
msgid "Cleanup of temporary file %s failed"
msgstr ""
-#: ../duplicity/tempdir.py:260
+#: ../duplicity/tempdir.py:263
#, python-format
msgid "Cleanup of temporary directory %s failed - this is probably a bug."
msgstr ""
-#: ../duplicity/util.py:120
+#: ../duplicity/util.py:128
#, python-format
msgid "IGNORED_ERROR: Warning: ignoring error as requested: %s: %s"
msgstr ""
-#: ../duplicity/util.py:187
+#: ../duplicity/util.py:195
#, python-format
msgid "Releasing lockfile %s"
msgstr ""
=== modified file 'setup.py'
--- setup.py 2017-12-09 21:38:43 +0000
+++ setup.py 2018-11-29 19:03:43 +0000
@@ -30,8 +30,8 @@
version_string = "$version"
-if sys.version_info[:2] < (2, 7) or sys.version_info[:2] > (2, 7):
- print("Sorry, duplicity requires version 2.7 of python.")
+if sys.version_info[:2] < (2, 7):
+ print("Sorry, duplicity requires version 2.7 or later of python.")
sys.exit(1)
incdir_list = libdir_list = None
=== modified file 'testing/__init__.py'
--- testing/__init__.py 2018-07-24 20:28:53 +0000
+++ testing/__init__.py 2018-11-29 19:03:43 +0000
@@ -57,8 +57,6 @@
sign_key = u'839E6A2856538CCF'
sign_passphrase = u'test'
- # ToDo: remove the below when gpg code (and test_gpg) is converted to unicode
- sign_passphrase_bytes = b'test'
encrypt_key1 = u'839E6A2856538CCF'
encrypt_key2 = u'453005CE9B736B2A'
=== modified file 'testing/functional/__init__.py'
--- testing/functional/__init__.py 2018-10-11 21:54:47 +0000
+++ testing/functional/__init__.py 2018-11-29 19:03:43 +0000
@@ -19,7 +19,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-from future_builtins import map
+from future.builtins import map
import os
import pexpect
@@ -89,10 +89,11 @@
# Check all string inputs are unicode -- we will convert to system encoding before running the command
for item in options:
- assert not isinstance(item, str), u"item " + unicode(item) + u" in options is not unicode"
+ if sys.version_info.major == 2:
+ assert not isinstance(item, str), u"item " + unicode(item) + u" in options is not unicode"
for item in passphrase_input:
- assert isinstance(item, unicode), u"item " + unicode(item) + u" in passphrase_input is not unicode"
+ assert isinstance(item, u"".__class__), u"item " + unicode(item) + u" in passphrase_input is not unicode"
if platform.platform().startswith(u'Linux'):
cmd_list = [u'setsid']
@@ -110,8 +111,8 @@
cmd_list.extend([u"--current-time", current_time])
cmd_list.extend(self.class_args)
if fail:
- cmd_list.extend([u"--fail", unicode(fail)])
- cmdline = u" ".join(map(lambda x: u'"%s"' % x, cmd_list))
+ cmd_list.extend([u"--fail", u"".__class__(fail)])
+ cmdline = u" ".join([u'"%s"' % x for x in cmd_list])
if not passphrase_input:
cmdline += u" < /dev/null"
@@ -132,7 +133,10 @@
# Manually encode to filesystem encoding and send to spawn as bytes
# ToDo: Remove this once we no longer have to support systems with pexpect < 4.0
- child = pexpect.spawn(b'/bin/sh', [b'-c', cmdline.encode(sys.getfilesystemencoding(),
+ if sys.version_info.major > 2:
+ child = pexpect.spawn(u'/bin/sh', [u'-c', cmdline], timeout=None)
+ else:
+ child = pexpect.spawn(b'/bin/sh', [b'-c', cmdline.encode(sys.getfilesystemencoding(),
u'replace')], timeout=None)
for passphrase in passphrase_input:
@@ -182,7 +186,7 @@
if file_to_restore:
options.extend([u'--file-to-restore', file_to_restore])
if time:
- options.extend([u'--restore-time', unicode(time)])
+ options.extend([u'--restore-time', u"".__class__(time)])
self.run_duplicity(options=options, **kwargs)
def verify(self, dirname, file_to_verify=None, time=None, options=[],
@@ -191,7 +195,7 @@
if file_to_verify:
options.extend([u'--file-to-restore', file_to_verify])
if time:
- options.extend([u'--restore-time', unicode(time)])
+ options.extend([u'--restore-time', u"".__class__(time)])
self.run_duplicity(options=options, **kwargs)
def cleanup(self, options=[]):
=== modified file 'testing/functional/test_final.py'
--- testing/functional/test_final.py 2018-07-24 20:57:03 +0000
+++ testing/functional/test_final.py 2018-11-29 19:03:43 +0000
@@ -127,7 +127,7 @@
assert lf4_1.issym()
lf4_2 = lf3.append(u"DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD")
fp = lf4_2.open(u"wb")
- fp.write(u"hello" * 1000)
+ fp.write(b"hello" * 1000)
assert not fp.close()
self.runtest([u"testfiles/empty_dir", lf_dir.uc_name,
=== modified file 'testing/functional/test_rdiffdir.py'
--- testing/functional/test_rdiffdir.py 2018-07-24 20:57:03 +0000
+++ testing/functional/test_rdiffdir.py 2018-11-29 19:03:43 +0000
@@ -45,9 +45,9 @@
delta_path = path.Path(u"testfiles/output/delta.tar")
sig_path = path.Path(u"testfiles/output/sig.tar")
- self.run_cmd(u"cp -pR %s %s" % (new_path.name, seq_path.name))
+ self.run_cmd(u"cp -pR %s %s" % (new_path.uc_name, seq_path.uc_name))
seq_path.setdata()
- self.run_rdiffdir(u"sig %s %s" % (seq_path.name, sig_path.name))
+ self.run_rdiffdir(u"sig %s %s" % (seq_path.uc_name, sig_path.uc_name))
sig_path.setdata()
assert sig_path.exists()
@@ -62,12 +62,12 @@
delta_path.delete()
assert not delta_path.exists()
self.run_rdiffdir(u"delta %s %s %s" %
- (sig_path.name, new_path.name, delta_path.name))
+ (sig_path.uc_name, new_path.uc_name, delta_path.uc_name))
delta_path.setdata()
assert delta_path.exists()
# patch and compare
- self.run_rdiffdir(u"patch %s %s" % (seq_path.name, delta_path.name))
+ self.run_rdiffdir(u"patch %s %s" % (seq_path.uc_name, delta_path.uc_name))
seq_path.setdata()
new_path.setdata()
assert new_path.compare_recursive(seq_path, verbose=1)
@@ -75,7 +75,7 @@
# Make new signature
sig_path.delete()
assert not sig_path.exists()
- self.run_rdiffdir(u"sig %s %s" % (seq_path.name, sig_path.name))
+ self.run_rdiffdir(u"sig %s %s" % (seq_path.uc_name, sig_path.uc_name))
sig_path.setdata()
assert sig_path.isreg()
=== modified file 'testing/functional/test_restart.py'
--- testing/functional/test_restart.py 2018-07-24 20:57:03 +0000
+++ testing/functional/test_restart.py 2018-11-29 19:03:43 +0000
@@ -312,7 +312,7 @@
self.assertEqual(1, len(sigtars))
sigtar = sigtars[0]
output = subprocess.Popen([u"tar", u"t", u"--file=%s" % sigtar], stdout=subprocess.PIPE).communicate()[0]
- self.assertEqual(1, output.split(u"\n").count(u"snapshot/"))
+ self.assertEqual(1, output.split(b"\n").count(b"snapshot/"))
def test_ignore_double_snapshot(self):
u"""
=== modified file 'testing/functional/test_selection.py'
--- testing/functional/test_selection.py 2018-07-19 21:06:44 +0000
+++ testing/functional/test_selection.py 2018-11-29 19:03:43 +0000
@@ -1122,7 +1122,7 @@
self.assertEqual(restored, self.expected_restored_tree)
-@unittest.skipUnless(sys.getfilesystemencoding() == u"UTF-8",
+@unittest.skipUnless(sys.getfilesystemencoding().upper() == u"UTF-8",
u"Skipping TestUnicode -- Only tested to work on UTF-8 systems")
class TestUnicode(IncludeExcludeFunctionalTest):
u""" Tests include/exclude options with unicode paths"""
=== modified file 'testing/overrides/gettext.py'
--- testing/overrides/gettext.py 2018-10-10 20:25:04 +0000
+++ testing/overrides/gettext.py 2018-11-29 19:03:43 +0000
@@ -25,6 +25,12 @@
def install(*args, **kwargs):
ZWSP = u"" # ZERO WIDTH SPACE, basically an invisible space separator
- import __builtin__
- __builtin__.__dict__[u'_'] = lambda x: x + ZWSP
- __builtin__.__dict__[u'ngettext'] = lambda one, more, n: one + ZWSP if n == 1 else more + ZWSP
+ import sys
+ if sys.version_info.major >= 3:
+ import builtins
+ b = builtins
+ else:
+ import __builtin__
+ b = __builtin__
+ b.__dict__[u'_'] = lambda x: x + ZWSP
+ b.__dict__[u'ngettext'] = lambda one, more, n: one + ZWSP if n == 1 else more + ZWSP
=== modified file 'testing/test_code.py'
--- testing/test_code.py 2018-10-10 20:25:04 +0000
+++ testing/test_code.py 2018-11-29 19:03:43 +0000
@@ -44,7 +44,7 @@
stderr=subprocess.PIPE)
output = process.communicate()[0]
self.assertTrue(process.returncode in returncodes, output)
- self.assertEqual(u"", output, output)
+ self.assertEqual(b"", output, output)
@skipCodeTest
def test_2to3(self):
@@ -65,6 +65,7 @@
u"--nofix=raw_input",
u"--nofix=urllib",
u"--nofix=xrange",
+ u"--nofix=map",
_top_dir])
@skipCodeTest
@@ -78,6 +79,7 @@
u"--disable=E0602", # Undefined variable
u"--disable=E0611", # No name in module
u"--disable=E1101", # Has no member
+ u"--disable=E1102", # is not callable (_)
u"--disable=E1103", # Maybe has no member
u"--disable=E0712", # Catching an exception which doesn't inherit from BaseException
u"--ignore=_librsync.so",
=== modified file 'testing/unit/test_backend_instance.py'
--- testing/unit/test_backend_instance.py 2018-07-27 02:18:12 +0000
+++ testing/unit/test_backend_instance.py 2018-11-29 19:03:43 +0000
@@ -19,7 +19,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
-import StringIO
+import io
import unittest
import duplicity.backend
@@ -37,7 +37,7 @@
os.makedirs(u'testfiles')
self.backend = None
self.local = path.Path(u'testfiles/local')
- self.local.writefileobj(StringIO.StringIO(u"hello"))
+ self.local.writefileobj(io.BytesIO(b"hello"))
def tearDown(self):
if self.backend is None:
@@ -50,18 +50,18 @@
return
self.backend._put(self.local, u'a')
getfile = path.Path(u'testfiles/getfile')
- self.backend._get(u'a', getfile)
+ self.backend._get(b'a', getfile)
self.assertTrue(self.local.compare_data(getfile))
def test_list(self):
if self.backend is None:
return
- self.backend._put(self.local, u'a')
- self.backend._put(self.local, u'b')
+ self.backend._put(self.local, b'a')
+ self.backend._put(self.local, b'b')
# It's OK for backends to create files as a side effect of put (e.g.
# the par2 backend does), so only check that at least a and b exist.
- self.assertTrue(u'a' in self.backend._list())
- self.assertTrue(u'b' in self.backend._list())
+ self.assertTrue(b'a' in self.backend._list())
+ self.assertTrue(b'b' in self.backend._list())
def test_delete(self):
if self.backend is None:
@@ -69,11 +69,11 @@
if not hasattr(self.backend, u'_delete'):
self.assertTrue(hasattr(self.backend, u'_delete_list'))
return
- self.backend._put(self.local, u'a')
- self.backend._put(self.local, u'b')
- self.backend._delete(u'a')
- self.assertFalse(u'a' in self.backend._list())
- self.assertTrue(u'b' in self.backend._list())
+ self.backend._put(self.local, b'a')
+ self.backend._put(self.local, b'b')
+ self.backend._delete(b'a')
+ self.assertFalse(b'a' in self.backend._list())
+ self.assertTrue(b'b' in self.backend._list())
def test_delete_clean(self):
if self.backend is None:
@@ -81,8 +81,8 @@
if not hasattr(self.backend, u'_delete'):
self.assertTrue(hasattr(self.backend, u'_delete_list'))
return
- self.backend._put(self.local, u'a')
- self.backend._delete(u'a')
+ self.backend._put(self.local, b'a')
+ self.backend._delete(b'a')
self.assertEqual(self.backend._list(), [])
def test_delete_missing(self):
@@ -94,7 +94,7 @@
# Backends can either silently ignore this, or throw an error
# that gives log.ErrorCode.backend_not_found.
try:
- self.backend._delete(u'a')
+ self.backend._delete(b'a')
except BackendException as e:
pass # Something went wrong, but it was an 'expected' something
except Exception as e:
@@ -107,14 +107,14 @@
if not hasattr(self.backend, u'_delete_list'):
self.assertTrue(hasattr(self.backend, u'_delete'))
return
- self.backend._put(self.local, u'a')
- self.backend._put(self.local, u'b')
- self.backend._put(self.local, u'c')
- self.backend._delete_list([u'a', u'd', u'c'])
+ self.backend._put(self.local, b'a')
+ self.backend._put(self.local, b'b')
+ self.backend._put(self.local, b'c')
+ self.backend._delete_list([b'a', b'd', b'c'])
files = self.backend._list()
- self.assertFalse(u'a' in files, files)
- self.assertTrue(u'b' in files, files)
- self.assertFalse(u'c' in files, files)
+ self.assertFalse(b'a' in files, files)
+ self.assertTrue(b'b' in files, files)
+ self.assertFalse(b'c' in files, files)
def test_move(self):
if self.backend is None:
@@ -126,11 +126,11 @@
self.local.copy(copy)
self.backend._move(self.local, u'a')
- self.assertTrue(u'a' in self.backend._list())
+ self.assertTrue(b'a' in self.backend._list())
self.assertFalse(self.local.exists())
getfile = path.Path(u'testfiles/getfile')
- self.backend._get(u'a', getfile)
+ self.backend._get(b'a', getfile)
self.assertTrue(copy.compare_data(getfile))
def test_query_exists(self):
=== modified file 'testing/unit/test_collections.py'
--- testing/unit/test_collections.py 2018-10-04 14:46:47 +0000
+++ testing/unit/test_collections.py 2018-11-29 19:03:43 +0000
@@ -34,44 +34,44 @@
from duplicity import dup_time
from . import UnitTestCase
-filename_list1 = [u"duplicity-full.2002-08-17T16:17:01-07:00.manifest.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol1.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol2.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol3.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol4.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol5.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol6.difftar.gpg",
- u"duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.manifest.gpg",
- u"duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.vol1.difftar.gpg",
- u"Extra stuff to be ignored"]
-
-remote_sigchain_filename_list = [u"duplicity-full-signatures.2002-08-17T16:17:01-07:00.sigtar.gpg",
- u"duplicity-new-signatures.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.sigtar.gpg",
- u"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg"]
-
-local_sigchain_filename_list = [u"duplicity-full-signatures.2002-08-17T16:17:01-07:00.sigtar.gz",
- u"duplicity-new-signatures.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.sigtar.gz",
- u"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gz"]
+filename_list1 = [b"duplicity-full.2002-08-17T16:17:01-07:00.manifest.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol1.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol2.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol3.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol4.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol5.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol6.difftar.gpg",
+ b"duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.manifest.gpg",
+ b"duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.vol1.difftar.gpg",
+ b"Extra stuff to be ignored"]
+
+remote_sigchain_filename_list = [b"duplicity-full-signatures.2002-08-17T16:17:01-07:00.sigtar.gpg",
+ b"duplicity-new-signatures.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.sigtar.gpg",
+ b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg"]
+
+local_sigchain_filename_list = [b"duplicity-full-signatures.2002-08-17T16:17:01-07:00.sigtar.gz",
+ b"duplicity-new-signatures.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.sigtar.gz",
+ b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gz"]
# A filename list with some incomplete volumes, an older full volume,
# and a complete chain.
-filename_list2 = [u"duplicity-full.2001-01-01T16:17:01-07:00.manifest.gpg",
- u"duplicity-full.2001-01-01T16:17:01-07:00.vol1.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.manifest.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol1.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol2.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol3.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol4.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol5.difftar.gpg",
- u"duplicity-full.2002-08-17T16:17:01-07:00.vol6.difftar.gpg",
- u"duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.manifest.gpg",
- u"duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.vol1.difftar.gpg",
- u"The following are extraneous duplicity files",
- u"duplicity-new-signatures.2001-08-17T02:05:13-05:00.to.2002-08-17T05:05:14-05:00.sigtar.gpg",
- u"duplicity-full.2002-08-15T01:01:01-07:00.vol1.difftar.gpg",
- u"duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.manifest.gpg",
- u"duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.vol1.difftar.gpg",
- u"Extra stuff to be ignored"]
+filename_list2 = [b"duplicity-full.2001-01-01T16:17:01-07:00.manifest.gpg",
+ b"duplicity-full.2001-01-01T16:17:01-07:00.vol1.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.manifest.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol1.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol2.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol3.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol4.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol5.difftar.gpg",
+ b"duplicity-full.2002-08-17T16:17:01-07:00.vol6.difftar.gpg",
+ b"duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.manifest.gpg",
+ b"duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.vol1.difftar.gpg",
+ b"The following are extraneous duplicity files",
+ b"duplicity-new-signatures.2001-08-17T02:05:13-05:00.to.2002-08-17T05:05:14-05:00.sigtar.gpg",
+ b"duplicity-full.2002-08-15T01:01:01-07:00.vol1.difftar.gpg",
+ b"duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.manifest.gpg",
+ b"duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.vol1.difftar.gpg",
+ b"Extra stuff to be ignored"]
class CollectionTest(UnitTestCase):
@@ -88,7 +88,7 @@
u"/archive_dir")
self.real_backend = backend.get_backend(u"file://%s/%s" %
- (col_test_dir.name, u"remote_dir"))
+ (col_test_dir.uc_name, u"remote_dir"))
self.output_dir = path.Path(u"testfiles/output") # used as a temp directory
self.output_dir_backend = backend.get_backend(u"file://testfiles/output")
@@ -129,7 +129,7 @@
chain = collections.SignatureChain(1, globals.archive_dir_path)
for filename in local_sigchain_filename_list:
assert chain.add_filename(filename)
- assert not chain.add_filename(u"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
+ assert not chain.add_filename(b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
def test_sig_chains(self):
u"""Test making signature chains from filename list"""
@@ -179,9 +179,9 @@
fileobjlist[i].close()
assert buf == s, (buf, s)
- test_fileobj(0, u"Hello, world!")
- test_fileobj(1, u"hello 1")
- test_fileobj(2, u"Hello 2")
+ test_fileobj(0, b"Hello, world!")
+ test_fileobj(1, b"hello 1")
+ test_fileobj(2, b"Hello 2")
@pytest.mark.usefixtures(u"redirect_stdin")
def test_sigchain_fileobj(self):
@@ -209,15 +209,15 @@
assert len(cs.remote_orphaned_sig_names) == 1, cs.remote_orphaned_sig_names
assert len(cs.incomplete_backup_sets) == 1, cs.incomplete_backup_sets
- right_list = [u"duplicity-new-signatures.2001-08-17T02:05:13-05:00.to.2002-08-17T05:05:14-05:00.sigtar.gpg",
- u"duplicity-full.2002-08-15T01:01:01-07:00.vol1.difftar.gpg",
- u"duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.manifest.gpg",
- u"duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.vol1.difftar.gpg"]
+ right_list = [b"duplicity-new-signatures.2001-08-17T02:05:13-05:00.to.2002-08-17T05:05:14-05:00.sigtar.gpg",
+ b"duplicity-full.2002-08-15T01:01:01-07:00.vol1.difftar.gpg",
+ b"duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.manifest.gpg",
+ b"duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.vol1.difftar.gpg"]
local_received_list, remote_received_list = cs.get_extraneous(False) # @UnusedVariable
errors = []
for filename in remote_received_list:
if filename not in right_list:
- errors.append(u"### Got bad extraneous filename " + filename)
+ errors.append(u"### Got bad extraneous filename " + filename.decode())
else:
right_list.remove(filename)
for filename in right_list:
=== modified file 'testing/unit/test_diffdir.py'
--- testing/unit/test_diffdir.py 2018-07-27 02:18:12 +0000
+++ testing/unit/test_diffdir.py 2018-11-29 19:03:43 +0000
@@ -66,7 +66,7 @@
sigtar = diffdir.SigTarBlockIter(select)
diffdir.write_block_iter(sigtar, u"testfiles/output/sigtar")
- sigtar_fp = open(u"testfiles/output/sigtar")
+ sigtar_fp = open(u"testfiles/output/sigtar", u"rb")
select2 = selection.Select(Path(dirname))
select2.set_iter()
diffdir.write_block_iter(diffdir.DirDelta(select2, sigtar_fp),
@@ -78,7 +78,7 @@
fin = open(u"testfiles/output/difftar", u"rb")
diff_buf = fin.read()
assert not fin.close()
- assert diff_buf == u'\0' * 10240
+ assert diff_buf == b'\0' * 10240
def test_empty_diff(self):
u"""Test producing a diff against same sig; should be len 0"""
@@ -89,7 +89,7 @@
sigtar = diffdir.SigTarBlockIter(select)
diffdir.write_block_iter(sigtar, u"testfiles/output/sigtar")
- sigtar_fp = open(u"testfiles/output/sigtar")
+ sigtar_fp = open(u"testfiles/output/sigtar", u"rb")
select2 = selection.Select(Path(u"testfiles/various_file_types"))
select2.set_iter()
diffdir.write_block_iter(diffdir.DirDelta(select2, sigtar_fp),
@@ -141,7 +141,7 @@
diffdir.write_block_iter(delta_tar,
u"testfiles/output/dir2dir3.difftar")
- buffer = r""
+ buffer = b""
tf = tarfile.TarFile(u"testfiles/output/dir2dir3.difftar", u"r")
for tarinfo in tf:
if tarinfo.name.startswith(r"multivol_diff/"):
@@ -201,7 +201,7 @@
# print delta1.name, delta2.name
compare_tar(delta1.open(u"rb"), delta2.open(u"rb"))
- assert not os.system(u"cmp %s %s" % (delta1.name, delta2.name))
+ assert not os.system(u"cmp %s %s" % (delta1.uc_name, delta2.uc_name))
# Write old-style signature to cur_full_sigs
diffdir.write_block_iter(diffdir.SigTarBlockIter(get_sel(cur_dir)),
=== modified file 'testing/unit/test_dup_temp.py'
--- testing/unit/test_dup_temp.py 2018-07-27 02:18:12 +0000
+++ testing/unit/test_dup_temp.py 2018-11-29 19:03:43 +0000
@@ -35,14 +35,14 @@
tp = dup_temp.new_temppath()
assert not tp.exists()
fileobj = tp.open(u"wb")
- fileobj.write(u"hello, there")
+ fileobj.write(b"hello, there")
fileobj.close()
tp.setdata()
assert tp.isreg()
fin = tp.open_with_delete(u"rb")
buf = fin.read()
- assert buf == u"hello, there", buf
+ assert buf == b"hello, there", buf
fin.close()
assert not tp.exists()
@@ -56,19 +56,19 @@
tdp = dup_temp.new_tempduppath(pr)
assert not tdp.exists()
fout = tdp.filtered_open(u"wb")
- fout.write(u"hello, there")
+ fout.write(b"hello, there")
fout.close()
tdp.setdata()
assert tdp.isreg()
fin1 = gzip.GzipFile(tdp.name, u"rb")
buf = fin1.read()
- assert buf == u"hello, there", buf
+ assert buf == b"hello, there", buf
fin1.close()
fin2 = tdp.filtered_open_with_delete(u"rb")
buf2 = fin2.read()
- assert buf2 == u"hello, there", buf
+            assert buf2 == b"hello, there", buf2
fin2.close()
assert not tdp.exists()
=== modified file 'testing/unit/test_file_naming.py'
--- testing/unit/test_file_naming.py 2018-07-27 02:18:12 +0000
+++ testing/unit/test_file_naming.py 2018-11-29 19:03:43 +0000
@@ -25,6 +25,7 @@
from duplicity import file_naming
from duplicity import log
from duplicity import globals
+from duplicity import util
from . import UnitTestCase
@@ -47,7 +48,7 @@
file_naming.prepare_regex(force=True)
filename = file_naming.get(u"inc", volume_number=23)
- log.Info(u"Inc filename: " + filename)
+ log.Info(u"Inc filename: " + util.fsdecode(filename))
pr = file_naming.parse(filename)
assert pr and pr.type == u"inc", pr
assert pr.start_time == 10
@@ -56,7 +57,7 @@
assert not pr.partial
filename = file_naming.get(u"full-sig")
- log.Info(u"Full sig filename: " + filename)
+ log.Info(u"Full sig filename: " + util.fsdecode(filename))
pr = file_naming.parse(filename)
assert pr.type == u"full-sig"
assert pr.time == 20
@@ -85,18 +86,18 @@
def test_more(self):
u"""More file_parsing tests"""
file_naming.prepare_regex(force=True)
- pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + u"dns.h112bi.h14rg0.st.g")
+ pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"dns.h112bi.h14rg0.st.g")
assert pr, pr
assert pr.type == u"new-sig"
assert pr.end_time == 1029826800
if not globals.short_filenames:
- pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + u"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
+ pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
assert pr, pr
assert pr.type == u"new-sig"
assert pr.end_time == 1029826800
- pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + u"dfs.h5dixs.st.g")
+ pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"dfs.h5dixs.st.g")
assert pr, pr
assert pr.type == u"full-sig"
assert pr.time == 1036954144, repr(pr.time)
@@ -104,20 +105,20 @@
def test_partial(self):
u"""Test addition of partial flag"""
file_naming.prepare_regex(force=True)
- pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + u"dns.h112bi.h14rg0.st.p.g")
+ pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"dns.h112bi.h14rg0.st.p.g")
assert pr, pr
assert pr.partial
assert pr.type == u"new-sig"
assert pr.end_time == 1029826800
if not globals.short_filenames:
- pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + u"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg")
+ pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg")
assert pr, pr
assert pr.partial
assert pr.type == u"new-sig"
assert pr.end_time == 1029826800
- pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + u"dfs.h5dixs.st.p.g")
+ pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + b"dfs.h5dixs.st.p.g")
assert pr, pr
assert pr.partial
assert pr.type == u"full-sig"
@@ -142,10 +143,10 @@
u"""Test filename parsing and generation with prefixes"""
def setUp(self):
super(FileNamingPrefixes, self).setUp()
- self.set_global(u'file_prefix', u"global-")
- self.set_global(u'file_prefix_manifest', u"mani-")
- self.set_global(u'file_prefix_signature', u"sign-")
- self.set_global(u'file_prefix_archive', u"arch-")
+ self.set_global(u'file_prefix', b"global-")
+ self.set_global(u'file_prefix_manifest', b"mani-")
+ self.set_global(u'file_prefix_signature', b"sign-")
+ self.set_global(u'file_prefix_archive', b"arch-")
if __name__ == u"__main__":
=== modified file 'testing/unit/test_gpg.py'
--- testing/unit/test_gpg.py 2018-10-04 14:46:47 +0000
+++ testing/unit/test_gpg.py 2018-11-29 19:03:43 +0000
@@ -56,13 +56,13 @@
def test_gpg1(self):
u"""Test gpg short strings"""
- self.gpg_cycle(u"hello, world")
- self.gpg_cycle(u"ansoetuh aoetnuh aoenstuh aoetnuh asoetuh saoteuh ")
+ self.gpg_cycle(b"hello, world")
+ self.gpg_cycle(b"ansoetuh aoetnuh aoenstuh aoetnuh asoetuh saoteuh ")
def test_gpg2(self):
u"""Test gpg long strings easily compressed"""
- self.gpg_cycle(u" " * 50000)
- self.gpg_cycle(u"aoeu" * 1000000)
+ self.gpg_cycle(b" " * 50000)
+ self.gpg_cycle(b"aoeu" * 1000000)
def test_gpg3(self):
u"""Test on random data - must have /dev/urandom device"""
@@ -73,31 +73,31 @@
def test_gpg_asym(self):
u"""Test GPG asymmetric encryption"""
- profile = gpg.GPGProfile(passphrase=self.sign_passphrase_bytes,
+ profile = gpg.GPGProfile(passphrase=self.sign_passphrase,
recipients=[self.encrypt_key1,
self.encrypt_key2])
- self.gpg_cycle(u"aoensutha aonetuh saoe", profile)
+ self.gpg_cycle(b"aoensutha aonetuh saoe", profile)
- profile2 = gpg.GPGProfile(passphrase=self.sign_passphrase_bytes,
+ profile2 = gpg.GPGProfile(passphrase=self.sign_passphrase,
recipients=[self.encrypt_key1])
- self.gpg_cycle(u"aoeu" * 10000, profile2)
+ self.gpg_cycle(b"aoeu" * 10000, profile2)
def test_gpg_hidden_asym(self):
u"""Test GPG asymmetric encryption with hidden key id"""
- profile = gpg.GPGProfile(passphrase=self.sign_passphrase_bytes,
+ profile = gpg.GPGProfile(passphrase=self.sign_passphrase,
hidden_recipients=[self.encrypt_key1,
self.encrypt_key2])
- self.gpg_cycle(u"aoensutha aonetuh saoe", profile)
+ self.gpg_cycle(b"aoensutha aonetuh saoe", profile)
- profile2 = gpg.GPGProfile(passphrase=self.sign_passphrase_bytes,
+ profile2 = gpg.GPGProfile(passphrase=self.sign_passphrase,
hidden_recipients=[self.encrypt_key1])
- self.gpg_cycle(u"aoeu" * 10000, profile2)
+ self.gpg_cycle(b"aoeu" * 10000, profile2)
def test_gpg_signing(self):
u"""Test to make sure GPG reports the proper signature key"""
- plaintext = u"hello" * 50000
+ plaintext = b"hello" * 50000
- signing_profile = gpg.GPGProfile(passphrase=self.sign_passphrase_bytes,
+ signing_profile = gpg.GPGProfile(passphrase=self.sign_passphrase,
sign_key=self.sign_key,
recipients=[self.encrypt_key1])
@@ -114,9 +114,9 @@
def test_gpg_signing_and_hidden_encryption(self):
u"""Test to make sure GPG reports the proper signature key even with hidden encryption key id"""
- plaintext = u"hello" * 50000
+ plaintext = b"hello" * 50000
- signing_profile = gpg.GPGProfile(passphrase=self.sign_passphrase_bytes,
+ signing_profile = gpg.GPGProfile(passphrase=self.sign_passphrase,
sign_key=self.sign_key,
hidden_recipients=[self.encrypt_key1])
@@ -177,11 +177,11 @@
def get_buffer(self, size):
u"""Return buffer of size size, consisting of half random data"""
- s1 = size / 2
+ s1 = int(size / 2)
s2 = size - s1
- return r"a" * s1 + self.from_random_fp.read(s2)
+ return b"a" * s1 + self.from_random_fp.read(s2)
- def next(self):
+ def __next__(self):
if self.at_end:
raise StopIteration
block_data = self.get_buffer(self.get_read_size())
@@ -195,7 +195,7 @@
return random.randrange(0, size)
def get_footer(self):
- return u"e" * random.randrange(0, 15000)
+ return b"e" * random.randrange(0, 15000)
class SHATest(UnitTestCase):
=== modified file 'testing/unit/test_gpginterface.py'
--- testing/unit/test_gpginterface.py 2018-07-27 02:18:12 +0000
+++ testing/unit/test_gpginterface.py 2018-11-29 19:03:43 +0000
@@ -110,7 +110,7 @@
def test_create_fhs_solely(self):
u"""Do GnuPG operations using solely the create_fhs feature"""
- plaintext = u"Three blind mice"
+ plaintext = b"Three blind mice"
ciphertext = self.do_create_fh_operation([u'--symmetric'],
plaintext)
@@ -125,7 +125,7 @@
u"""Do GnuPG operations using the attach_fhs feature"""
plaintext_source = __file__
- plainfile = open(plaintext_source)
+ plainfile = open(plaintext_source, u"rb")
temp1 = tempfile.TemporaryFile()
temp2 = tempfile.TemporaryFile()
=== modified file 'testing/unit/test_lazy.py'
--- testing/unit/test_lazy.py 2018-07-27 02:18:12 +0000
+++ testing/unit/test_lazy.py 2018-11-29 19:03:43 +0000
@@ -19,6 +19,8 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from builtins import next
+from builtins import range
import unittest
import pickle
import sys
@@ -29,9 +31,9 @@
class Iterators(UnitTestCase):
- one_to_100 = lambda s: iter(range(1, 101))
- evens = lambda s: iter(range(2, 101, 2))
- odds = lambda s: iter(range(1, 100, 2))
+ one_to_100 = lambda s: iter(list(range(1, 101)))
+ evens = lambda s: iter(list(range(2, 101, 2)))
+ odds = lambda s: iter(list(range(1, 100, 2)))
empty = lambda s: iter([])
def __init__(self, *args):
@@ -81,8 +83,8 @@
def testNormal(self):
u"""See if normal iterators are equal"""
assert Iter.equal(iter((1, 2, 3)), iter((1, 2, 3)))
- assert Iter.equal(self.odds(), iter(range(1, 100, 2)))
- assert Iter.equal(iter((1, 2, 3)), iter(range(1, 4)))
+ assert Iter.equal(self.odds(), iter(list(range(1, 100, 2))))
+ assert Iter.equal(iter((1, 2, 3)), iter(list(range(1, 4))))
def testNormalInequality(self):
u"""See if normal unequals work"""
@@ -127,7 +129,10 @@
def testError(self):
u"""Should raise appropriate error"""
i = Iter.filter(lambda x: x, self.falseerror_maker())
- self.assertRaises(Exception, i.next)
+ if sys.version_info.major >= 3:
+ self.assertRaises(Exception, i.__next__)
+ else:
+ self.assertRaises(Exception, i.next)
class MapTestCase(Iterators):
@@ -135,7 +140,7 @@
def testNumbers(self):
u"""1 to 100 * 2 = 2 to 200"""
assert Iter.equal(Iter.map(lambda x: 2 * x, self.one_to_100()),
- iter(range(2, 201, 2)))
+ iter(list(range(2, 201, 2))))
def testShortcut(self):
u"""Map should go in order"""
@@ -144,7 +149,10 @@
raise NameError
i = Iter.map(f, self.trueerror_maker())
next(i)
- self.assertRaises(NameError, i.next)
+ if sys.version_info.major >= 3:
+ self.assertRaises(NameError, i.__next__)
+ else:
+ self.assertRaises(NameError, i.next)
def testEmpty(self):
u"""Map of an empty iterator is empty"""
@@ -159,7 +167,7 @@
def testNumbers(self):
u"""1 to 50 + 51 to 100 = 1 to 100"""
- assert Iter.equal(Iter.cat(iter(range(1, 51)), iter(range(51, 101))),
+ assert Iter.equal(Iter.cat(iter(list(range(1, 51))), iter(list(range(51, 101)))),
self.one_to_100())
def testShortcut(self):
@@ -167,7 +175,10 @@
i = Iter.cat(self.typeerror_maker(), self.nameerror_maker())
next(i)
next(i)
- self.assertRaises(TypeError, i.next)
+ if sys.version_info.major >= 3:
+ self.assertRaises(TypeError, i.__next__)
+ else:
+ self.assertRaises(TypeError, i.next)
class AndOrTestCase(Iterators):
@@ -213,9 +224,9 @@
def testLargeAddition(self):
u"""Folds on 10000 element iterators"""
- assert Iter.foldl(self.f, 0, iter(range(1, 10001))) == 50005000
+ assert Iter.foldl(self.f, 0, iter(list(range(1, 10001)))) == 50005000
self.assertRaises(RuntimeError,
- Iter.foldr, self.f, 0, iter(range(1, 10001)))
+ Iter.foldr, self.f, 0, iter(list(range(1, 10001))))
def testLen(self):
u"""Use folds to calculate length of lists"""
=== modified file 'testing/unit/test_manifest.py'
--- testing/unit/test_manifest.py 2018-10-11 21:54:47 +0000
+++ testing/unit/test_manifest.py 2018-11-29 19:03:43 +0000
@@ -19,7 +19,7 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-from StringIO import StringIO
+from io import StringIO
import re
import sys
import types
@@ -37,10 +37,10 @@
def test_basic(self):
u"""Basic VolumeInfoTest"""
vi = manifest.VolumeInfo()
- vi.set_info(3, (u"hello", u"there"), None, (), None)
+ vi.set_info(3, (b"hello", b"there"), None, (), None)
vi.set_hash(u"MD5", u"aoseutaohe")
s = vi.to_string()
- assert isinstance(s, (str, u"".__class__))
+ assert isinstance(s, (b"".__class__, u"".__class__))
# print "---------\n%s\n---------" % s
vi2 = manifest.VolumeInfo()
vi2.from_string(s)
@@ -50,12 +50,12 @@
u"""Test VolumeInfo with special characters"""
vi = manifest.VolumeInfo()
vi.set_info(3234,
- (r"\n eu \x233", r"heuo", r'\xd8\xab\xb1Wb\xae\xc5]\x8a\xbb\x15v*\xf4\x0f!\xf9>\xe2Y\x86\xbb\xab\xdbp\xb0\x84\x13k\x1d\xc2\xf1\xf5e\xa5U\x82\x9aUV\xa0\xf4\xdf4\xba\xfdX\x03\x82\x07s\xce\x9e\x8b\xb34\x04\x9f\x17 \xf4\x8f\xa6\xfa\x97\xab\xd8\xac\xda\x85\xdcKvC\xfa#\x94\x92\x9e\xc9\xb7\xc3_\x0f\x84g\x9aB\x11<=^\xdbM\x13\x96c\x8b\xa7|*"\\\'^$@#!(){}?+ ~` '),
+ (b"\n eu \x233", b"heuo", b'\xd8\xab\xb1Wb\xae\xc5]\x8a\xbb\x15v*\xf4\x0f!\xf9>\xe2Y\x86\xbb\xab\xdbp\xb0\x84\x13k\x1d\xc2\xf1\xf5e\xa5U\x82\x9aUV\xa0\xf4\xdf4\xba\xfdX\x03\x82\x07s\xce\x9e\x8b\xb34\x04\x9f\x17 \xf4\x8f\xa6\xfa\x97\xab\xd8\xac\xda\x85\xdcKvC\xfa#\x94\x92\x9e\xc9\xb7\xc3_\x0f\x84g\x9aB\x11<=^\xdbM\x13\x96c\x8b\xa7|*"\\\'^$@#!(){}?+ ~` '),
None,
- (r"\n",),
+ (b"\n",),
None)
s = vi.to_string()
- assert isinstance(s, (str, u"".__class__))
+ assert isinstance(s, (str, bytes))
# print "---------\n%s\n---------" % s
vi2 = manifest.VolumeInfo()
vi2.from_string(s)
@@ -92,9 +92,9 @@
def test_basic(self):
vi1 = manifest.VolumeInfo()
- vi1.set_info(3, (u"hello",), None, (), None)
+ vi1.set_info(3, (b"hello",), None, (), None)
vi2 = manifest.VolumeInfo()
- vi2.set_info(4, (u"goodbye", u"there"), None, (u"aoeusht",), None)
+ vi2.set_info(4, (b"goodbye", b"there"), None, (b"aoeusht",), None)
vi3 = manifest.VolumeInfo()
vi3.set_info(34, (), None, (), None)
m = manifest.Manifest()
@@ -106,17 +106,17 @@
m.set_files_changed_info([])
s = m.to_string()
- assert s.lower().startswith(u"hostname")
- assert s.endswith(u"\n")
+ assert s.lower().startswith(b"hostname")
+ assert s.endswith(b"\n")
m2 = manifest.Manifest().from_string(s)
assert m == m2
def test_corrupt_filelist(self):
vi1 = manifest.VolumeInfo()
- vi1.set_info(3, (u"hello",), None, (), None)
+ vi1.set_info(3, (b"hello",), None, (), None)
vi2 = manifest.VolumeInfo()
- vi2.set_info(4, (u"goodbye", u"there"), None, (u"aoeusht",), None)
+ vi2.set_info(4, (b"goodbye", b"there"), None, (b"aoeusht",), None)
vi3 = manifest.VolumeInfo()
vi3.set_info(34, (), None, (), None)
m = manifest.Manifest()
@@ -126,16 +126,16 @@
self.set_global(u'local_path', path.Path(u"Foobar"))
m.set_dirinfo()
m.set_files_changed_info([
- (u'one', u'new'),
- (u'two', u'changed'),
- (u'three', u'new'),
+ (b'one', b'new'),
+ (b'two', b'changed'),
+ (b'three', b'new'),
])
# build manifest string
s = m.to_string()
# make filecount higher than files in list
- s2 = re.sub(u'Filelist 3', u'Filelist 5', s)
+ s2 = re.sub(b'Filelist 3', b'Filelist 5', s)
m2 = manifest.Manifest().from_string(s2)
assert hasattr(m2, u'corrupt_filelist')
=== modified file 'testing/unit/test_patchdir.py'
--- testing/unit/test_patchdir.py 2018-07-27 02:18:12 +0000
+++ testing/unit/test_patchdir.py 2018-11-29 19:03:43 +0000
@@ -19,9 +19,9 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-from future_builtins import map
+from future.builtins import map
-import cStringIO
+import io
import unittest
from duplicity import diffdir
@@ -112,20 +112,20 @@
# file object will be empty, and tarinfo will have path
# "snapshot/../warning-security-error"
assert not os.system(u"cat /dev/null >testfiles/output/file")
- path = Path(u"testfiles/output/file")
- path.index = (u"diff", u"..", u"warning-security-error")
+ path = Path(b"testfiles/output/file")
+ path.index = (b"diff", b"..", b"warning-security-error")
ti = path.get_tarinfo()
- fp = cStringIO.StringIO(u"")
+ fp = io.StringIO(u"")
tf.addfile(ti, fp)
tf.close()
- make_bad_tar(u"testfiles/output/bad.tar")
+ make_bad_tar(b"testfiles/output/bad.tar")
os.mkdir(u"testfiles/output/temp")
self.assertRaises(patchdir.PatchDirException, patchdir.Patch,
Path(u"testfiles/output/temp"),
- open(u"testfiles/output/bad.tar"))
+ open(u"testfiles/output/bad.tar", u"rb"))
assert not Path(u"testfiles/output/warning-security-error").exists()
@@ -205,7 +205,7 @@
u"""Make a snapshot ROPath, permissions 0o600"""
ss = self.out.append(u"snapshot")
fout = ss.open(u"wb")
- fout.write(u"hello, world!")
+ fout.write(b"hello, world!")
assert not fout.close()
ss.chmod(0o600)
ss.difftype = u"snapshot"
@@ -213,11 +213,11 @@
def get_delta(self, old_buf, new_buf):
u"""Return delta buffer from old to new"""
- sigfile = librsync.SigFile(cStringIO.StringIO(old_buf))
+ sigfile = librsync.SigFile(io.BytesIO(old_buf))
sig = sigfile.read()
assert not sigfile.close()
- deltafile = librsync.DeltaFile(sig, cStringIO.StringIO(new_buf))
+ deltafile = librsync.DeltaFile(sig, io.BytesIO(new_buf))
deltabuf = deltafile.read()
assert not deltafile.close()
return deltabuf
@@ -226,8 +226,8 @@
u"""Make a delta ROPath, permissions 0o640"""
delta1 = self.out.append(u"delta1")
fout = delta1.open(u"wb")
- fout.write(self.get_delta(u"hello, world!",
- u"aonseuth aosetnuhaonsuhtansoetuhaoe"))
+ fout.write(self.get_delta(b"hello, world!",
+ b"aonseuth aosetnuhaonsuhtansoetuhaoe"))
assert not fout.close()
delta1.chmod(0o640)
delta1.difftype = u"diff"
@@ -237,8 +237,8 @@
u"""Make another delta ROPath, permissions 0o644"""
delta2 = self.out.append(u"delta1")
fout = delta2.open(u"wb")
- fout.write(self.get_delta(u"aonseuth aosetnuhaonsuhtansoetuhaoe",
- u"3499 34957839485792357 458348573"))
+ fout.write(self.get_delta(b"aonseuth aosetnuhaonsuhtansoetuhaoe",
+ b"3499 34957839485792357 458348573"))
assert not fout.close()
delta2.chmod(0o644)
delta2.difftype = u"diff"
@@ -286,11 +286,11 @@
ids = u"%d:%d" % (os.getuid(), os.getgid())
- testseq([self.snapshot()], (u"%s 600" % ids), u"hello, world!")
+ testseq([self.snapshot()], (u"%s 600" % ids), b"hello, world!")
testseq([self.snapshot(), self.delta1()], (u"%s 640" % ids),
- u"aonseuth aosetnuhaonsuhtansoetuhaoe")
+ b"aonseuth aosetnuhaonsuhtansoetuhaoe")
testseq([self.snapshot(), self.delta1(), self.delta2()], (u"%s 644" % ids),
- u"3499 34957839485792357 458348573")
+ b"3499 34957839485792357 458348573")
if __name__ == u"__main__":
=== modified file 'testing/unit/test_path.py'
--- testing/unit/test_path.py 2018-07-27 02:18:12 +0000
+++ testing/unit/test_path.py 2018-11-29 19:03:43 +0000
@@ -73,10 +73,10 @@
def test_canonical(self):
u"""Test getting canonical version of path"""
c = Path(u".").get_canonical()
- assert c == u".", c
+ assert c == b".", c
c = Path(u"//foo/bar/./").get_canonical()
- assert c == u"/foo/bar", c
+ assert c == b"/foo/bar", c
def test_compare_verbose(self):
u"""Run compare_verbose on a few files"""
=== modified file 'testing/unit/test_selection.py'
--- testing/unit/test_selection.py 2018-07-19 21:06:44 +0000
+++ testing/unit/test_selection.py 2018-11-29 19:03:43 +0000
@@ -22,7 +22,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import types
-import StringIO
+import io
import unittest
import duplicity.path
@@ -41,7 +41,7 @@
self.Select = Select(self.root)
def makeext(self, path):
- return self.root.new_index(tuple(path.split(u"/")))
+ return self.root.new_index(tuple(path.encode().split(b"/")))
def testRegexp(self):
u"""Test regular expression selection func"""
@@ -239,8 +239,8 @@
u"""Turn strings in filelist into fileobjs"""
new_filelists = []
for f in filelist:
- if isinstance(f, unicode):
- new_filelists.append(StringIO.StringIO(f))
+ if isinstance(f, u"".__class__):
+ new_filelists.append(io.StringIO(f))
else:
new_filelists.append(f)
return new_filelists
=== modified file 'tox.ini'
--- tox.ini 2018-10-04 14:46:47 +0000
+++ tox.ini 2018-11-29 19:03:43 +0000
@@ -1,11 +1,17 @@
[tox]
-envlist = py27,code
+envlist = py27,py36,code
setenv = RUN_CODE_TESTS=0
[testenv:py27]
deps = -rrequirements.txt
commands = pytest {posargs}
+passenv = LC_CTYPE
+
+[testenv:py36]
+deps = -rrequirements.txt
+commands = pytest {posargs}
+passenv = LC_CTYPE
[testenv:code]
setenv = RUN_CODE_TESTS=1
@@ -21,12 +27,13 @@
[pycodestyle]
+# E402 module level import not at top of file: for python2 stdlib aliases
# W503 warnings for break before a binary operator. For new code, PEP8 prefers this and this warning should be ignored.
# W504 warnings for break after a binary operator. For new code, PEP8 prefers before, so these should be fixed -- TODO
# E722 do not use bare except -- TODO
# E731 do not assign a lambda expression, use a def -- TODO
# E741 ambiguous variable name -- TODO
-ignore = W503,W504,E722,E731,E741
+ignore = E402,W503,W504,E722,E731,E741
max-line-length = 120
Follow ups