duplicity-team team mailing list archive
-
duplicity-team team
-
Mailing list archive
-
Message #02167
[Merge] lp:~mterry/duplicity/py3-map-filter into lp:duplicity
Michael Terry has proposed merging lp:~mterry/duplicity/py3-map-filter into lp:duplicity.
Requested reviews:
duplicity-team (duplicity-team)
For more details, see:
https://code.launchpad.net/~mterry/duplicity/py3-map-filter/+merge/217315
In py3, map and filter return iterable objects, not lists. So in each case where we use them, I've either imported the future version or switched to a list comprehension if we really wanted a list.
--
https://code.launchpad.net/~mterry/duplicity/py3-map-filter/+merge/217315
Your team duplicity-team is requested to review the proposed merge of lp:~mterry/duplicity/py3-map-filter into lp:duplicity.
=== modified file 'duplicity/backend.py'
--- duplicity/backend.py 2014-04-20 14:02:34 +0000
+++ duplicity/backend.py 2014-04-25 23:56:18 +0000
@@ -416,7 +416,7 @@
if hasattr(self, '_list'):
# Make sure that duplicity internals only ever see byte strings
# for filenames, no matter what the backend thinks it is talking.
- return map(tobytes, self._list())
+ return [tobytes(x) for x in self._list()]
else:
raise NotImplementedError()
=== modified file 'duplicity/backends/_boto_single.py'
--- duplicity/backends/_boto_single.py 2014-04-17 22:26:39 +0000
+++ duplicity/backends/_boto_single.py 2014-04-25 23:56:18 +0000
@@ -137,7 +137,7 @@
# This folds the null prefix and all null parts, which means that:
# //MyBucket/ and //MyBucket are equivalent.
# //MyBucket//My///My/Prefix/ and //MyBucket/My/Prefix are equivalent.
- self.url_parts = filter(lambda x: x != '', parsed_url.path.split('/'))
+ self.url_parts = [x for x in parsed_url.path.split('/') if x != '']
if self.url_parts:
self.bucket_name = self.url_parts.pop(0)
=== modified file 'duplicity/backends/_ssh_pexpect.py'
--- duplicity/backends/_ssh_pexpect.py 2014-04-17 19:34:23 +0000
+++ duplicity/backends/_ssh_pexpect.py 2014-04-25 23:56:18 +0000
@@ -302,7 +302,7 @@
l = self.run_sftp_command(commandline, commands).split('\n')[1:]
- return filter(lambda x: x, map(string.strip, l))
+ return [x for x in map(string.strip, l) if x]
def delete(self, filename_list):
"""
=== modified file 'duplicity/backends/ftpbackend.py'
--- duplicity/backends/ftpbackend.py 2013-12-27 06:39:00 +0000
+++ duplicity/backends/ftpbackend.py 2014-04-25 23:56:18 +0000
@@ -110,9 +110,8 @@
# Do a long listing to avoid connection reset
commandline = "ncftpls %s -l '%s'" % (self.flags, self.url_string)
l = self.popen_persist(commandline).split('\n')
- l = filter(lambda x: x, l)
# Look for our files as the last element of a long list line
- return [x.split()[-1] for x in l if not x.startswith("total ")]
+ return [x.split()[-1] for x in l if x and not x.startswith("total ")]
def delete(self, filename_list):
"""Delete files in filename_list"""
=== modified file 'duplicity/backends/ftpsbackend.py'
--- duplicity/backends/ftpsbackend.py 2013-12-27 06:39:00 +0000
+++ duplicity/backends/ftpsbackend.py 2014-04-25 23:56:18 +0000
@@ -110,9 +110,8 @@
remote_dir = urllib.unquote(self.parsed_url.path.lstrip('/')).rstrip()
commandline = "lftp -c 'source %s;ls \'%s\''" % (self.tempname, remote_dir)
l = self.popen_persist(commandline).split('\n')
- l = filter(lambda x: x, l)
# Look for our files as the last element of a long list line
- return [x.split()[-1] for x in l]
+ return [x.split()[-1] for x in l if x]
def delete(self, filename_list):
"""Delete files in filename_list"""
=== modified file 'duplicity/backends/hsibackend.py'
--- duplicity/backends/hsibackend.py 2011-06-17 06:21:42 +0000
+++ duplicity/backends/hsibackend.py 2014-04-25 23:56:18 +0000
@@ -56,8 +56,7 @@
l = os.popen3(commandline)[2].readlines()[3:]
for i in range(0,len(l)):
l[i] = l[i].split()[-1]
- print filter(lambda x: x, l)
- return filter(lambda x: x, l)
+ return [x for x in l if x]
def delete(self, filename_list):
assert len(filename_list) > 0
=== modified file 'duplicity/backends/rsyncbackend.py'
--- duplicity/backends/rsyncbackend.py 2011-11-15 19:21:58 +0000
+++ duplicity/backends/rsyncbackend.py 2014-04-25 23:56:18 +0000
@@ -136,7 +136,7 @@
return None
commandline = "%s %s" % (self.cmd, self.url_string)
result, stdout = self.run_command(commandline)
- return filter(lambda x: x, map (split, stdout.split('\n')))
+ return [x for x in map (split, stdout.split('\n')) if x]
def delete(self, filename_list):
"""Delete files."""
=== modified file 'duplicity/collections.py'
--- duplicity/collections.py 2014-04-17 22:03:10 +0000
+++ duplicity/collections.py 2014-04-25 23:56:18 +0000
@@ -21,6 +21,8 @@
"""Classes and functions on collections of backup volumes"""
+from future_builtins import filter, map
+
import types
import gettext
@@ -147,7 +149,7 @@
try:
self.backend.delete(rfn)
except Exception:
- log.Debug(_("BackupSet.delete: missing %s") % map(util.ufn, rfn))
+ log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn])
pass
for lfn in globals.archive_dir.listdir():
pr = file_naming.parse(lfn)
@@ -158,7 +160,7 @@
try:
globals.archive_dir.append(lfn).delete()
except Exception:
- log.Debug(_("BackupSet.delete: missing %s") % map(util.ufn, lfn))
+ log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in lfn])
pass
util.release_lockfile()
@@ -247,8 +249,7 @@
assert self.info_set
volume_num_list = self.volume_name_dict.keys()
volume_num_list.sort()
- volume_filenames = map(lambda x: self.volume_name_dict[x],
- volume_num_list)
+ volume_filenames = [self.volume_name_dict[x] for x in volume_num_list]
if self.remote_manifest_name:
# For convenience of implementation for restart support, we treat
# local partial manifests as this set's remote manifest. But
@@ -338,7 +339,7 @@
"""
Return a list of sets in chain earlier or equal to time
"""
- older_incsets = filter(lambda s: s.end_time <= time, self.incset_list)
+ older_incsets = [s for s in self.incset_list if s.end_time <= time]
return [self.fullset] + older_incsets
def get_last(self):
@@ -527,7 +528,7 @@
return sig_dp.filtered_open("rb")
else:
filename_to_fileobj = self.backend.get_fileobj_read
- return map(filename_to_fileobj, self.get_filenames(time))
+ return [filename_to_fileobj(f) for f in self.get_filenames(time)]
def delete(self, keep_full=False):
"""
@@ -798,7 +799,7 @@
missing files.
"""
log.Debug(_("Extracting backup chains from list of files: %s")
- % map(util.ufn, filename_list))
+ % [util.ufn(f) for f in filename_list])
# First put filenames in set form
sets = []
def add_to_sets(filename):
@@ -816,7 +817,8 @@
sets.append(new_set)
else:
log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.ufn(filename))
- map(add_to_sets, filename_list)
+ for f in filename_list:
+ add_to_sets(f)
sets, incomplete_sets = self.get_sorted_sets(sets)
chains, orphaned_sets = [], []
@@ -839,7 +841,8 @@
else:
log.Debug(_("Found orphaned set %s") % (set.get_timestr(),))
orphaned_sets.append(set)
- map(add_to_chains, sets)
+ for s in sets:
+ add_to_chains(s)
return (chains, orphaned_sets, incomplete_sets)
def get_sorted_sets(self, set_list):
@@ -855,7 +858,7 @@
else:
time_set_pairs.append((set.end_time, set))
time_set_pairs.sort()
- return (map(lambda p: p[1], time_set_pairs), incomplete_sets)
+ return ([p[1] for p in time_set_pairs], incomplete_sets)
def get_signature_chains(self, local, filelist = None):
"""
@@ -951,15 +954,14 @@
if not self.all_backup_chains:
raise CollectionsError("No backup chains found")
- covering_chains = filter(lambda c: c.start_time <= time <= c.end_time,
- self.all_backup_chains)
+ covering_chains = [c for c in self.all_backup_chains
+ if c.start_time <= time <= c.end_time]
if len(covering_chains) > 1:
raise CollectionsError("Two chains cover the given time")
elif len(covering_chains) == 1:
return covering_chains[0]
- old_chains = filter(lambda c: c.end_time < time,
- self.all_backup_chains)
+ old_chains = [c for c in self.all_backup_chains if c.end_time < time]
if old_chains:
return old_chains[-1]
else:
@@ -976,13 +978,12 @@
if not self.all_sig_chains:
raise CollectionsError("No signature chains found")
- covering_chains = filter(lambda c: c.start_time <= time <= c.end_time,
- self.all_sig_chains)
+ covering_chains = [c for c in self.all_sig_chains
+ if c.start_time <= time <= c.end_time]
if covering_chains:
return covering_chains[-1] # prefer local if multiple sig chains
- old_chains = filter(lambda c: c.end_time < time,
- self.all_sig_chains)
+ old_chains = [c for c in self.all_sig_chains if c.end_time < time]
if old_chains:
return old_chains[-1]
else:
@@ -1024,9 +1025,9 @@
def sort_sets(self, setlist):
"""Return new list containing same elems of setlist, sorted by time"""
- pairs = map(lambda s: (s.get_time(), s), setlist)
+ pairs = [(s.get_time(), s) for s in setlist]
pairs.sort()
- return map(lambda p: p[1], pairs)
+ return [p[1] for p in pairs]
def get_chains_older_than(self, t):
"""
=== modified file 'duplicity/commandline.py'
--- duplicity/commandline.py 2014-04-19 19:54:54 +0000
+++ duplicity/commandline.py 2014-04-25 23:56:18 +0000
@@ -21,6 +21,8 @@
"""Parse command line, check for consistency, and set globals"""
+from future_builtins import filter
+
from copy import copy
import optparse
import os
=== modified file 'duplicity/diffdir.py'
--- duplicity/diffdir.py 2014-04-17 21:49:37 +0000
+++ duplicity/diffdir.py 2014-04-25 23:56:18 +0000
@@ -27,6 +27,8 @@
the second, the ROPath iterator is put into tar block form.
"""
+from future_builtins import map
+
import cStringIO, types, math
from duplicity import statistics
from duplicity import util
@@ -79,8 +81,8 @@
global stats
stats = statistics.StatsDeltaProcess()
if type(dirsig_fileobj_list) is types.ListType:
- sig_iter = combine_path_iters(map(sigtar2path_iter,
- dirsig_fileobj_list))
+ sig_iter = combine_path_iters([sigtar2path_iter(x) for x
+ in dirsig_fileobj_list])
else:
sig_iter = sigtar2path_iter(dirsig_fileobj_list)
delta_iter = get_delta_iter(path_iter, sig_iter)
@@ -342,8 +344,7 @@
else:
break # assumed triple_list sorted, so can exit now
- triple_list = filter(lambda x: x, map(get_triple,
- range(len(path_iter_list))))
+ triple_list = [x for x in map(get_triple, range(len(path_iter_list))) if x]
while triple_list:
triple_list.sort()
yield triple_list[0][2]
@@ -375,7 +376,7 @@
"""
Return path iter combining signatures in list of open sig files
"""
- return combine_path_iters(map(sigtar2path_iter, sig_infp_list))
+ return combine_path_iters([sigtar2path_iter(x) for x in sig_infp_list])
class FileWithReadCounter:
=== modified file 'duplicity/dup_time.py'
--- duplicity/dup_time.py 2014-04-17 21:46:00 +0000
+++ duplicity/dup_time.py 2014-04-25 23:56:18 +0000
@@ -21,6 +21,8 @@
"""Provide time related exceptions and functions"""
+from future_builtins import map
+
import time, types, re, calendar
from duplicity import globals
=== modified file 'duplicity/manifest.py'
--- duplicity/manifest.py 2013-12-27 06:39:00 +0000
+++ duplicity/manifest.py 2014-04-25 23:56:18 +0000
@@ -21,6 +21,8 @@
"""Create and edit manifest for session contents"""
+from future_builtins import filter
+
import re
from duplicity import log
=== modified file 'duplicity/patchdir.py'
--- duplicity/patchdir.py 2014-04-20 06:06:34 +0000
+++ duplicity/patchdir.py 2014-04-25 23:56:18 +0000
@@ -19,6 +19,8 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from future_builtins import filter, map
+
import re #@UnusedImport
import types
import os
@@ -517,11 +519,10 @@
the restrict_index.
"""
- diff_iters = map( difftar2path_iter, tarfile_list )
+ diff_iters = [difftar2path_iter(x) for x in tarfile_list]
if restrict_index:
# Apply filter before integration
- diff_iters = map( lambda i: filter_path_iter( i, restrict_index ),
- diff_iters )
+ diff_iters = [filter_path_iter(x, restrict_index) for x in diff_iters]
return integrate_patch_iters( diff_iters )
def Write_ROPaths( base_path, rop_iter ):
=== modified file 'duplicity/path.py'
--- duplicity/path.py 2014-04-17 20:50:57 +0000
+++ duplicity/path.py 2014-04-25 23:56:18 +0000
@@ -26,6 +26,8 @@
"""
+from future_builtins import filter
+
import stat, errno, socket, time, re, gzip
from duplicity import tarfile
=== modified file 'duplicity/selection.py'
--- duplicity/selection.py 2014-04-17 20:50:57 +0000
+++ duplicity/selection.py 2014-04-25 23:56:18 +0000
@@ -19,6 +19,8 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from future_builtins import filter, map
+
import os #@UnusedImport
import re #@UnusedImport
import stat #@UnusedImport
@@ -235,8 +237,8 @@
filelists[filelists_index], 0, arg))
filelists_index += 1
elif opt == "--exclude-globbing-filelist":
- map(self.add_selection_func,
- self.filelist_globbing_get_sfs(filelists[filelists_index], 0, arg))
+ for sf in self.filelist_globbing_get_sfs(filelists[filelists_index], 0, arg):
+ self.add_selection_func(sf)
filelists_index += 1
elif opt == "--exclude-other-filesystems":
self.add_selection_func(self.other_filesystems_get_sf(0))
@@ -249,8 +251,8 @@
filelists[filelists_index], 1, arg))
filelists_index += 1
elif opt == "--include-globbing-filelist":
- map(self.add_selection_func,
- self.filelist_globbing_get_sfs(filelists[filelists_index], 1, arg))
+ for sf in self.filelist_globbing_get_sfs(filelists[filelists_index], 1, arg):
+ self.add_selection_func(sf)
filelists_index += 1
elif opt == "--include-regexp":
self.add_selection_func(self.regexp_get_sf(arg, 1))
@@ -626,8 +628,7 @@
raise GlobbingError("Consecutive '/'s found in globbing string "
+ glob_str)
- prefixes = map(lambda i: "/".join(glob_parts[:i+1]),
- range(len(glob_parts)))
+ prefixes = ["/".join(glob_parts[:i+1]) for i in range(len(glob_parts))]
# we must make exception for root "/", only dir to end in slash
if prefixes[0] == "":
prefixes[0] = "/"
=== modified file 'duplicity/statistics.py'
--- duplicity/statistics.py 2014-04-17 21:46:00 +0000
+++ duplicity/statistics.py 2014-04-25 23:56:18 +0000
@@ -21,6 +21,8 @@
"""Generate and process backup statistics"""
+from future_builtins import map
+
import re, time, os
from duplicity import dup_time
@@ -99,8 +101,7 @@
def get_stats_line(self, index, use_repr = 1):
"""Return one line abbreviated version of full stats string"""
- file_attrs = map(lambda attr: str(self.get_stat(attr)),
- self.stat_file_attrs)
+ file_attrs = [str(self.get_stat(a)) for a in self.stat_file_attrs]
if not index:
filename = "."
else:
=== modified file 'duplicity/util.py'
--- duplicity/util.py 2014-04-20 06:06:34 +0000
+++ duplicity/util.py 2014-04-25 23:56:18 +0000
@@ -23,6 +23,8 @@
Miscellaneous utilities.
"""
+from future_builtins import map
+
import errno
import os
import sys
=== modified file 'testing/functional/__init__.py'
--- testing/functional/__init__.py 2014-04-20 05:58:47 +0000
+++ testing/functional/__init__.py 2014-04-25 23:56:18 +0000
@@ -18,6 +18,8 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from future_builtins import map
+
import os
import pexpect
import time
=== modified file 'testing/test_code.py'
--- testing/test_code.py 2014-04-20 14:02:34 +0000
+++ testing/test_code.py 2014-04-25 23:56:18 +0000
@@ -38,22 +38,20 @@
def test_2to3(self):
# As we modernize the source code, we can remove more and more nofixes
self.run_checker(["2to3",
- "--nofix=dict",
- "--nofix=filter",
- "--nofix=map",
"--nofix=next",
- "--nofix=print",
"--nofix=types",
"--nofix=unicode",
- "--nofix=xrange",
# The following fixes we don't want to remove, since they are false
# positives, things we don't care about, or real incompatibilities
# but which 2to3 can fix for us better automatically.
"--nofix=callable",
+ "--nofix=dict",
"--nofix=future",
"--nofix=imports",
+ "--nofix=print",
"--nofix=raw_input",
"--nofix=urllib",
+ "--nofix=xrange",
_top_dir])
def test_pylint(self):
=== modified file 'testing/unit/test_collections.py'
--- testing/unit/test_collections.py 2014-04-20 05:58:47 +0000
+++ testing/unit/test_collections.py 2014-04-25 23:56:18 +0000
@@ -219,16 +219,16 @@
cs = self.get_filelist2_cs()
oldsets = cs.get_older_than(
dup_time.genstrtotime("2002-05-01T16:17:01-07:00"))
- oldset_times = map(lambda s: s.get_time(), oldsets)
- right_times = map(dup_time.genstrtotime, ['2001-01-01T16:17:01-07:00'])
+ oldset_times = [s.get_time() for s in oldsets]
+ right_times = [dup_time.genstrtotime('2001-01-01T16:17:01-07:00')]
assert oldset_times == right_times, \
[oldset_times, right_times]
oldsets_required = cs.get_older_than_required(
dup_time.genstrtotime("2002-08-17T20:00:00-07:00"))
- oldset_times = map(lambda s: s.get_time(), oldsets_required)
- right_times_required = map(dup_time.genstrtotime,
- ['2002-08-17T16:17:01-07:00'])
+ oldset_times = [s.get_time() for s in oldsets_required]
+ right_times_required = [dup_time.genstrtotime(
+ '2002-08-17T16:17:01-07:00')]
assert oldset_times == right_times_required, \
[oldset_times, right_times_required]
=== modified file 'testing/unit/test_diffdir.py'
--- testing/unit/test_diffdir.py 2014-04-20 05:58:47 +0000
+++ testing/unit/test_diffdir.py 2014-04-25 23:56:18 +0000
@@ -189,9 +189,10 @@
delta1)
# Write new signature and delta to deltadir2 and sigdir2, compare
- block_iter = diffdir.DirDelta_WriteSig(get_sel(cur_dir),
- map(lambda p: p.open("rb"), sigstack),
- incsig.open("wb"))
+ block_iter = diffdir.DirDelta_WriteSig(
+ get_sel(cur_dir),
+ [p.open("rb") for p in sigstack],
+ incsig.open("wb"))
sigstack.append(incsig)
diffdir.write_block_iter(block_iter, delta2)
=== modified file 'testing/unit/test_patchdir.py'
--- testing/unit/test_patchdir.py 2014-04-20 05:58:47 +0000
+++ testing/unit/test_patchdir.py 2014-04-25 23:56:18 +0000
@@ -19,6 +19,8 @@
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+from future_builtins import map
+
import sys, cStringIO, unittest
from duplicity import diffdir
@@ -135,12 +137,12 @@
def test_collate(self):
"""Test collate_iters function"""
- indicies = map(index, [0,1,2,3])
+ indicies = [index(i) for i in [0,1,2,3]]
helper = lambda i: indicies[i]
makeiter1 = lambda: iter(indicies)
- makeiter2 = lambda: iter(map(helper, [0,1,3]))
- makeiter3 = lambda: iter(map(helper, [1,2]))
+ makeiter2 = lambda: map(helper, [0,1,3])
+ makeiter3 = lambda: map(helper, [1,2])
outiter = patchdir.collate_iters([makeiter1(), makeiter2()])
assert Iter.equal(outiter,
@@ -158,8 +160,8 @@
(indicies[3], indicies[3], None)]), 1)
assert Iter.equal(patchdir.collate_iters([makeiter1(), iter([])]),
- iter(map(lambda i: (i, None), indicies)))
- assert Iter.equal(iter(map(lambda i: (i, None), indicies)),
+ map(lambda i: (i, None), indicies))
+ assert Iter.equal(map(lambda i: (i, None), indicies),
patchdir.collate_iters([makeiter1(), iter([])]))
def test_tuple(self):
Follow ups