duplicity-team team mailing list archive: Message #04141
[Merge] lp:~benoit.bertholon/duplicity/duplicity into lp:duplicity
Benoit Bertholon has proposed merging lp:~benoit.bertholon/duplicity/duplicity into lp:duplicity.
Requested reviews:
duplicity-team (duplicity-team)
Related bugs:
Bug #1666194 in Duplicity: "ProcessCommandLine function called twice fail and arglist argument not used"
https://bugs.launchpad.net/duplicity/+bug/1666194
For more details, see:
https://code.launchpad.net/~benoit.bertholon/duplicity/duplicity/+merge/318382
The issue with bug #1666194 (a minor bug) is that the globals.archive_dir variable is set to a string at the beginning of execution and later becomes a Path, so the type of the variable changes over time. To fix that, I introduced a second variable, archive_dir_path, to store the Path.
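For illustration, here is a minimal sketch of the failure mode (simplified and hypothetical: the Path class below stands in for duplicity.path.Path, and process_command_line stands in for commandline.ProcessCommandLine):

    import os

    class Path(object):
        # Stand-in for duplicity.path.Path; the real class differs.
        def __init__(self, name):
            self.name = name

    # A str at import time, like globals.archive_dir.
    archive_dir = os.path.expanduser("~/.cache/duplicity")

    def process_command_line():
        # Expands the archive dir, then rebinds the same global to a Path.
        global archive_dir
        expanded = os.path.expandvars(archive_dir)  # expects a str
        archive_dir = Path(expanded)                # now a Path, not a str

    process_command_line()    # works the first time
    # A second call passes a Path where a str is expected and fails:
    # process_command_line() # TypeError: argument of type 'Path' is not iterable

With the patch, globals.archive_dir always stays a string and globals.archive_dir_path holds the Path, so a second call to ProcessCommandLine sees the type it expects.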
This time I ran the tests with tox.
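(For reference, the branch tests can be run with:

    $ tox

from the branch root; which environments run depends on the project's tox.ini.)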
Commit message:
Use the globals.archive_dir variable to store only a string;
in the case of a path, use globals.archive_dir_path.
--
Your team duplicity-team is requested to review the proposed merge of lp:~benoit.bertholon/duplicity/duplicity into lp:duplicity.
=== modified file 'bin/duplicity'
--- bin/duplicity 2017-02-20 00:05:17 +0000
+++ bin/duplicity 2017-02-27 16:43:17 +0000
@@ -481,7 +481,7 @@
"""
Return a fileobj opened for writing, save results as manifest
- Save manifest in globals.archive_dir gzipped.
+ Save manifest in globals.archive_dir_path gzipped.
Save them on the backend encrypted as needed.
@type man_type: string
@@ -501,7 +501,7 @@
manifest=True,
encrypted=globals.encryption)
- fh = dup_temp.get_fileobj_duppath(globals.archive_dir,
+ fh = dup_temp.get_fileobj_duppath(globals.archive_dir_path,
part_man_filename,
perm_man_filename,
remote_man_filename)
@@ -531,7 +531,7 @@
remote_sig_filename = file_naming.get(sig_type, encrypted=globals.encryption,
gzipped=globals.compression)
- fh = dup_temp.get_fileobj_duppath(globals.archive_dir,
+ fh = dup_temp.get_fileobj_duppath(globals.archive_dir_path,
part_sig_filename,
perm_sig_filename,
remote_sig_filename,
@@ -541,7 +541,7 @@
def full_backup(col_stats):
"""
- Do full backup of directory to backend, using archive_dir
+ Do full backup of directory to backend, using archive_dir_path
@type col_stats: CollectionStatus object
@param col_stats: collection status
@@ -630,7 +630,7 @@
def incremental_backup(sig_chain):
"""
- Do incremental backup of directory to backend, using archive_dir
+ Do incremental backup of directory to backend, using archive_dir_path
@rtype: void
@return: void
@@ -910,7 +910,7 @@
col_stats.backend.delete(ext_remote)
for fn in ext_local:
try:
- globals.archive_dir.append(fn).delete()
+ globals.archive_dir_path.append(fn).delete()
except Exception:
pass
else:
@@ -1079,7 +1079,7 @@
return (pr, loc_name, fn)
def remove_local(fn):
- del_name = globals.archive_dir.append(fn).name
+ del_name = globals.archive_dir_path.append(fn).name
log.Notice(_("Deleting local %s (not authoritative at backend).") %
util.ufn(del_name))
@@ -1143,14 +1143,14 @@
else:
gpg.GzipWriteFile(src_iter, tdp.name, size=sys.maxsize)
tdp.setdata()
- tdp.move(globals.archive_dir.append(loc_name))
+ tdp.move(globals.archive_dir_path.append(loc_name))
# get remote metafile list
remlist = globals.backend.list()
remote_metafiles, ignored, rem_needpass = get_metafiles(remlist)
# get local metafile list
- loclist = globals.archive_dir.listdir()
+ loclist = globals.archive_dir_path.listdir()
local_metafiles, local_partials, loc_needpass = get_metafiles(loclist)
# we have the list of metafiles on both sides. remote is always
@@ -1374,13 +1374,13 @@
# determine what action we're performing and process command line
action = commandline.ProcessCommandLine(sys.argv[1:])
- globals.lockfile = FileLock(os.path.join(globals.archive_dir.name, "lockfile"), threaded=False)
+ globals.lockfile = FileLock(os.path.join(globals.archive_dir_path.name, "lockfile"), threaded=False)
if globals.lockfile.is_locked():
log.FatalError(
"Another instance is already running with this archive directory\n"
"If you are sure that this is the only instance running you may delete\n"
"the following lockfile and run the command again :\n"
- "\t%s" % os.path.join(globals.archive_dir.name, "lockfile.lock"),
+ "\t%s" % os.path.join(globals.archive_dir_path.name, "lockfile.lock"),
log.ErrorCode.user_error)
log.shutdown()
sys.exit(2)
@@ -1413,7 +1413,7 @@
# get current collection status
col_stats = collections.CollectionsStatus(globals.backend,
- globals.archive_dir).set_values()
+ globals.archive_dir_path).set_values()
while True:
# if we have to clean up the last partial, then col_stats are invalidated
@@ -1443,7 +1443,7 @@
log.Notice(_("Cleaning up previous partial %s backup set, restarting." % action))
last_backup.delete()
col_stats = collections.CollectionsStatus(globals.backend,
- globals.archive_dir).set_values()
+ globals.archive_dir_path).set_values()
continue
break
break
=== modified file 'duplicity/collections.py'
--- duplicity/collections.py 2017-01-19 00:32:12 +0000
+++ duplicity/collections.py 2017-02-27 16:43:17 +0000
@@ -142,14 +142,14 @@
remote_filename)
self.remote_manifest_name = remote_filename
- for local_filename in globals.archive_dir.listdir():
+ for local_filename in globals.archive_dir_path.listdir():
pr = file_naming.parse(local_filename)
if (pr and pr.manifest and pr.type == self.type and
pr.time == self.time and
pr.start_time == self.start_time and
pr.end_time == self.end_time):
self.local_manifest_path = \
- globals.archive_dir.append(local_filename)
+ globals.archive_dir_path.append(local_filename)
self.set_files_changed()
break
@@ -165,13 +165,13 @@
except Exception:
log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn])
pass
- for lfn in globals.archive_dir.listdir():
+ for lfn in globals.archive_dir_path.listdir():
pr = file_naming.parse(lfn)
if (pr and pr.time == self.time and
pr.start_time == self.start_time and
pr.end_time == self.end_time):
try:
- globals.archive_dir.append(lfn).delete()
+ globals.archive_dir_path.append(lfn).delete()
except Exception:
log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in lfn])
pass
@@ -459,19 +459,19 @@
Return new SignatureChain.
local should be true iff the signature chain resides in
- globals.archive_dir and false if the chain is in
+ globals.archive_dir_path and false if the chain is in
globals.backend.
- @param local: True if sig chain in globals.archive_dir
+ @param local: True if sig chain in globals.archive_dir_path
@type local: Boolean
@param location: Where the sig chain is located
- @type location: globals.archive_dir or globals.backend
+ @type location: globals.archive_dir_path or globals.backend
"""
if local:
- self.archive_dir, self.backend = location, None
+ self.archive_dir_path, self.backend = location, None
else:
- self.archive_dir, self.backend = None, location
+ self.archive_dir_path, self.backend = None, location
self.fullsig = None # filename of full signature
self.inclist = [] # list of filenames of incremental signatures
self.start_time, self.end_time = None, None
@@ -480,7 +480,7 @@
"""
Local or Remote and List of files in the set
"""
- if self.archive_dir:
+ if self.archive_dir_path:
place = _("local")
else:
place = _("remote")
@@ -502,7 +502,7 @@
"""
Return true if represents a signature chain in archive_dir
"""
- if self.archive_dir:
+ if self.archive_dir_path:
return True
else:
return False
@@ -539,10 +539,10 @@
optionally at a certain time
"""
assert self.fullsig
- if self.archive_dir: # local
+ if self.archive_dir_path: # local
def filename_to_fileobj(filename):
- """Open filename in archive_dir, return filtered fileobj"""
- sig_dp = path.DupPath(self.archive_dir.name, (filename,))
+ """Open filename in archive_dir_path, return filtered fileobj"""
+ sig_dp = path.DupPath(self.archive_dir_path.name, (filename,))
return sig_dp.filtered_open("rb")
else:
filename_to_fileobj = self.backend.get_fileobj_read
@@ -553,11 +553,11 @@
Remove all files in signature set
"""
# Try to delete in opposite order, so something useful even if aborted
- if self.archive_dir:
+ if self.archive_dir_path:
for i in range(len(self.inclist) - 1, -1, -1):
- self.archive_dir.append(self.inclist[i]).delete()
+ self.archive_dir_path.append(self.inclist[i]).delete()
if not keep_full:
- self.archive_dir.append(self.fullsig).delete()
+ self.archive_dir_path.append(self.fullsig).delete()
else:
assert self.backend
inclist_copy = self.inclist[:]
@@ -588,12 +588,12 @@
"""
Hold information about available chains and sets
"""
- def __init__(self, backend, archive_dir):
+ def __init__(self, backend, archive_dir_path):
"""
Make new object. Does not set values
"""
self.backend = backend
- self.archive_dir = archive_dir
+ self.archive_dir_path = archive_dir_path
# Will hold (signature chain, backup chain) pair of active
# (most recent) chains
@@ -618,7 +618,7 @@
Return summary of the collection, suitable for printing to log
"""
l = ["backend %s" % (self.backend.__class__.__name__,),
- "archive-dir %s" % (self.archive_dir,)]
+ "archive-dir %s" % (self.archive_dir_path,)]
for i in range(len(self.other_backup_chains)):
# A bit of a misnomer. Chain might have a sig.
@@ -642,7 +642,7 @@
u"-----------------",
_("Connecting with backend: %s") %
(self.backend.__class__.__name__,),
- _("Archive dir: %s") % (util.ufn(self.archive_dir.name),)]
+ _("Archive dir: %s") % (util.ufn(self.archive_dir_path.name),)]
l.append("\n" +
ngettext("Found %d secondary backup chain.",
@@ -681,7 +681,7 @@
def set_values(self, sig_chain_warning=1):
"""
- Set values from archive_dir and backend.
+ Set values from archive_dir_path and backend.
Returns self for convenience. If sig_chain_warning is set to None,
do not warn about unnecessary sig chains. This is because there may
@@ -697,7 +697,7 @@
len(backend_filename_list))
# get local filename list
- local_filename_list = self.archive_dir.listdir()
+ local_filename_list = self.archive_dir_path.listdir()
log.Debug(ngettext("%d file exists in cache",
"%d files exist in cache",
len(local_filename_list)) %
@@ -884,7 +884,7 @@
def get_signature_chains(self, local, filelist=None):
"""
- Find chains in archive_dir (if local is true) or backend
+ Find chains in archive_dir_path (if local is true) or backend
Use filelist if given, otherwise regenerate. Return value is
pair (list of chains, list of signature paths not in any
@@ -894,7 +894,7 @@
if filelist is not None:
return filelist
elif local:
- return self.archive_dir.listdir()
+ return self.archive_dir_path.listdir()
else:
return self.backend.list()
@@ -903,7 +903,7 @@
Return new empty signature chain
"""
if local:
- return SignatureChain(True, self.archive_dir)
+ return SignatureChain(True, self.archive_dir_path)
else:
return SignatureChain(False, self.backend)
=== modified file 'duplicity/commandline.py'
--- duplicity/commandline.py 2017-02-21 21:29:03 +0000
+++ duplicity/commandline.py 2017-02-27 16:43:17 +0000
@@ -735,7 +735,7 @@
set_archive_dir(expand_archive_dir(globals.archive_dir,
globals.backup_name))
- log.Info(_("Using archive dir: %s") % (util.ufn(globals.archive_dir.name),))
+ log.Info(_("Using archive dir: %s") % (util.ufn(globals.archive_dir_path.name),))
log.Info(_("Using backup name: %s") % (globals.backup_name,))
return args
@@ -956,12 +956,12 @@
os.makedirs(dirstring)
except Exception:
pass
- archive_dir = path.Path(dirstring)
- if not archive_dir.isdir():
+ archive_dir_path = path.Path(dirstring)
+ if not archive_dir_path.isdir():
log.FatalError(_("Specified archive directory '%s' does not exist, "
- "or is not a directory") % (util.ufn(archive_dir.name),),
+ "or is not a directory") % (util.ufn(archive_dir_path.name),),
log.ErrorCode.bad_archive_dir)
- globals.archive_dir = archive_dir
+ globals.archive_dir_path = archive_dir_path
def set_sign_key(sign_key):
=== modified file 'duplicity/globals.py'
--- duplicity/globals.py 2017-02-21 21:29:03 +0000
+++ duplicity/globals.py 2017-02-27 16:43:17 +0000
@@ -59,6 +59,7 @@
# NOTE: this gets expanded in duplicity.commandline
os.environ["XDG_CACHE_HOME"] = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
archive_dir = os.path.expandvars("$XDG_CACHE_HOME/duplicity")
+archive_dir_path = None
# config dir for future use
os.environ["XDG_CONFIG_HOME"] = os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
=== modified file 'duplicity/progress.py'
--- duplicity/progress.py 2016-06-28 21:03:46 +0000
+++ duplicity/progress.py 2017-02-27 16:43:17 +0000
@@ -63,7 +63,7 @@
# If restarting Full, discard marshalled data and start over
if globals.restart is not None and globals.restart.start_vol >= 1:
try:
- progressfd = open('%s/progress' % globals.archive_dir.name, 'r')
+ progressfd = open('%s/progress' % globals.archive_dir_path.name, 'r')
snapshot = pickle.load(progressfd)
progressfd.close()
except:
@@ -77,7 +77,7 @@
"""
Serializes object to cache
"""
- progressfd = open('%s/progress' % globals.archive_dir.name, 'w+')
+ progressfd = open('%s/progress' % globals.archive_dir_path.name, 'w+')
pickle.dump(self, progressfd)
progressfd.close()
=== modified file 'testing/unit/test_collections.py'
--- testing/unit/test_collections.py 2017-01-18 22:59:29 +0000
+++ testing/unit/test_collections.py 2017-02-27 16:43:17 +0000
@@ -81,8 +81,8 @@
self.unpack_testfiles()
col_test_dir = path.Path("testfiles/collectionstest")
- archive_dir = col_test_dir.append("archive_dir")
- self.set_global('archive_dir', archive_dir)
+ archive_dir_path = col_test_dir.append("archive_dir")
+ self.set_global('archive_dir_path', archive_dir_path)
self.archive_dir_backend = backend.get_backend("file://testfiles/collectionstest"
"/archive_dir")
@@ -98,7 +98,7 @@
def test_backup_chains(self):
"""Test basic backup chain construction"""
random.shuffle(filename_list1)
- cs = collections.CollectionsStatus(None, globals.archive_dir)
+ cs = collections.CollectionsStatus(None, globals.archive_dir_path)
chains, orphaned, incomplete = cs.get_backup_chains(filename_list1) # @UnusedVariable
if len(chains) != 1 or len(orphaned) != 0:
print(chains)
@@ -119,26 +119,26 @@
assert cs.matched_chain_pair[0].end_time == 1029826800
assert len(cs.all_backup_chains) == 1, cs.all_backup_chains
- cs = collections.CollectionsStatus(self.real_backend, globals.archive_dir).set_values()
+ cs = collections.CollectionsStatus(self.real_backend, globals.archive_dir_path).set_values()
check_cs(cs)
assert cs.matched_chain_pair[0].islocal()
def test_sig_chain(self):
"""Test a single signature chain"""
- chain = collections.SignatureChain(1, globals.archive_dir)
+ chain = collections.SignatureChain(1, globals.archive_dir_path)
for filename in local_sigchain_filename_list:
assert chain.add_filename(filename)
assert not chain.add_filename("duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
def test_sig_chains(self):
"""Test making signature chains from filename list"""
- cs = collections.CollectionsStatus(None, globals.archive_dir)
+ cs = collections.CollectionsStatus(None, globals.archive_dir_path)
chains, orphaned_paths = cs.get_signature_chains(local=1)
self.sig_chains_helper(chains, orphaned_paths)
def test_sig_chains2(self):
"""Test making signature chains from filename list on backend"""
- cs = collections.CollectionsStatus(self.archive_dir_backend, globals.archive_dir)
+ cs = collections.CollectionsStatus(self.archive_dir_backend, globals.archive_dir_path)
chains, orphaned_paths = cs.get_signature_chains(local=None)
self.sig_chains_helper(chains, orphaned_paths)
@@ -154,7 +154,7 @@
def sigchain_fileobj_get(self, local):
"""Return chain, local if local is true with filenames added"""
if local:
- chain = collections.SignatureChain(1, globals.archive_dir)
+ chain = collections.SignatureChain(1, globals.archive_dir_path)
for filename in local_sigchain_filename_list:
assert chain.add_filename(filename)
else:
@@ -183,7 +183,7 @@
test_fileobj(2, "Hello 2")
def test_sigchain_fileobj(self):
- """Test getting signature chain fileobjs from archive_dir"""
+ """Test getting signature chain fileobjs from archive_dir_path"""
self.set_gpg_profile()
self.sigchain_fileobj_check_list(self.sigchain_fileobj_get(1))
self.sigchain_fileobj_check_list(self.sigchain_fileobj_get(None))
@@ -195,7 +195,7 @@
p = self.output_dir.append(filename)
p.touch()
- cs = collections.CollectionsStatus(self.output_dir_backend, globals.archive_dir)
+ cs = collections.CollectionsStatus(self.output_dir_backend, globals.archive_dir_path)
cs.set_values()
return cs