duplicity-team team mailing list archive
Message #01897
[Merge] lp:~louis-bouchard/duplicity/add-allow-concurrency into lp:duplicity
Louis Bouchard has proposed merging lp:~louis-bouchard/duplicity/add-allow-concurrency into lp:duplicity.
Requested reviews:
duplicity-team (duplicity-team)
For more details, see:
https://code.launchpad.net/~louis-bouchard/duplicity/add-allow-concurrency/+merge/202134
Implement a locking mechanism to prevent concurrent execution of duplicity instances that share the same cache directory. This is the default behavior.
Also implement an --allow-concurrency option to disable the locking if required.
This functionality adds a dependency on python-lockfile.
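For reviewers unfamiliar with python-lockfile, the pattern the branch relies on is roughly the following; this is a minimal standalone sketch, and the archive path, messages, and backup placeholder are illustrative, not duplicity code:

import os
import sys
from lockfile import FileLock, UnlockError

archive_dir = "/tmp/duplicity-cache"  # stands in for globals.archive_dir.name
lock = FileLock(os.path.join(archive_dir, "lockfile"))

if lock.is_locked():
    # Another process already holds the lock on this cache directory.
    sys.exit("another instance is already running with this archive directory")

lock.acquire(timeout=0)  # non-blocking; raises AlreadyLocked if we lose the race
try:
    pass  # ... run the backup here ...
finally:
    try:
        lock.release()
    except UnlockError:
        # Lock already released (or never held); ignore, as util.release_lockfile() does.
        pass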
--
https://code.launchpad.net/~louis-bouchard/duplicity/add-allow-concurrency/+merge/202134
Your team duplicity-team is requested to review the proposed merge of lp:~louis-bouchard/duplicity/add-allow-concurrency into lp:duplicity.
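To exercise the new option, a hypothetical invocation would look like the following (source directory and target URL are placeholders, not taken from this merge):

duplicity --allow-concurrency /home/user/data scp://backup.example.com/duplicity

Without the flag, a second run against the same archive directory exits with the fatal error added in the diff below.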
=== modified file 'bin/duplicity'
--- bin/duplicity 2013-12-27 06:39:00 +0000
+++ bin/duplicity 2014-01-17 16:49:58 +0000
@@ -31,6 +31,7 @@
import traceback, platform, statvfs, resource, re
import threading
from datetime import datetime
+from lockfile import FileLock
pwd = os.path.abspath(os.path.dirname(sys.argv[0]))
if os.path.exists(os.path.join(pwd, "../duplicity")):
@@ -1315,6 +1316,24 @@
# determine what action we're performing and process command line
action = commandline.ProcessCommandLine(sys.argv[1:])
+ globals.lockfile = None
+
+ if not globals.allow_concurrency:
+ globals.lockfile = FileLock(os.path.join(globals.archive_dir.name, "lockfile"))
+ if globals.lockfile.is_locked():
+ log.FatalError("Another instance is already running with this archive directory", log.ErrorCode.user_error)
+ log.shutdown()
+ sys.exit(2)
+
+ globals.lockfile.acquire(timeout = 0)
+
+ try:
+ do_backup(action)
+
+ finally:
+ util.release_lockfile()
+
+def do_backup(action):
# The following is for starting remote debugging in Eclipse with Pydev.
# Adjust the path to your location and version of Eclipse and Pydev.
if globals.pydevd:
@@ -1464,7 +1483,6 @@
finally:
tempdir.default().cleanup()
-
if __name__ == "__main__":
try:
with_tempdir(main)
@@ -1476,16 +1494,19 @@
# goes here, if needed.
except SystemExit, e:
# No traceback, just get out
+ util.release_lockfile()
sys.exit(e)
except KeyboardInterrupt, e:
# No traceback, just get out
log.Info(_("INT intercepted...exiting."))
+ util.release_lockfile()
sys.exit(4)
except gpg.GPGError, e:
# For gpg errors, don't show an ugly stack trace by
# default. But do with sufficient verbosity.
+ util.release_lockfile()
log.Info(_("GPG error detail: %s")
% (u''.join(traceback.format_exception(*sys.exc_info()))))
log.FatalError(u"%s: %s" % (e.__class__.__name__, e.args[0]),
@@ -1493,6 +1514,7 @@
e.__class__.__name__)
except duplicity.errors.UserError, e:
+ util.release_lockfile()
# For user errors, don't show an ugly stack trace by
# default. But do with sufficient verbosity.
log.Info(_("User error detail: %s")
@@ -1502,6 +1524,7 @@
e.__class__.__name__)
except duplicity.errors.BackendException, e:
+ util.release_lockfile()
# For backend errors, don't show an ugly stack trace by
# default. But do with sufficient verbosity.
log.Info(_("Backend error detail: %s")
@@ -1511,6 +1534,7 @@
e.__class__.__name__)
except Exception, e:
+ util.release_lockfile()
if "Forced assertion for testing" in str(e):
log.FatalError(u"%s: %s" % (e.__class__.__name__, unicode(e)),
log.ErrorCode.exception,
=== modified file 'duplicity/collections.py'
--- duplicity/collections.py 2013-12-27 06:39:00 +0000
+++ duplicity/collections.py 2014-01-17 16:49:58 +0000
@@ -24,9 +24,11 @@
import types
import gettext
+
from duplicity import log
from duplicity import file_naming
from duplicity import path
+from duplicity import util
from duplicity import dup_time
from duplicity import globals
from duplicity import manifest
@@ -158,6 +160,7 @@
except Exception:
log.Debug(_("BackupSet.delete: missing %s") % map(util.ufn, lfn))
pass
+ util.release_lockfile()
def __unicode__(self):
"""
=== modified file 'duplicity/commandline.py'
--- duplicity/commandline.py 2013-12-27 16:45:37 +0000
+++ duplicity/commandline.py 2014-01-17 16:49:58 +0000
@@ -246,6 +246,10 @@
parser = OPHelpFix(option_class = DupOption, usage = usage())
+ # If set, more than one instance may run with the given cache dir at the same time
+ parser.add_option("--allow-concurrency", action="store_true")
+
+
# If this is true, only warn and don't raise fatal error when backup
# source directory doesn't match previous backup source directory.
parser.add_option("--allow-source-mismatch", action = "store_true")
=== modified file 'duplicity/globals.py'
--- duplicity/globals.py 2013-11-24 16:49:57 +0000
+++ duplicity/globals.py 2014-01-17 16:49:58 +0000
@@ -95,6 +95,12 @@
# windows machines.
time_separator = ":"
+# Allow only one concurrent instance running with the same cache directory
+allow_concurrency = False
+
+# Global lockfile used to manage concurrency
+lockfile = None
+
# If this is true, only warn and don't raise fatal error when backup
# source directory doesn't match previous backup source directory.
allow_source_mismatch = None
@@ -250,6 +256,9 @@
# Renames (--rename)
rename = {}
+# Allow only one concurrent instance running with the same cache directory
+allow_concurrency = False
+
# enable data comparison on verify runs
compare_data = False
=== modified file 'duplicity/util.py'
--- duplicity/util.py 2013-12-27 06:39:00 +0000
+++ duplicity/util.py 2014-01-17 16:49:58 +0000
@@ -29,6 +29,8 @@
import string
import traceback
+from lockfile import FileLock, UnlockError
+
from duplicity import tarfile
import duplicity.globals as globals
@@ -134,3 +136,12 @@
pass
else:
raise
+
+def release_lockfile():
+ if globals.lockfile and globals.lockfile.is_locked():
+ log.Debug(_("Releasing lockfile %s") % globals.lockfile )
+ try:
+ globals.lockfile.release()
+ except UnlockError:
+ pass
+