Merge lp:~louis/duplicity/add-allow-concurrency into lp:duplicity/0.6

Proposed by Louis Bouchard
Status: Merged
Merged at revision: 953
Proposed branch: lp:~louis/duplicity/add-allow-concurrency
Merge into: lp:duplicity/0.6
Diff against target: 180 lines (+52/-1)
5 files modified
bin/duplicity (+25/-1)
duplicity/collections.py (+3/-0)
duplicity/commandline.py (+4/-0)
duplicity/globals.py (+9/-0)
duplicity/util.py (+11/-0)
To merge this branch: bzr merge lp:~louis/duplicity/add-allow-concurrency
Reviewer Review Type Date Requested Status
edso Approve
Review via email: mp+202134@code.launchpad.net

Description of the change

Implement a locking mechanism to avoid concurrent execution under the same cache directory. Locking is the default behavior.

Also implement the --allow-concurrency option to disable the locking if required.

This functionality adds a dependency on python-lockfile.

To post a comment you must log in.
Revision history for this message
edso (ed.so) wrote :

nice!

what's missing is a proper manpage entry though.. would you mind adding it around
http://bazaar.launchpad.net/~duplicity-team/duplicity/0.6-series/view/head:/bin/duplicity.1#L341
before --allow-source-mismatch ?

possibly in a new branch as ken merged this one already.

..ede/duply.net

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'bin/duplicity'
2--- bin/duplicity 2013-12-27 06:39:00 +0000
3+++ bin/duplicity 2014-01-17 16:49:58 +0000
4@@ -31,6 +31,7 @@
5 import traceback, platform, statvfs, resource, re
6 import threading
7 from datetime import datetime
8+from lockfile import FileLock
9
10 pwd = os.path.abspath(os.path.dirname(sys.argv[0]))
11 if os.path.exists(os.path.join(pwd, "../duplicity")):
12@@ -1315,6 +1316,24 @@
13 # determine what action we're performing and process command line
14 action = commandline.ProcessCommandLine(sys.argv[1:])
15
16+ globals.lockfile = None
17+
18+ if not globals.allow_concurrency:
19+ globals.lockfile = FileLock(os.path.join(globals.archive_dir.name, "lockfile"))
20+ if globals.lockfile.is_locked():
21+ log.FatalError("Another instance is already running with this archive directory", log.ErrorCode.user_error)
22+ log.shutdown()
23+ sys.exit(2)
24+
25+ globals.lockfile.acquire(timeout = 0)
26+
27+ try:
28+ do_backup(action)
29+
30+ finally:
31+ util.release_lockfile()
32+
33+def do_backup(action):
34 # The following is for starting remote debugging in Eclipse with Pydev.
35 # Adjust the path to your location and version of Eclipse and Pydev.
36 if globals.pydevd:
37@@ -1464,7 +1483,6 @@
38 finally:
39 tempdir.default().cleanup()
40
41-
42 if __name__ == "__main__":
43 try:
44 with_tempdir(main)
45@@ -1476,16 +1494,19 @@
46 # goes here, if needed.
47 except SystemExit, e:
48 # No traceback, just get out
49+ util.release_lockfile()
50 sys.exit(e)
51
52 except KeyboardInterrupt, e:
53 # No traceback, just get out
54 log.Info(_("INT intercepted...exiting."))
55+ util.release_lockfile()
56 sys.exit(4)
57
58 except gpg.GPGError, e:
59 # For gpg errors, don't show an ugly stack trace by
60 # default. But do with sufficient verbosity.
61+ util.release_lockfile()
62 log.Info(_("GPG error detail: %s")
63 % (u''.join(traceback.format_exception(*sys.exc_info()))))
64 log.FatalError(u"%s: %s" % (e.__class__.__name__, e.args[0]),
65@@ -1493,6 +1514,7 @@
66 e.__class__.__name__)
67
68 except duplicity.errors.UserError, e:
69+ util.release_lockfile()
70 # For user errors, don't show an ugly stack trace by
71 # default. But do with sufficient verbosity.
72 log.Info(_("User error detail: %s")
73@@ -1502,6 +1524,7 @@
74 e.__class__.__name__)
75
76 except duplicity.errors.BackendException, e:
77+ util.release_lockfile()
78 # For backend errors, don't show an ugly stack trace by
79 # default. But do with sufficient verbosity.
80 log.Info(_("Backend error detail: %s")
81@@ -1511,6 +1534,7 @@
82 e.__class__.__name__)
83
84 except Exception, e:
85+ util.release_lockfile()
86 if "Forced assertion for testing" in str(e):
87 log.FatalError(u"%s: %s" % (e.__class__.__name__, unicode(e)),
88 log.ErrorCode.exception,
89
90=== modified file 'duplicity/collections.py'
91--- duplicity/collections.py 2013-12-27 06:39:00 +0000
92+++ duplicity/collections.py 2014-01-17 16:49:58 +0000
93@@ -24,9 +24,11 @@
94 import types
95 import gettext
96
97+
98 from duplicity import log
99 from duplicity import file_naming
100 from duplicity import path
101+from duplicity import util
102 from duplicity import dup_time
103 from duplicity import globals
104 from duplicity import manifest
105@@ -158,6 +160,7 @@
106 except Exception:
107 log.Debug(_("BackupSet.delete: missing %s") % map(util.ufn, lfn))
108 pass
109+ util.release_lockfile()
110
111 def __unicode__(self):
112 """
113
114=== modified file 'duplicity/commandline.py'
115--- duplicity/commandline.py 2013-12-27 16:45:37 +0000
116+++ duplicity/commandline.py 2014-01-17 16:49:58 +0000
117@@ -246,6 +246,10 @@
118
119 parser = OPHelpFix(option_class = DupOption, usage = usage())
120
121+ # If set, more than one instance may run with the given cache dir at the same time
122+ parser.add_option("--allow-concurrency", action="store_true")
123+
124+
125 # If this is true, only warn and don't raise fatal error when backup
126 # source directory doesn't match previous backup source directory.
127 parser.add_option("--allow-source-mismatch", action = "store_true")
128
129=== modified file 'duplicity/globals.py'
130--- duplicity/globals.py 2013-11-24 16:49:57 +0000
131+++ duplicity/globals.py 2014-01-17 16:49:58 +0000
132@@ -95,6 +95,12 @@
133 # windows machines.
134 time_separator = ":"
135
136+# Allow only one concurrent instance runnning with the same cache directory
137+allow_concurrency = False
138+
139+# Global lockfile used to manage concurrency
140+lockfile = None
141+
142 # If this is true, only warn and don't raise fatal error when backup
143 # source directory doesn't match previous backup source directory.
144 allow_source_mismatch = None
145@@ -250,6 +256,9 @@
146 # Renames (--rename)
147 rename = {}
148
149+# Allow only one concurrent instance runnning with the same cache directory
150+allow_concurrency = False
151+
152 # enable data comparison on verify runs
153 compare_data = False
154
155
156=== modified file 'duplicity/util.py'
157--- duplicity/util.py 2013-12-27 06:39:00 +0000
158+++ duplicity/util.py 2014-01-17 16:49:58 +0000
159@@ -29,6 +29,8 @@
160 import string
161 import traceback
162
163+from lockfile import FileLock, UnlockError
164+
165 from duplicity import tarfile
166
167 import duplicity.globals as globals
168@@ -134,3 +136,12 @@
169 pass
170 else:
171 raise
172+
173+def release_lockfile():
174+ if globals.lockfile and globals.lockfile.is_locked():
175+ log.Debug(_("Releasing lockfile %s") % globals.lockfile )
176+ try:
177+ globals.lockfile.release()
178+ except UnlockError:
179+ pass
180+

Subscribers

People subscribed via source and target branches