Merge lp:~mterry/duplicity/2.6isms into lp:duplicity/0.6
- 2.6isms
- Merge into 0.6-series
Proposed by
Michael Terry
Status: | Merged |
---|---|
Merged at revision: | 975 |
Proposed branch: | lp:~mterry/duplicity/2.6isms |
Merge into: | lp:duplicity/0.6 |
Diff against target: |
1807 lines (+259/-196) 46 files modified
bin/duplicity (+7/-7) bin/rdiffdir (+1/-1) duplicity/backend.py (+4/-4) duplicity/backends/_boto_multi.py (+4/-4) duplicity/backends/_boto_single.py (+8/-8) duplicity/backends/_cf_cloudfiles.py (+13/-13) duplicity/backends/_cf_pyrax.py (+13/-13) duplicity/backends/_ssh_paramiko.py (+20/-20) duplicity/backends/botobackend.py (+2/-2) duplicity/backends/cfbackend.py (+2/-2) duplicity/backends/dpbxbackend.py (+6/-5) duplicity/backends/gdocsbackend.py (+7/-7) duplicity/backends/giobackend.py (+6/-6) duplicity/backends/imapbackend.py (+1/-1) duplicity/backends/localbackend.py (+6/-6) duplicity/backends/megabackend.py (+6/-6) duplicity/backends/sshbackend.py (+2/-2) duplicity/backends/swiftbackend.py (+17/-17) duplicity/backends/webdavbackend.py (+5/-5) duplicity/cached_ops.py (+1/-1) duplicity/collections.py (+3/-3) duplicity/commandline.py (+1/-1) duplicity/diffdir.py (+4/-4) duplicity/dup_temp.py (+3/-3) duplicity/dup_threading.py (+2/-2) duplicity/dup_time.py (+4/-4) duplicity/file_naming.py (+1/-1) duplicity/globals.py (+1/-1) duplicity/gpg.py (+1/-1) duplicity/gpginterface.py (+9/-9) duplicity/librsync.py (+7/-7) duplicity/patchdir.py (+1/-1) duplicity/path.py (+1/-1) duplicity/progress.py (+2/-2) duplicity/robust.py (+1/-1) duplicity/selection.py (+2/-2) duplicity/statistics.py (+3/-3) duplicity/tempdir.py (+1/-1) duplicity/util.py (+2/-2) testing/tests/test_badupload.py (+1/-1) testing/tests/test_collections.py (+4/-4) testing/tests/test_filenaming.py (+4/-4) testing/tests/test_lazy.py (+2/-1) testing/tests/test_patchdir.py (+6/-6) testing/tests/test_python3.py (+61/-0) testing/tests/test_restart.py (+1/-1) |
To merge this branch: | bzr merge lp:~mterry/duplicity/2.6isms |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
duplicity-team | Pending | ||
Review via email: mp+216404@code.launchpad.net |
Commit message
Description of the change
Here's a whole stack of minor syntax modernizations that will become necessary in Python 3. They all work in Python 2.6.
I've added a new test to keep us honest and prevent backsliding on these modernizations. It runs 2to3 and will fail the test if 2to3 finds anything that needs fixing (with a specific set of exceptions carved out).
This branch has most of the easy 2to3 fixes, the ones with obvious and safe syntax changes.
We could just let 2to3 do them for us, but ideally we use 2to3 as little as possible, since it doesn't always know how to solve a given problem. I will propose a later branch that actually does use 2to3 to generate Python 3 versions of duplicity on request. But this is a first step toward cleaning up the code base.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/duplicity' | |||
2 | --- bin/duplicity 2014-04-16 02:43:43 +0000 | |||
3 | +++ bin/duplicity 2014-04-17 22:26:47 +0000 | |||
4 | @@ -1042,7 +1042,7 @@ | |||
5 | 1042 | log.Notice(_("Deleting local %s (not authoritative at backend).") % util.ufn(del_name)) | 1042 | log.Notice(_("Deleting local %s (not authoritative at backend).") % util.ufn(del_name)) |
6 | 1043 | try: | 1043 | try: |
7 | 1044 | util.ignore_missing(os.unlink, del_name) | 1044 | util.ignore_missing(os.unlink, del_name) |
9 | 1045 | except Exception, e: | 1045 | except Exception as e: |
10 | 1046 | log.Warn(_("Unable to delete %s: %s") % (util.ufn(del_name), str(e))) | 1046 | log.Warn(_("Unable to delete %s: %s") % (util.ufn(del_name), str(e))) |
11 | 1047 | 1047 | ||
12 | 1048 | def copy_to_local(fn): | 1048 | def copy_to_local(fn): |
13 | @@ -1505,18 +1505,18 @@ | |||
14 | 1505 | # sys.exit() function. Python handles this by | 1505 | # sys.exit() function. Python handles this by |
15 | 1506 | # raising the SystemExit exception. Cleanup code | 1506 | # raising the SystemExit exception. Cleanup code |
16 | 1507 | # goes here, if needed. | 1507 | # goes here, if needed. |
18 | 1508 | except SystemExit, e: | 1508 | except SystemExit as e: |
19 | 1509 | # No traceback, just get out | 1509 | # No traceback, just get out |
20 | 1510 | util.release_lockfile() | 1510 | util.release_lockfile() |
21 | 1511 | sys.exit(e) | 1511 | sys.exit(e) |
22 | 1512 | 1512 | ||
24 | 1513 | except KeyboardInterrupt, e: | 1513 | except KeyboardInterrupt as e: |
25 | 1514 | # No traceback, just get out | 1514 | # No traceback, just get out |
26 | 1515 | log.Info(_("INT intercepted...exiting.")) | 1515 | log.Info(_("INT intercepted...exiting.")) |
27 | 1516 | util.release_lockfile() | 1516 | util.release_lockfile() |
28 | 1517 | sys.exit(4) | 1517 | sys.exit(4) |
29 | 1518 | 1518 | ||
31 | 1519 | except gpg.GPGError, e: | 1519 | except gpg.GPGError as e: |
32 | 1520 | # For gpg errors, don't show an ugly stack trace by | 1520 | # For gpg errors, don't show an ugly stack trace by |
33 | 1521 | # default. But do with sufficient verbosity. | 1521 | # default. But do with sufficient verbosity. |
34 | 1522 | util.release_lockfile() | 1522 | util.release_lockfile() |
35 | @@ -1526,7 +1526,7 @@ | |||
36 | 1526 | log.ErrorCode.gpg_failed, | 1526 | log.ErrorCode.gpg_failed, |
37 | 1527 | e.__class__.__name__) | 1527 | e.__class__.__name__) |
38 | 1528 | 1528 | ||
40 | 1529 | except duplicity.errors.UserError, e: | 1529 | except duplicity.errors.UserError as e: |
41 | 1530 | util.release_lockfile() | 1530 | util.release_lockfile() |
42 | 1531 | # For user errors, don't show an ugly stack trace by | 1531 | # For user errors, don't show an ugly stack trace by |
43 | 1532 | # default. But do with sufficient verbosity. | 1532 | # default. But do with sufficient verbosity. |
44 | @@ -1536,7 +1536,7 @@ | |||
45 | 1536 | log.ErrorCode.user_error, | 1536 | log.ErrorCode.user_error, |
46 | 1537 | e.__class__.__name__) | 1537 | e.__class__.__name__) |
47 | 1538 | 1538 | ||
49 | 1539 | except duplicity.errors.BackendException, e: | 1539 | except duplicity.errors.BackendException as e: |
50 | 1540 | util.release_lockfile() | 1540 | util.release_lockfile() |
51 | 1541 | # For backend errors, don't show an ugly stack trace by | 1541 | # For backend errors, don't show an ugly stack trace by |
52 | 1542 | # default. But do with sufficient verbosity. | 1542 | # default. But do with sufficient verbosity. |
53 | @@ -1546,7 +1546,7 @@ | |||
54 | 1546 | log.ErrorCode.user_error, | 1546 | log.ErrorCode.user_error, |
55 | 1547 | e.__class__.__name__) | 1547 | e.__class__.__name__) |
56 | 1548 | 1548 | ||
58 | 1549 | except Exception, e: | 1549 | except Exception as e: |
59 | 1550 | util.release_lockfile() | 1550 | util.release_lockfile() |
60 | 1551 | if "Forced assertion for testing" in str(e): | 1551 | if "Forced assertion for testing" in str(e): |
61 | 1552 | log.FatalError(u"%s: %s" % (e.__class__.__name__, unicode(e)), | 1552 | log.FatalError(u"%s: %s" % (e.__class__.__name__, unicode(e)), |
62 | 1553 | 1553 | ||
63 | === modified file 'bin/rdiffdir' | |||
64 | --- bin/rdiffdir 2014-02-05 02:57:01 +0000 | |||
65 | +++ bin/rdiffdir 2014-04-17 22:26:47 +0000 | |||
66 | @@ -64,7 +64,7 @@ | |||
67 | 64 | "include-filelist-stdin", "include-globbing-filelist", | 64 | "include-filelist-stdin", "include-globbing-filelist", |
68 | 65 | "include-regexp=", "max-blocksize", "null-separator", | 65 | "include-regexp=", "max-blocksize", "null-separator", |
69 | 66 | "verbosity=", "write-sig-to="]) | 66 | "verbosity=", "write-sig-to="]) |
71 | 67 | except getopt.error, e: | 67 | except getopt.error as e: |
72 | 68 | command_line_error("Bad command line option: %s" % (str(e),)) | 68 | command_line_error("Bad command line option: %s" % (str(e),)) |
73 | 69 | 69 | ||
74 | 70 | for opt, arg in optlist: | 70 | for opt, arg in optlist: |
75 | 71 | 71 | ||
76 | === modified file 'duplicity/backend.py' | |||
77 | --- duplicity/backend.py 2014-04-17 19:02:22 +0000 | |||
78 | +++ duplicity/backend.py 2014-04-17 22:26:47 +0000 | |||
79 | @@ -306,7 +306,7 @@ | |||
80 | 306 | try: | 306 | try: |
81 | 307 | kwargs = {"raise_errors" : True} | 307 | kwargs = {"raise_errors" : True} |
82 | 308 | return fn(*args, **kwargs) | 308 | return fn(*args, **kwargs) |
84 | 309 | except Exception, e: | 309 | except Exception as e: |
85 | 310 | log.Warn(_("Attempt %s failed: %s: %s") | 310 | log.Warn(_("Attempt %s failed: %s: %s") |
86 | 311 | % (n, e.__class__.__name__, str(e))) | 311 | % (n, e.__class__.__name__, str(e))) |
87 | 312 | log.Debug(_("Backtrace of previous error: %s") | 312 | log.Debug(_("Backtrace of previous error: %s") |
88 | @@ -332,10 +332,10 @@ | |||
89 | 332 | try: | 332 | try: |
90 | 333 | self.retry_count = n | 333 | self.retry_count = n |
91 | 334 | return fn(self, *args) | 334 | return fn(self, *args) |
93 | 335 | except FatalBackendError, e: | 335 | except FatalBackendError as e: |
94 | 336 | # die on fatal errors | 336 | # die on fatal errors |
95 | 337 | raise e | 337 | raise e |
97 | 338 | except Exception, e: | 338 | except Exception as e: |
98 | 339 | # retry on anything else | 339 | # retry on anything else |
99 | 340 | log.Warn(_("Attempt %s failed. %s: %s") | 340 | log.Warn(_("Attempt %s failed. %s: %s") |
100 | 341 | % (n, e.__class__.__name__, str(e))) | 341 | % (n, e.__class__.__name__, str(e))) |
101 | @@ -345,7 +345,7 @@ | |||
102 | 345 | # final trial, die on exception | 345 | # final trial, die on exception |
103 | 346 | self.retry_count = n+1 | 346 | self.retry_count = n+1 |
104 | 347 | return fn(self, *args) | 347 | return fn(self, *args) |
106 | 348 | except Exception, e: | 348 | except Exception as e: |
107 | 349 | log.Debug(_("Backtrace of previous error: %s") | 349 | log.Debug(_("Backtrace of previous error: %s") |
108 | 350 | % exception_traceback()) | 350 | % exception_traceback()) |
109 | 351 | log.FatalError(_("Giving up after %s attempts. %s: %s") | 351 | log.FatalError(_("Giving up after %s attempts. %s: %s") |
110 | 352 | 352 | ||
111 | === modified file 'duplicity/backends/_boto_multi.py' | |||
112 | --- duplicity/backends/_boto_multi.py 2014-04-09 09:22:27 +0000 | |||
113 | +++ duplicity/backends/_boto_multi.py 2014-04-17 22:26:47 +0000 | |||
114 | @@ -33,8 +33,8 @@ | |||
115 | 33 | from duplicity.filechunkio import FileChunkIO | 33 | from duplicity.filechunkio import FileChunkIO |
116 | 34 | from duplicity import progress | 34 | from duplicity import progress |
117 | 35 | 35 | ||
120 | 36 | from _boto_single import BotoBackend as BotoSingleBackend | 36 | from ._boto_single import BotoBackend as BotoSingleBackend |
121 | 37 | from _boto_single import get_connection | 37 | from ._boto_single import get_connection |
122 | 38 | 38 | ||
123 | 39 | BOTO_MIN_VERSION = "2.1.1" | 39 | BOTO_MIN_VERSION = "2.1.1" |
124 | 40 | 40 | ||
125 | @@ -63,7 +63,7 @@ | |||
126 | 63 | try: | 63 | try: |
127 | 64 | args = self.queue.get(True, 1) | 64 | args = self.queue.get(True, 1) |
128 | 65 | progress.report_transfer(args[0], args[1]) | 65 | progress.report_transfer(args[0], args[1]) |
130 | 66 | except Queue.Empty, e: | 66 | except Queue.Empty as e: |
131 | 67 | pass | 67 | pass |
132 | 68 | 68 | ||
133 | 69 | 69 | ||
134 | @@ -210,7 +210,7 @@ | |||
135 | 210 | conn = None | 210 | conn = None |
136 | 211 | bucket = None | 211 | bucket = None |
137 | 212 | del conn | 212 | del conn |
139 | 213 | except Exception, e: | 213 | except Exception as e: |
140 | 214 | traceback.print_exc() | 214 | traceback.print_exc() |
141 | 215 | if num_retries: | 215 | if num_retries: |
142 | 216 | log.Debug("%s: Upload of chunk %d failed. Retrying %d more times..." % ( | 216 | log.Debug("%s: Upload of chunk %d failed. Retrying %d more times..." % ( |
143 | 217 | 217 | ||
144 | === modified file 'duplicity/backends/_boto_single.py' | |||
145 | --- duplicity/backends/_boto_single.py 2014-04-09 09:55:21 +0000 | |||
146 | +++ duplicity/backends/_boto_single.py 2014-04-17 22:26:47 +0000 | |||
147 | @@ -202,7 +202,7 @@ | |||
148 | 202 | try: | 202 | try: |
149 | 203 | try: | 203 | try: |
150 | 204 | self.bucket = self.conn.get_bucket(self.bucket_name, validate=True) | 204 | self.bucket = self.conn.get_bucket(self.bucket_name, validate=True) |
152 | 205 | except Exception, e: | 205 | except Exception as e: |
153 | 206 | if "NoSuchBucket" in str(e): | 206 | if "NoSuchBucket" in str(e): |
154 | 207 | if globals.s3_european_buckets: | 207 | if globals.s3_european_buckets: |
155 | 208 | self.bucket = self.conn.create_bucket(self.bucket_name, | 208 | self.bucket = self.conn.create_bucket(self.bucket_name, |
156 | @@ -211,7 +211,7 @@ | |||
157 | 211 | self.bucket = self.conn.create_bucket(self.bucket_name) | 211 | self.bucket = self.conn.create_bucket(self.bucket_name) |
158 | 212 | else: | 212 | else: |
159 | 213 | raise e | 213 | raise e |
161 | 214 | except Exception, e: | 214 | except Exception as e: |
162 | 215 | log.Warn("Failed to create bucket (attempt #%d) '%s' failed (reason: %s: %s)" | 215 | log.Warn("Failed to create bucket (attempt #%d) '%s' failed (reason: %s: %s)" |
163 | 216 | "" % (n, self.bucket_name, | 216 | "" % (n, self.bucket_name, |
164 | 217 | e.__class__.__name__, | 217 | e.__class__.__name__, |
165 | @@ -252,7 +252,7 @@ | |||
166 | 252 | self.resetConnection() | 252 | self.resetConnection() |
167 | 253 | log.Debug("Uploaded %s/%s to %s Storage at roughly %f bytes/second" % (self.straight_url, remote_filename, storage_class, rough_upload_speed)) | 253 | log.Debug("Uploaded %s/%s to %s Storage at roughly %f bytes/second" % (self.straight_url, remote_filename, storage_class, rough_upload_speed)) |
168 | 254 | return | 254 | return |
170 | 255 | except Exception, e: | 255 | except Exception as e: |
171 | 256 | log.Warn("Upload '%s/%s' failed (attempt #%d, reason: %s: %s)" | 256 | log.Warn("Upload '%s/%s' failed (attempt #%d, reason: %s: %s)" |
172 | 257 | "" % (self.straight_url, | 257 | "" % (self.straight_url, |
173 | 258 | remote_filename, | 258 | remote_filename, |
174 | @@ -279,7 +279,7 @@ | |||
175 | 279 | key.get_contents_to_filename(local_path.name) | 279 | key.get_contents_to_filename(local_path.name) |
176 | 280 | local_path.setdata() | 280 | local_path.setdata() |
177 | 281 | return | 281 | return |
179 | 282 | except Exception, e: | 282 | except Exception as e: |
180 | 283 | log.Warn("Download %s/%s failed (attempt #%d, reason: %s: %s)" | 283 | log.Warn("Download %s/%s failed (attempt #%d, reason: %s: %s)" |
181 | 284 | "" % (self.straight_url, | 284 | "" % (self.straight_url, |
182 | 285 | remote_filename, | 285 | remote_filename, |
183 | @@ -304,7 +304,7 @@ | |||
184 | 304 | log.Info("Listing %s" % self.straight_url) | 304 | log.Info("Listing %s" % self.straight_url) |
185 | 305 | try: | 305 | try: |
186 | 306 | return self._list_filenames_in_bucket() | 306 | return self._list_filenames_in_bucket() |
188 | 307 | except Exception, e: | 307 | except Exception as e: |
189 | 308 | log.Warn("List %s failed (attempt #%d, reason: %s: %s)" | 308 | log.Warn("List %s failed (attempt #%d, reason: %s: %s)" |
190 | 309 | "" % (self.straight_url, | 309 | "" % (self.straight_url, |
191 | 310 | n, | 310 | n, |
192 | @@ -348,7 +348,7 @@ | |||
193 | 348 | if key is None: | 348 | if key is None: |
194 | 349 | return {'size': -1} | 349 | return {'size': -1} |
195 | 350 | return {'size': key.size} | 350 | return {'size': key.size} |
197 | 351 | except Exception, e: | 351 | except Exception as e: |
198 | 352 | log.Warn("Query %s/%s failed: %s" | 352 | log.Warn("Query %s/%s failed: %s" |
199 | 353 | "" % (self.straight_url, | 353 | "" % (self.straight_url, |
200 | 354 | filename, | 354 | filename, |
201 | @@ -368,7 +368,7 @@ | |||
202 | 368 | 368 | ||
203 | 369 | def pre_process_download(self, files_to_download, wait=False): | 369 | def pre_process_download(self, files_to_download, wait=False): |
204 | 370 | # Used primarily to move files in Glacier to S3 | 370 | # Used primarily to move files in Glacier to S3 |
206 | 371 | if isinstance(files_to_download, basestring): | 371 | if isinstance(files_to_download, (bytes, str, unicode)): |
207 | 372 | files_to_download = [files_to_download] | 372 | files_to_download = [files_to_download] |
208 | 373 | 373 | ||
209 | 374 | for remote_filename in files_to_download: | 374 | for remote_filename in files_to_download: |
210 | @@ -397,7 +397,7 @@ | |||
211 | 397 | log.Info("File %s was successfully restored from Glacier" % remote_filename) | 397 | log.Info("File %s was successfully restored from Glacier" % remote_filename) |
212 | 398 | success = True | 398 | success = True |
213 | 399 | break | 399 | break |
215 | 400 | except Exception, e: | 400 | except Exception as e: |
216 | 401 | log.Warn("Restoration from Glacier for file %s/%s failed (attempt #%d, reason: %s: %s)" | 401 | log.Warn("Restoration from Glacier for file %s/%s failed (attempt #%d, reason: %s: %s)" |
217 | 402 | "" % (self.straight_url, | 402 | "" % (self.straight_url, |
218 | 403 | remote_filename, | 403 | remote_filename, |
219 | 404 | 404 | ||
220 | === modified file 'duplicity/backends/_cf_cloudfiles.py' | |||
221 | --- duplicity/backends/_cf_cloudfiles.py 2013-12-27 06:39:00 +0000 | |||
222 | +++ duplicity/backends/_cf_cloudfiles.py 2014-04-17 22:26:47 +0000 | |||
223 | @@ -44,17 +44,17 @@ | |||
224 | 44 | self.resp_exc = ResponseError | 44 | self.resp_exc = ResponseError |
225 | 45 | conn_kwargs = {} | 45 | conn_kwargs = {} |
226 | 46 | 46 | ||
228 | 47 | if not os.environ.has_key('CLOUDFILES_USERNAME'): | 47 | if 'CLOUDFILES_USERNAME' not in os.environ: |
229 | 48 | raise BackendException('CLOUDFILES_USERNAME environment variable' | 48 | raise BackendException('CLOUDFILES_USERNAME environment variable' |
230 | 49 | 'not set.') | 49 | 'not set.') |
231 | 50 | 50 | ||
233 | 51 | if not os.environ.has_key('CLOUDFILES_APIKEY'): | 51 | if 'CLOUDFILES_APIKEY' not in os.environ: |
234 | 52 | raise BackendException('CLOUDFILES_APIKEY environment variable not set.') | 52 | raise BackendException('CLOUDFILES_APIKEY environment variable not set.') |
235 | 53 | 53 | ||
236 | 54 | conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME'] | 54 | conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME'] |
237 | 55 | conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY'] | 55 | conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY'] |
238 | 56 | 56 | ||
240 | 57 | if os.environ.has_key('CLOUDFILES_AUTHURL'): | 57 | if 'CLOUDFILES_AUTHURL' in os.environ: |
241 | 58 | conn_kwargs['authurl'] = os.environ['CLOUDFILES_AUTHURL'] | 58 | conn_kwargs['authurl'] = os.environ['CLOUDFILES_AUTHURL'] |
242 | 59 | else: | 59 | else: |
243 | 60 | conn_kwargs['authurl'] = consts.default_authurl | 60 | conn_kwargs['authurl'] = consts.default_authurl |
244 | @@ -63,7 +63,7 @@ | |||
245 | 63 | 63 | ||
246 | 64 | try: | 64 | try: |
247 | 65 | conn = Connection(**conn_kwargs) | 65 | conn = Connection(**conn_kwargs) |
249 | 66 | except Exception, e: | 66 | except Exception as e: |
250 | 67 | log.FatalError("Connection failed, please check your credentials: %s %s" | 67 | log.FatalError("Connection failed, please check your credentials: %s %s" |
251 | 68 | % (e.__class__.__name__, str(e)), | 68 | % (e.__class__.__name__, str(e)), |
252 | 69 | log.ErrorCode.connection_failed) | 69 | log.ErrorCode.connection_failed) |
253 | @@ -79,10 +79,10 @@ | |||
254 | 79 | sobject = self.container.create_object(remote_filename) | 79 | sobject = self.container.create_object(remote_filename) |
255 | 80 | sobject.load_from_filename(source_path.name) | 80 | sobject.load_from_filename(source_path.name) |
256 | 81 | return | 81 | return |
258 | 82 | except self.resp_exc, error: | 82 | except self.resp_exc as error: |
259 | 83 | log.Warn("Upload of '%s' failed (attempt %d): CloudFiles returned: %s %s" | 83 | log.Warn("Upload of '%s' failed (attempt %d): CloudFiles returned: %s %s" |
260 | 84 | % (remote_filename, n, error.status, error.reason)) | 84 | % (remote_filename, n, error.status, error.reason)) |
262 | 85 | except Exception, e: | 85 | except Exception as e: |
263 | 86 | log.Warn("Upload of '%s' failed (attempt %s): %s: %s" | 86 | log.Warn("Upload of '%s' failed (attempt %s): %s: %s" |
264 | 87 | % (remote_filename, n, e.__class__.__name__, str(e))) | 87 | % (remote_filename, n, e.__class__.__name__, str(e))) |
265 | 88 | log.Debug("Backtrace of previous error: %s" | 88 | log.Debug("Backtrace of previous error: %s" |
266 | @@ -102,10 +102,10 @@ | |||
267 | 102 | f.write(chunk) | 102 | f.write(chunk) |
268 | 103 | local_path.setdata() | 103 | local_path.setdata() |
269 | 104 | return | 104 | return |
271 | 105 | except self.resp_exc, resperr: | 105 | except self.resp_exc as resperr: |
272 | 106 | log.Warn("Download of '%s' failed (attempt %s): CloudFiles returned: %s %s" | 106 | log.Warn("Download of '%s' failed (attempt %s): CloudFiles returned: %s %s" |
273 | 107 | % (remote_filename, n, resperr.status, resperr.reason)) | 107 | % (remote_filename, n, resperr.status, resperr.reason)) |
275 | 108 | except Exception, e: | 108 | except Exception as e: |
276 | 109 | log.Warn("Download of '%s' failed (attempt %s): %s: %s" | 109 | log.Warn("Download of '%s' failed (attempt %s): %s: %s" |
277 | 110 | % (remote_filename, n, e.__class__.__name__, str(e))) | 110 | % (remote_filename, n, e.__class__.__name__, str(e))) |
278 | 111 | log.Debug("Backtrace of previous error: %s" | 111 | log.Debug("Backtrace of previous error: %s" |
279 | @@ -128,10 +128,10 @@ | |||
280 | 128 | objs = self.container.list_objects(marker=keys[-1]) | 128 | objs = self.container.list_objects(marker=keys[-1]) |
281 | 129 | keys += objs | 129 | keys += objs |
282 | 130 | return keys | 130 | return keys |
284 | 131 | except self.resp_exc, resperr: | 131 | except self.resp_exc as resperr: |
285 | 132 | log.Warn("Listing of '%s' failed (attempt %s): CloudFiles returned: %s %s" | 132 | log.Warn("Listing of '%s' failed (attempt %s): CloudFiles returned: %s %s" |
286 | 133 | % (self.container, n, resperr.status, resperr.reason)) | 133 | % (self.container, n, resperr.status, resperr.reason)) |
288 | 134 | except Exception, e: | 134 | except Exception as e: |
289 | 135 | log.Warn("Listing of '%s' failed (attempt %s): %s: %s" | 135 | log.Warn("Listing of '%s' failed (attempt %s): %s: %s" |
290 | 136 | % (self.container, n, e.__class__.__name__, str(e))) | 136 | % (self.container, n, e.__class__.__name__, str(e))) |
291 | 137 | log.Debug("Backtrace of previous error: %s" | 137 | log.Debug("Backtrace of previous error: %s" |
292 | @@ -148,14 +148,14 @@ | |||
293 | 148 | try: | 148 | try: |
294 | 149 | self.container.delete_object(remote_filename) | 149 | self.container.delete_object(remote_filename) |
295 | 150 | return | 150 | return |
297 | 151 | except self.resp_exc, resperr: | 151 | except self.resp_exc as resperr: |
298 | 152 | if n > 1 and resperr.status == 404: | 152 | if n > 1 and resperr.status == 404: |
299 | 153 | # We failed on a timeout, but delete succeeded on the server | 153 | # We failed on a timeout, but delete succeeded on the server |
300 | 154 | log.Warn("Delete of '%s' missing after retry - must have succeded earler" % remote_filename ) | 154 | log.Warn("Delete of '%s' missing after retry - must have succeded earler" % remote_filename ) |
301 | 155 | return | 155 | return |
302 | 156 | log.Warn("Delete of '%s' failed (attempt %s): CloudFiles returned: %s %s" | 156 | log.Warn("Delete of '%s' failed (attempt %s): CloudFiles returned: %s %s" |
303 | 157 | % (remote_filename, n, resperr.status, resperr.reason)) | 157 | % (remote_filename, n, resperr.status, resperr.reason)) |
305 | 158 | except Exception, e: | 158 | except Exception as e: |
306 | 159 | log.Warn("Delete of '%s' failed (attempt %s): %s: %s" | 159 | log.Warn("Delete of '%s' failed (attempt %s): %s: %s" |
307 | 160 | % (remote_filename, n, e.__class__.__name__, str(e))) | 160 | % (remote_filename, n, e.__class__.__name__, str(e))) |
308 | 161 | log.Debug("Backtrace of previous error: %s" | 161 | log.Debug("Backtrace of previous error: %s" |
309 | @@ -179,7 +179,7 @@ | |||
310 | 179 | return {'size': sobject.size} | 179 | return {'size': sobject.size} |
311 | 180 | except NoSuchObject: | 180 | except NoSuchObject: |
312 | 181 | return {'size': -1} | 181 | return {'size': -1} |
314 | 182 | except Exception, e: | 182 | except Exception as e: |
315 | 183 | log.Warn("Error querying '%s/%s': %s" | 183 | log.Warn("Error querying '%s/%s': %s" |
316 | 184 | "" % (self.container, | 184 | "" % (self.container, |
317 | 185 | filename, | 185 | filename, |
318 | 186 | 186 | ||
319 | === modified file 'duplicity/backends/_cf_pyrax.py' | |||
320 | --- duplicity/backends/_cf_pyrax.py 2013-12-27 06:39:00 +0000 | |||
321 | +++ duplicity/backends/_cf_pyrax.py 2014-04-17 22:26:47 +0000 | |||
322 | @@ -45,24 +45,24 @@ | |||
323 | 45 | 45 | ||
324 | 46 | conn_kwargs = {} | 46 | conn_kwargs = {} |
325 | 47 | 47 | ||
327 | 48 | if not os.environ.has_key('CLOUDFILES_USERNAME'): | 48 | if 'CLOUDFILES_USERNAME' not in os.environ: |
328 | 49 | raise BackendException('CLOUDFILES_USERNAME environment variable' | 49 | raise BackendException('CLOUDFILES_USERNAME environment variable' |
329 | 50 | 'not set.') | 50 | 'not set.') |
330 | 51 | 51 | ||
332 | 52 | if not os.environ.has_key('CLOUDFILES_APIKEY'): | 52 | if 'CLOUDFILES_APIKEY' not in os.environ: |
333 | 53 | raise BackendException('CLOUDFILES_APIKEY environment variable not set.') | 53 | raise BackendException('CLOUDFILES_APIKEY environment variable not set.') |
334 | 54 | 54 | ||
335 | 55 | conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME'] | 55 | conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME'] |
336 | 56 | conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY'] | 56 | conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY'] |
337 | 57 | 57 | ||
339 | 58 | if os.environ.has_key('CLOUDFILES_REGION'): | 58 | if 'CLOUDFILES_REGION' in os.environ: |
340 | 59 | conn_kwargs['region'] = os.environ['CLOUDFILES_REGION'] | 59 | conn_kwargs['region'] = os.environ['CLOUDFILES_REGION'] |
341 | 60 | 60 | ||
342 | 61 | container = parsed_url.path.lstrip('/') | 61 | container = parsed_url.path.lstrip('/') |
343 | 62 | 62 | ||
344 | 63 | try: | 63 | try: |
345 | 64 | pyrax.set_credentials(**conn_kwargs) | 64 | pyrax.set_credentials(**conn_kwargs) |
347 | 65 | except Exception, e: | 65 | except Exception as e: |
348 | 66 | log.FatalError("Connection failed, please check your credentials: %s %s" | 66 | log.FatalError("Connection failed, please check your credentials: %s %s" |
349 | 67 | % (e.__class__.__name__, str(e)), | 67 | % (e.__class__.__name__, str(e)), |
350 | 68 | log.ErrorCode.connection_failed) | 68 | log.ErrorCode.connection_failed) |
351 | @@ -81,10 +81,10 @@ | |||
352 | 81 | try: | 81 | try: |
353 | 82 | self.container.upload_file(source_path.name, remote_filename) | 82 | self.container.upload_file(source_path.name, remote_filename) |
354 | 83 | return | 83 | return |
356 | 84 | except self.client_exc, error: | 84 | except self.client_exc as error: |
357 | 85 | log.Warn("Upload of '%s' failed (attempt %d): pyrax returned: %s %s" | 85 | log.Warn("Upload of '%s' failed (attempt %d): pyrax returned: %s %s" |
358 | 86 | % (remote_filename, n, error.__class__.__name__, error.message)) | 86 | % (remote_filename, n, error.__class__.__name__, error.message)) |
360 | 87 | except Exception, e: | 87 | except Exception as e: |
361 | 88 | log.Warn("Upload of '%s' failed (attempt %s): %s: %s" | 88 | log.Warn("Upload of '%s' failed (attempt %s): %s: %s" |
362 | 89 | % (remote_filename, n, e.__class__.__name__, str(e))) | 89 | % (remote_filename, n, e.__class__.__name__, str(e))) |
363 | 90 | log.Debug("Backtrace of previous error: %s" | 90 | log.Debug("Backtrace of previous error: %s" |
364 | @@ -105,10 +105,10 @@ | |||
365 | 105 | return | 105 | return |
366 | 106 | except self.nso_exc: | 106 | except self.nso_exc: |
367 | 107 | return | 107 | return |
369 | 108 | except self.client_exc, resperr: | 108 | except self.client_exc as resperr: |
370 | 109 | log.Warn("Download of '%s' failed (attempt %s): pyrax returned: %s %s" | 109 | log.Warn("Download of '%s' failed (attempt %s): pyrax returned: %s %s" |
371 | 110 | % (remote_filename, n, resperr.__class__.__name__, resperr.message)) | 110 | % (remote_filename, n, resperr.__class__.__name__, resperr.message)) |
373 | 111 | except Exception, e: | 111 | except Exception as e: |
374 | 112 | log.Warn("Download of '%s' failed (attempt %s): %s: %s" | 112 | log.Warn("Download of '%s' failed (attempt %s): %s: %s" |
375 | 113 | % (remote_filename, n, e.__class__.__name__, str(e))) | 113 | % (remote_filename, n, e.__class__.__name__, str(e))) |
376 | 114 | log.Debug("Backtrace of previous error: %s" | 114 | log.Debug("Backtrace of previous error: %s" |
377 | @@ -131,10 +131,10 @@ | |||
378 | 131 | objs = self.container.get_object_names(marker = keys[-1]) | 131 | objs = self.container.get_object_names(marker = keys[-1]) |
379 | 132 | keys += objs | 132 | keys += objs |
380 | 133 | return keys | 133 | return keys |
382 | 134 | except self.client_exc, resperr: | 134 | except self.client_exc as resperr: |
383 | 135 | log.Warn("Listing of '%s' failed (attempt %s): pyrax returned: %s %s" | 135 | log.Warn("Listing of '%s' failed (attempt %s): pyrax returned: %s %s" |
384 | 136 | % (self.container, n, resperr.__class__.__name__, resperr.message)) | 136 | % (self.container, n, resperr.__class__.__name__, resperr.message)) |
386 | 137 | except Exception, e: | 137 | except Exception as e: |
387 | 138 | log.Warn("Listing of '%s' failed (attempt %s): %s: %s" | 138 | log.Warn("Listing of '%s' failed (attempt %s): %s: %s" |
388 | 139 | % (self.container, n, e.__class__.__name__, str(e))) | 139 | % (self.container, n, e.__class__.__name__, str(e))) |
389 | 140 | log.Debug("Backtrace of previous error: %s" | 140 | log.Debug("Backtrace of previous error: %s" |
390 | @@ -151,14 +151,14 @@ | |||
391 | 151 | try: | 151 | try: |
392 | 152 | self.container.delete_object(remote_filename) | 152 | self.container.delete_object(remote_filename) |
393 | 153 | return | 153 | return |
395 | 154 | except self.client_exc, resperr: | 154 | except self.client_exc as resperr: |
396 | 155 | if n > 1 and resperr.status == 404: | 155 | if n > 1 and resperr.status == 404: |
397 | 156 | # We failed on a timeout, but delete succeeded on the server | 156 | # We failed on a timeout, but delete succeeded on the server |
398 | 157 | log.Warn("Delete of '%s' missing after retry - must have succeded earler" % remote_filename) | 157 | log.Warn("Delete of '%s' missing after retry - must have succeded earler" % remote_filename) |
399 | 158 | return | 158 | return |
400 | 159 | log.Warn("Delete of '%s' failed (attempt %s): pyrax returned: %s %s" | 159 | log.Warn("Delete of '%s' failed (attempt %s): pyrax returned: %s %s" |
401 | 160 | % (remote_filename, n, resperr.__class__.__name__, resperr.message)) | 160 | % (remote_filename, n, resperr.__class__.__name__, resperr.message)) |
403 | 161 | except Exception, e: | 161 | except Exception as e: |
404 | 162 | log.Warn("Delete of '%s' failed (attempt %s): %s: %s" | 162 | log.Warn("Delete of '%s' failed (attempt %s): %s: %s" |
405 | 163 | % (remote_filename, n, e.__class__.__name__, str(e))) | 163 | % (remote_filename, n, e.__class__.__name__, str(e))) |
406 | 164 | log.Debug("Backtrace of previous error: %s" | 164 | log.Debug("Backtrace of previous error: %s" |
407 | @@ -181,7 +181,7 @@ | |||
408 | 181 | return {'size': sobject.total_bytes} | 181 | return {'size': sobject.total_bytes} |
409 | 182 | except self.nso_exc: | 182 | except self.nso_exc: |
410 | 183 | return {'size': -1} | 183 | return {'size': -1} |
412 | 184 | except Exception, e: | 184 | except Exception as e: |
413 | 185 | log.Warn("Error querying '%s/%s': %s" | 185 | log.Warn("Error querying '%s/%s': %s" |
414 | 186 | "" % (self.container, | 186 | "" % (self.container, |
415 | 187 | filename, | 187 | filename, |
416 | 188 | 188 | ||
417 | === modified file 'duplicity/backends/_ssh_paramiko.py' | |||
418 | --- duplicity/backends/_ssh_paramiko.py 2013-12-30 16:01:49 +0000 | |||
419 | +++ duplicity/backends/_ssh_paramiko.py 2014-04-17 22:26:47 +0000 | |||
420 | @@ -134,7 +134,7 @@ | |||
421 | 134 | try: | 134 | try: |
422 | 135 | if os.path.isfile("/etc/ssh/ssh_known_hosts"): | 135 | if os.path.isfile("/etc/ssh/ssh_known_hosts"): |
423 | 136 | self.client.load_system_host_keys("/etc/ssh/ssh_known_hosts") | 136 | self.client.load_system_host_keys("/etc/ssh/ssh_known_hosts") |
425 | 137 | except Exception, e: | 137 | except Exception as e: |
426 | 138 | raise BackendException("could not load /etc/ssh/ssh_known_hosts, maybe corrupt?") | 138 | raise BackendException("could not load /etc/ssh/ssh_known_hosts, maybe corrupt?") |
427 | 139 | try: | 139 | try: |
428 | 140 | # use load_host_keys() to signal it's writable to paramiko | 140 | # use load_host_keys() to signal it's writable to paramiko |
429 | @@ -144,7 +144,7 @@ | |||
430 | 144 | self.client.load_host_keys(file) | 144 | self.client.load_host_keys(file) |
431 | 145 | else: | 145 | else: |
432 | 146 | self.client._host_keys_filename = file | 146 | self.client._host_keys_filename = file |
434 | 147 | except Exception, e: | 147 | except Exception as e: |
435 | 148 | raise BackendException("could not load ~/.ssh/known_hosts, maybe corrupt?") | 148 | raise BackendException("could not load ~/.ssh/known_hosts, maybe corrupt?") |
436 | 149 | 149 | ||
437 | 150 | """ the next block reorganizes all host parameters into a | 150 | """ the next block reorganizes all host parameters into a |
438 | @@ -211,7 +211,7 @@ | |||
439 | 211 | allow_agent=True, | 211 | allow_agent=True, |
440 | 212 | look_for_keys=True, | 212 | look_for_keys=True, |
441 | 213 | key_filename=self.config['identityfile']) | 213 | key_filename=self.config['identityfile']) |
443 | 214 | except Exception, e: | 214 | except Exception as e: |
444 | 215 | raise BackendException("ssh connection to %s@%s:%d failed: %s" % ( | 215 | raise BackendException("ssh connection to %s@%s:%d failed: %s" % ( |
445 | 216 | self.config['user'], | 216 | self.config['user'], |
446 | 217 | self.config['hostname'], | 217 | self.config['hostname'], |
447 | @@ -229,7 +229,7 @@ | |||
448 | 229 | else: | 229 | else: |
449 | 230 | try: | 230 | try: |
450 | 231 | self.sftp=self.client.open_sftp() | 231 | self.sftp=self.client.open_sftp() |
452 | 232 | except Exception, e: | 232 | except Exception as e: |
453 | 233 | raise BackendException("sftp negotiation failed: %s" % e) | 233 | raise BackendException("sftp negotiation failed: %s" % e) |
454 | 234 | 234 | ||
455 | 235 | 235 | ||
456 | @@ -244,17 +244,17 @@ | |||
457 | 244 | continue | 244 | continue |
458 | 245 | try: | 245 | try: |
459 | 246 | attrs=self.sftp.stat(d) | 246 | attrs=self.sftp.stat(d) |
461 | 247 | except IOError, e: | 247 | except IOError as e: |
462 | 248 | if e.errno == errno.ENOENT: | 248 | if e.errno == errno.ENOENT: |
463 | 249 | try: | 249 | try: |
464 | 250 | self.sftp.mkdir(d) | 250 | self.sftp.mkdir(d) |
466 | 251 | except Exception, e: | 251 | except Exception as e: |
467 | 252 | raise BackendException("sftp mkdir %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e)) | 252 | raise BackendException("sftp mkdir %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e)) |
468 | 253 | else: | 253 | else: |
469 | 254 | raise BackendException("sftp stat %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e)) | 254 | raise BackendException("sftp stat %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e)) |
470 | 255 | try: | 255 | try: |
471 | 256 | self.sftp.chdir(d) | 256 | self.sftp.chdir(d) |
473 | 257 | except Exception, e: | 257 | except Exception as e: |
474 | 258 | raise BackendException("sftp chdir to %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e)) | 258 | raise BackendException("sftp chdir to %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e)) |
475 | 259 | 259 | ||
476 | 260 | def put(self, source_path, remote_filename = None): | 260 | def put(self, source_path, remote_filename = None): |
477 | @@ -275,7 +275,7 @@ | |||
478 | 275 | chan=self.client.get_transport().open_session() | 275 | chan=self.client.get_transport().open_session() |
479 | 276 | chan.settimeout(globals.timeout) | 276 | chan.settimeout(globals.timeout) |
480 | 277 | chan.exec_command("scp -t '%s'" % self.remote_dir) # scp in sink mode uses the arg as base directory | 277 | chan.exec_command("scp -t '%s'" % self.remote_dir) # scp in sink mode uses the arg as base directory |
482 | 278 | except Exception, e: | 278 | except Exception as e: |
483 | 279 | raise BackendException("scp execution failed: %s" % e) | 279 | raise BackendException("scp execution failed: %s" % e) |
484 | 280 | # scp protocol: one 0x0 after startup, one after the Create meta, one after saving | 280 | # scp protocol: one 0x0 after startup, one after the Create meta, one after saving |
485 | 281 | # if there's a problem: 0x1 or 0x02 and some error text | 281 | # if there's a problem: 0x1 or 0x02 and some error text |
486 | @@ -298,9 +298,9 @@ | |||
487 | 298 | try: | 298 | try: |
488 | 299 | self.sftp.put(source_path.name,remote_filename) | 299 | self.sftp.put(source_path.name,remote_filename) |
489 | 300 | return | 300 | return |
491 | 301 | except Exception, e: | 301 | except Exception as e: |
492 | 302 | raise BackendException("sftp put of %s (as %s) failed: %s" % (source_path.name,remote_filename,e)) | 302 | raise BackendException("sftp put of %s (as %s) failed: %s" % (source_path.name,remote_filename,e)) |
494 | 303 | except Exception, e: | 303 | except Exception as e: |
495 | 304 | log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay)) | 304 | log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay)) |
496 | 305 | raise BackendException("Giving up trying to upload '%s' after %d attempts" % (remote_filename,n)) | 305 | raise BackendException("Giving up trying to upload '%s' after %d attempts" % (remote_filename,n)) |
497 | 306 | 306 | ||
498 | @@ -320,7 +320,7 @@ | |||
499 | 320 | chan=self.client.get_transport().open_session() | 320 | chan=self.client.get_transport().open_session() |
500 | 321 | chan.settimeout(globals.timeout) | 321 | chan.settimeout(globals.timeout) |
501 | 322 | chan.exec_command("scp -f '%s/%s'" % (self.remote_dir,remote_filename)) | 322 | chan.exec_command("scp -f '%s/%s'" % (self.remote_dir,remote_filename)) |
503 | 323 | except Exception, e: | 323 | except Exception as e: |
504 | 324 | raise BackendException("scp execution failed: %s" % e) | 324 | raise BackendException("scp execution failed: %s" % e) |
505 | 325 | 325 | ||
506 | 326 | chan.send('\0') # overall ready indicator | 326 | chan.send('\0') # overall ready indicator |
507 | @@ -343,7 +343,7 @@ | |||
508 | 343 | buff=chan.recv(blocksize) | 343 | buff=chan.recv(blocksize) |
509 | 344 | f.write(buff) | 344 | f.write(buff) |
510 | 345 | togo-=len(buff) | 345 | togo-=len(buff) |
512 | 346 | except Exception, e: | 346 | except Exception as e: |
513 | 347 | raise BackendException("scp get %s failed: %s" % (remote_filename,e)) | 347 | raise BackendException("scp get %s failed: %s" % (remote_filename,e)) |
514 | 348 | 348 | ||
515 | 349 | msg=chan.recv(1) # check the final status | 349 | msg=chan.recv(1) # check the final status |
516 | @@ -357,10 +357,10 @@ | |||
517 | 357 | try: | 357 | try: |
518 | 358 | self.sftp.get(remote_filename,local_path.name) | 358 | self.sftp.get(remote_filename,local_path.name) |
519 | 359 | return | 359 | return |
521 | 360 | except Exception, e: | 360 | except Exception as e: |
522 | 361 | raise BackendException("sftp get of %s (to %s) failed: %s" % (remote_filename,local_path.name,e)) | 361 | raise BackendException("sftp get of %s (to %s) failed: %s" % (remote_filename,local_path.name,e)) |
523 | 362 | local_path.setdata() | 362 | local_path.setdata() |
525 | 363 | except Exception, e: | 363 | except Exception as e: |
526 | 364 | log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay)) | 364 | log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay)) |
527 | 365 | raise BackendException("Giving up trying to download '%s' after %d attempts" % (remote_filename,n)) | 365 | raise BackendException("Giving up trying to download '%s' after %d attempts" % (remote_filename,n)) |
528 | 366 | 366 | ||
529 | @@ -379,9 +379,9 @@ | |||
530 | 379 | else: | 379 | else: |
531 | 380 | try: | 380 | try: |
532 | 381 | return self.sftp.listdir() | 381 | return self.sftp.listdir() |
534 | 382 | except Exception, e: | 382 | except Exception as e: |
535 | 383 | raise BackendException("sftp listing of %s failed: %s" % (self.sftp.getcwd(),e)) | 383 | raise BackendException("sftp listing of %s failed: %s" % (self.sftp.getcwd(),e)) |
537 | 384 | except Exception, e: | 384 | except Exception as e: |
538 | 385 | log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay)) | 385 | log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay)) |
539 | 386 | raise BackendException("Giving up trying to list '%s' after %d attempts" % (self.remote_dir,n)) | 386 | raise BackendException("Giving up trying to list '%s' after %d attempts" % (self.remote_dir,n)) |
540 | 387 | 387 | ||
541 | @@ -397,12 +397,12 @@ | |||
542 | 397 | else: | 397 | else: |
543 | 398 | try: | 398 | try: |
544 | 399 | self.sftp.remove(fn) | 399 | self.sftp.remove(fn) |
546 | 400 | except Exception, e: | 400 | except Exception as e: |
547 | 401 | raise BackendException("sftp rm %s failed: %s" % (fn,e)) | 401 | raise BackendException("sftp rm %s failed: %s" % (fn,e)) |
548 | 402 | 402 | ||
549 | 403 | # If we get here, we deleted this file successfully. Move on to the next one. | 403 | # If we get here, we deleted this file successfully. Move on to the next one. |
550 | 404 | break | 404 | break |
552 | 405 | except Exception, e: | 405 | except Exception as e: |
553 | 406 | if n == globals.num_retries: | 406 | if n == globals.num_retries: |
554 | 407 | log.FatalError(str(e), log.ErrorCode.backend_error) | 407 | log.FatalError(str(e), log.ErrorCode.backend_error) |
555 | 408 | else: | 408 | else: |
556 | @@ -416,7 +416,7 @@ | |||
557 | 416 | chan=self.client.get_transport().open_session() | 416 | chan=self.client.get_transport().open_session() |
558 | 417 | chan.settimeout(globals.timeout) | 417 | chan.settimeout(globals.timeout) |
559 | 418 | chan.exec_command(cmd) | 418 | chan.exec_command(cmd) |
561 | 419 | except Exception, e: | 419 | except Exception as e: |
562 | 420 | raise BackendException("%sexecution failed: %s" % (errorprefix,e)) | 420 | raise BackendException("%sexecution failed: %s" % (errorprefix,e)) |
563 | 421 | output=chan.recv(-1) | 421 | output=chan.recv(-1) |
564 | 422 | res=chan.recv_exit_status() | 422 | res=chan.recv_exit_status() |
565 | @@ -434,7 +434,7 @@ | |||
566 | 434 | sshconfig = paramiko.SSHConfig() | 434 | sshconfig = paramiko.SSHConfig() |
567 | 435 | try: | 435 | try: |
568 | 436 | sshconfig.parse(open(file)) | 436 | sshconfig.parse(open(file)) |
570 | 437 | except Exception, e: | 437 | except Exception as e: |
571 | 438 | raise BackendException("could not load '%s', maybe corrupt?" % (file)) | 438 | raise BackendException("could not load '%s', maybe corrupt?" % (file)) |
572 | 439 | 439 | ||
573 | 440 | return sshconfig.lookup(host) | 440 | return sshconfig.lookup(host) |
574 | 441 | 441 | ||
575 | === modified file 'duplicity/backends/botobackend.py' | |||
576 | --- duplicity/backends/botobackend.py 2014-04-16 20:45:09 +0000 | |||
577 | +++ duplicity/backends/botobackend.py 2014-04-17 22:26:47 +0000 | |||
578 | @@ -22,8 +22,8 @@ | |||
579 | 22 | 22 | ||
580 | 23 | import duplicity.backend | 23 | import duplicity.backend |
581 | 24 | from duplicity import globals | 24 | from duplicity import globals |
584 | 25 | from _boto_multi import BotoBackend as BotoMultiUploadBackend | 25 | from ._boto_multi import BotoBackend as BotoMultiUploadBackend |
585 | 26 | from _boto_single import BotoBackend as BotoSingleUploadBackend | 26 | from ._boto_single import BotoBackend as BotoSingleUploadBackend |
586 | 27 | 27 | ||
587 | 28 | if globals.s3_use_multiprocessing: | 28 | if globals.s3_use_multiprocessing: |
588 | 29 | duplicity.backend.register_backend("gs", BotoMultiUploadBackend) | 29 | duplicity.backend.register_backend("gs", BotoMultiUploadBackend) |
589 | 30 | 30 | ||
590 | === modified file 'duplicity/backends/cfbackend.py' | |||
591 | --- duplicity/backends/cfbackend.py 2013-11-24 16:49:57 +0000 | |||
592 | +++ duplicity/backends/cfbackend.py 2014-04-17 22:26:47 +0000 | |||
593 | @@ -22,6 +22,6 @@ | |||
594 | 22 | 22 | ||
595 | 23 | if (globals.cf_backend and | 23 | if (globals.cf_backend and |
596 | 24 | globals.cf_backend.lower().strip() == 'pyrax'): | 24 | globals.cf_backend.lower().strip() == 'pyrax'): |
598 | 25 | import _cf_pyrax | 25 | from . import _cf_pyrax |
599 | 26 | else: | 26 | else: |
601 | 27 | import _cf_cloudfiles | 27 | from . import _cf_cloudfiles |
602 | 28 | 28 | ||
603 | === modified file 'duplicity/backends/dpbxbackend.py' | |||
604 | --- duplicity/backends/dpbxbackend.py 2014-03-05 17:05:04 +0000 | |||
605 | +++ duplicity/backends/dpbxbackend.py 2014-04-17 22:26:47 +0000 | |||
606 | @@ -29,6 +29,7 @@ | |||
607 | 29 | import urllib | 29 | import urllib |
608 | 30 | import re | 30 | import re |
609 | 31 | import locale, sys | 31 | import locale, sys |
610 | 32 | from functools import reduce | ||
611 | 32 | 33 | ||
612 | 33 | import traceback, StringIO | 34 | import traceback, StringIO |
613 | 34 | from exceptions import Exception | 35 | from exceptions import Exception |
614 | @@ -80,14 +81,14 @@ | |||
615 | 80 | 81 | ||
616 | 81 | try: | 82 | try: |
617 | 82 | return f(self, *args) | 83 | return f(self, *args) |
619 | 83 | except TypeError, e: | 84 | except TypeError as e: |
620 | 84 | log_exception(e) | 85 | log_exception(e) |
621 | 85 | log.FatalError('dpbx type error "%s"' % (e,), log.ErrorCode.backend_code_error) | 86 | log.FatalError('dpbx type error "%s"' % (e,), log.ErrorCode.backend_code_error) |
623 | 86 | except rest.ErrorResponse, e: | 87 | except rest.ErrorResponse as e: |
624 | 87 | msg = e.user_error_msg or str(e) | 88 | msg = e.user_error_msg or str(e) |
625 | 88 | log.Error('dpbx error: %s' % (msg,), log.ErrorCode.backend_command_error) | 89 | log.Error('dpbx error: %s' % (msg,), log.ErrorCode.backend_command_error) |
626 | 89 | raise e | 90 | raise e |
628 | 90 | except Exception, e: | 91 | except Exception as e: |
629 | 91 | log_exception(e) | 92 | log_exception(e) |
630 | 92 | log.Error('dpbx code error "%s"' % (e,), log.ErrorCode.backend_code_error) | 93 | log.Error('dpbx code error "%s"' % (e,), log.ErrorCode.backend_code_error) |
631 | 93 | raise e | 94 | raise e |
632 | @@ -119,7 +120,7 @@ | |||
633 | 119 | 120 | ||
634 | 120 | def write_creds(self, token): | 121 | def write_creds(self, token): |
635 | 121 | open(self.TOKEN_FILE, 'w').close() # create/reset file | 122 | open(self.TOKEN_FILE, 'w').close() # create/reset file |
637 | 122 | os.chmod(self.TOKEN_FILE,0600) # set it -rw------ (NOOP in Windows?) | 123 | os.chmod(self.TOKEN_FILE, 0o600) # set it -rw------ (NOOP in Windows?) |
638 | 123 | # now write the content | 124 | # now write the content |
639 | 124 | f = open(self.TOKEN_FILE, 'w') | 125 | f = open(self.TOKEN_FILE, 'w') |
640 | 125 | f.write("|".join([token.key, token.secret])) | 126 | f.write("|".join([token.key, token.secret])) |
641 | @@ -159,7 +160,7 @@ | |||
642 | 159 | if not self.sess.is_linked(): | 160 | if not self.sess.is_linked(): |
643 | 160 | try: # to login to the box | 161 | try: # to login to the box |
644 | 161 | self.sess.link() | 162 | self.sess.link() |
646 | 162 | except rest.ErrorResponse, e: | 163 | except rest.ErrorResponse as e: |
647 | 163 | log.FatalError('dpbx Error: %s\n' % str(e), log.ErrorCode.dpbx_nologin) | 164 | log.FatalError('dpbx Error: %s\n' % str(e), log.ErrorCode.dpbx_nologin) |
648 | 164 | if not self.sess.is_linked(): # stil not logged in | 165 | if not self.sess.is_linked(): # stil not logged in |
649 | 165 | log.FatalError("dpbx Cannot login: check your credentials",log.ErrorCode.dpbx_nologin) | 166 | log.FatalError("dpbx Cannot login: check your credentials",log.ErrorCode.dpbx_nologin) |
650 | 166 | 167 | ||
651 | === modified file 'duplicity/backends/gdocsbackend.py' | |||
652 | --- duplicity/backends/gdocsbackend.py 2014-01-03 10:37:54 +0000 | |||
653 | +++ duplicity/backends/gdocsbackend.py 2014-04-17 22:26:47 +0000 | |||
654 | @@ -113,7 +113,7 @@ | |||
655 | 113 | self.__handle_error("Failed to initialize upload of file '%s' to remote folder '%s'" | 113 | self.__handle_error("Failed to initialize upload of file '%s' to remote folder '%s'" |
656 | 114 | % (source_path.get_filename(), self.folder.title.text), raise_errors) | 114 | % (source_path.get_filename(), self.folder.title.text), raise_errors) |
657 | 115 | assert not file.close() | 115 | assert not file.close() |
659 | 116 | except Exception, e: | 116 | except Exception as e: |
660 | 117 | self.__handle_error("Failed to upload file '%s' to remote folder '%s': %s" | 117 | self.__handle_error("Failed to upload file '%s' to remote folder '%s': %s" |
661 | 118 | % (source_path.get_filename(), self.folder.title.text, str(e)), raise_errors) | 118 | % (source_path.get_filename(), self.folder.title.text, str(e)), raise_errors) |
662 | 119 | 119 | ||
663 | @@ -132,7 +132,7 @@ | |||
664 | 132 | else: | 132 | else: |
665 | 133 | self.__handle_error("Failed to find file '%s' in remote folder '%s'" | 133 | self.__handle_error("Failed to find file '%s' in remote folder '%s'" |
666 | 134 | % (remote_filename, self.folder.title.text), raise_errors) | 134 | % (remote_filename, self.folder.title.text), raise_errors) |
668 | 135 | except Exception, e: | 135 | except Exception as e: |
669 | 136 | self.__handle_error("Failed to download file '%s' in remote folder '%s': %s" | 136 | self.__handle_error("Failed to download file '%s' in remote folder '%s': %s" |
670 | 137 | % (remote_filename, self.folder.title.text, str(e)), raise_errors) | 137 | % (remote_filename, self.folder.title.text, str(e)), raise_errors) |
671 | 138 | 138 | ||
672 | @@ -143,7 +143,7 @@ | |||
673 | 143 | entries = self.__fetch_entries(self.folder.resource_id.text, | 143 | entries = self.__fetch_entries(self.folder.resource_id.text, |
674 | 144 | GDocsBackend.BACKUP_DOCUMENT_TYPE) | 144 | GDocsBackend.BACKUP_DOCUMENT_TYPE) |
675 | 145 | return [entry.title.text for entry in entries] | 145 | return [entry.title.text for entry in entries] |
677 | 146 | except Exception, e: | 146 | except Exception as e: |
678 | 147 | self.__handle_error("Failed to fetch list of files in remote folder '%s': %s" | 147 | self.__handle_error("Failed to fetch list of files in remote folder '%s': %s" |
679 | 148 | % (self.folder.title.text, str(e)), raise_errors) | 148 | % (self.folder.title.text, str(e)), raise_errors) |
680 | 149 | 149 | ||
681 | @@ -166,7 +166,7 @@ | |||
682 | 166 | else: | 166 | else: |
683 | 167 | log.Warn("Failed to fetch file '%s' in remote folder '%s'" | 167 | log.Warn("Failed to fetch file '%s' in remote folder '%s'" |
684 | 168 | % (filename, self.folder.title.text)) | 168 | % (filename, self.folder.title.text)) |
686 | 169 | except Exception, e: | 169 | except Exception as e: |
687 | 170 | self.__handle_error("Failed to remove file '%s' in remote folder '%s': %s" | 170 | self.__handle_error("Failed to remove file '%s' in remote folder '%s': %s" |
688 | 171 | % (filename, self.folder.title.text, str(e)), raise_errors) | 171 | % (filename, self.folder.title.text, str(e)), raise_errors) |
689 | 172 | 172 | ||
690 | @@ -184,7 +184,7 @@ | |||
691 | 184 | service='writely', | 184 | service='writely', |
692 | 185 | captcha_token=captcha_token, | 185 | captcha_token=captcha_token, |
693 | 186 | captcha_response=captcha_response) | 186 | captcha_response=captcha_response) |
695 | 187 | except gdata.client.CaptchaChallenge, challenge: | 187 | except gdata.client.CaptchaChallenge as challenge: |
696 | 188 | print('A captcha challenge in required. Please visit ' + challenge.captcha_url) | 188 | print('A captcha challenge in required. Please visit ' + challenge.captcha_url) |
697 | 189 | answer = None | 189 | answer = None |
698 | 190 | while not answer: | 190 | while not answer: |
699 | @@ -196,7 +196,7 @@ | |||
700 | 196 | 'access code for using this Duplicity backend. Follow the instrucction in ' | 196 | 'access code for using this Duplicity backend. Follow the instrucction in ' |
701 | 197 | 'http://www.google.com/support/accounts/bin/static.py?page=guide.cs&guide=1056283&topic=1056286 ' | 197 | 'http://www.google.com/support/accounts/bin/static.py?page=guide.cs&guide=1056283&topic=1056286 ' |
702 | 198 | 'and create your application-specific password to run duplicity backups.') | 198 | 'and create your application-specific password to run duplicity backups.') |
704 | 199 | except Exception, e: | 199 | except Exception as e: |
705 | 200 | self.__handle_error('Error while authenticating client: %s.' % str(e)) | 200 | self.__handle_error('Error while authenticating client: %s.' % str(e)) |
706 | 201 | 201 | ||
707 | 202 | def __fetch_entries(self, folder_id, type, title=None): | 202 | def __fetch_entries(self, folder_id, type, title=None): |
708 | @@ -238,7 +238,7 @@ | |||
709 | 238 | 238 | ||
710 | 239 | # Done! | 239 | # Done! |
711 | 240 | return result | 240 | return result |
713 | 241 | except Exception, e: | 241 | except Exception as e: |
714 | 242 | self.__handle_error('Error while fetching remote entries: %s.' % str(e)) | 242 | self.__handle_error('Error while fetching remote entries: %s.' % str(e)) |
715 | 243 | 243 | ||
716 | 244 | duplicity.backend.register_backend('gdocs', GDocsBackend) | 244 | duplicity.backend.register_backend('gdocs', GDocsBackend) |
717 | 245 | 245 | ||
718 | === modified file 'duplicity/backends/giobackend.py' | |||
719 | --- duplicity/backends/giobackend.py 2013-12-27 06:39:00 +0000 | |||
720 | +++ duplicity/backends/giobackend.py 2014-04-17 22:26:47 +0000 | |||
721 | @@ -93,14 +93,14 @@ | |||
722 | 93 | # Now make the directory if it doesn't exist | 93 | # Now make the directory if it doesn't exist |
723 | 94 | try: | 94 | try: |
724 | 95 | self.remote_file.make_directory_with_parents(None) | 95 | self.remote_file.make_directory_with_parents(None) |
726 | 96 | except GLib.GError, e: | 96 | except GLib.GError as e: |
727 | 97 | if e.code != Gio.IOErrorEnum.EXISTS: | 97 | if e.code != Gio.IOErrorEnum.EXISTS: |
728 | 98 | raise | 98 | raise |
729 | 99 | 99 | ||
730 | 100 | def done_with_mount(self, fileobj, result, loop): | 100 | def done_with_mount(self, fileobj, result, loop): |
731 | 101 | try: | 101 | try: |
732 | 102 | fileobj.mount_enclosing_volume_finish(result) | 102 | fileobj.mount_enclosing_volume_finish(result) |
734 | 103 | except GLib.GError, e: | 103 | except GLib.GError as e: |
735 | 104 | # check for NOT_SUPPORTED because some schemas (e.g. file://) validly don't | 104 | # check for NOT_SUPPORTED because some schemas (e.g. file://) validly don't |
736 | 105 | if e.code != Gio.IOErrorEnum.ALREADY_MOUNTED and e.code != Gio.IOErrorEnum.NOT_SUPPORTED: | 105 | if e.code != Gio.IOErrorEnum.ALREADY_MOUNTED and e.code != Gio.IOErrorEnum.NOT_SUPPORTED: |
737 | 106 | log.FatalError(_("Connection failed, please check your password: %s") | 106 | log.FatalError(_("Connection failed, please check your password: %s") |
738 | @@ -132,7 +132,7 @@ | |||
739 | 132 | source.copy(target, | 132 | source.copy(target, |
740 | 133 | Gio.FileCopyFlags.OVERWRITE | Gio.FileCopyFlags.NOFOLLOW_SYMLINKS, | 133 | Gio.FileCopyFlags.OVERWRITE | Gio.FileCopyFlags.NOFOLLOW_SYMLINKS, |
741 | 134 | None, self.copy_progress, None) | 134 | None, self.copy_progress, None) |
743 | 135 | except Exception, e: | 135 | except Exception as e: |
744 | 136 | self.handle_error(raise_errors, e, op, source.get_parse_name(), | 136 | self.handle_error(raise_errors, e, op, source.get_parse_name(), |
745 | 137 | target.get_parse_name()) | 137 | target.get_parse_name()) |
746 | 138 | 138 | ||
747 | @@ -163,7 +163,7 @@ | |||
748 | 163 | while info: | 163 | while info: |
749 | 164 | files.append(info.get_name()) | 164 | files.append(info.get_name()) |
750 | 165 | info = enum.next_file(None) | 165 | info = enum.next_file(None) |
752 | 166 | except Exception, e: | 166 | except Exception as e: |
753 | 167 | self.handle_error(raise_errors, e, 'list', | 167 | self.handle_error(raise_errors, e, 'list', |
754 | 168 | self.remote_file.get_parse_name()) | 168 | self.remote_file.get_parse_name()) |
755 | 169 | return files | 169 | return files |
756 | @@ -176,7 +176,7 @@ | |||
757 | 176 | target_file = self.remote_file.get_child(filename) | 176 | target_file = self.remote_file.get_child(filename) |
758 | 177 | try: | 177 | try: |
759 | 178 | target_file.delete(None) | 178 | target_file.delete(None) |
761 | 179 | except Exception, e: | 179 | except Exception as e: |
762 | 180 | if isinstance(e, GLib.GError): | 180 | if isinstance(e, GLib.GError): |
763 | 181 | if e.code == Gio.IOErrorEnum.NOT_FOUND: | 181 | if e.code == Gio.IOErrorEnum.NOT_FOUND: |
764 | 182 | continue | 182 | continue |
765 | @@ -193,7 +193,7 @@ | |||
766 | 193 | info = target_file.query_info(attrs, Gio.FileQueryInfoFlags.NONE, | 193 | info = target_file.query_info(attrs, Gio.FileQueryInfoFlags.NONE, |
767 | 194 | None) | 194 | None) |
768 | 195 | return {'size': info.get_size()} | 195 | return {'size': info.get_size()} |
770 | 196 | except Exception, e: | 196 | except Exception as e: |
771 | 197 | if isinstance(e, GLib.GError): | 197 | if isinstance(e, GLib.GError): |
772 | 198 | if e.code == Gio.IOErrorEnum.NOT_FOUND: | 198 | if e.code == Gio.IOErrorEnum.NOT_FOUND: |
773 | 199 | return {'size': -1} # early exit, no need to retry | 199 | return {'size': -1} # early exit, no need to retry |
774 | 200 | 200 | ||
775 | === modified file 'duplicity/backends/imapbackend.py' | |||
776 | --- duplicity/backends/imapbackend.py 2013-12-27 06:39:00 +0000 | |||
777 | +++ duplicity/backends/imapbackend.py 2014-04-17 22:26:47 +0000 | |||
778 | @@ -54,7 +54,7 @@ | |||
779 | 54 | 54 | ||
780 | 55 | # Set the password | 55 | # Set the password |
781 | 56 | if ( not parsed_url.password ): | 56 | if ( not parsed_url.password ): |
783 | 57 | if os.environ.has_key('IMAP_PASSWORD'): | 57 | if 'IMAP_PASSWORD' in os.environ: |
784 | 58 | password = os.environ.get('IMAP_PASSWORD') | 58 | password = os.environ.get('IMAP_PASSWORD') |
785 | 59 | else: | 59 | else: |
786 | 60 | password = getpass.getpass("Enter account password: ") | 60 | password = getpass.getpass("Enter account password: ") |
787 | 61 | 61 | ||
788 | === modified file 'duplicity/backends/localbackend.py' | |||
789 | --- duplicity/backends/localbackend.py 2013-12-27 06:39:00 +0000 | |||
790 | +++ duplicity/backends/localbackend.py 2014-04-17 22:26:47 +0000 | |||
791 | @@ -74,13 +74,13 @@ | |||
792 | 74 | source_path.rename(target_path) | 74 | source_path.rename(target_path) |
793 | 75 | except OSError: | 75 | except OSError: |
794 | 76 | pass | 76 | pass |
796 | 77 | except Exception, e: | 77 | except Exception as e: |
797 | 78 | self.handle_error(e, 'put', source_path.name, target_path.name) | 78 | self.handle_error(e, 'put', source_path.name, target_path.name) |
798 | 79 | else: | 79 | else: |
799 | 80 | return | 80 | return |
800 | 81 | try: | 81 | try: |
801 | 82 | target_path.writefileobj(source_path.open("rb")) | 82 | target_path.writefileobj(source_path.open("rb")) |
803 | 83 | except Exception, e: | 83 | except Exception as e: |
804 | 84 | self.handle_error(e, 'put', source_path.name, target_path.name) | 84 | self.handle_error(e, 'put', source_path.name, target_path.name) |
805 | 85 | 85 | ||
806 | 86 | """If we get here, renaming failed previously""" | 86 | """If we get here, renaming failed previously""" |
807 | @@ -93,7 +93,7 @@ | |||
808 | 93 | source_path = self.remote_pathdir.append(filename) | 93 | source_path = self.remote_pathdir.append(filename) |
809 | 94 | try: | 94 | try: |
810 | 95 | local_path.writefileobj(source_path.open("rb")) | 95 | local_path.writefileobj(source_path.open("rb")) |
812 | 96 | except Exception, e: | 96 | except Exception as e: |
813 | 97 | self.handle_error(e, 'get', source_path.name, local_path.name) | 97 | self.handle_error(e, 'get', source_path.name, local_path.name) |
814 | 98 | 98 | ||
815 | 99 | def _list(self): | 99 | def _list(self): |
816 | @@ -104,7 +104,7 @@ | |||
817 | 104 | pass | 104 | pass |
818 | 105 | try: | 105 | try: |
819 | 106 | return self.remote_pathdir.listdir() | 106 | return self.remote_pathdir.listdir() |
821 | 107 | except Exception, e: | 107 | except Exception as e: |
822 | 108 | self.handle_error(e, 'list', self.remote_pathdir.name) | 108 | self.handle_error(e, 'list', self.remote_pathdir.name) |
823 | 109 | 109 | ||
824 | 110 | def delete(self, filename_list): | 110 | def delete(self, filename_list): |
825 | @@ -113,7 +113,7 @@ | |||
826 | 113 | for filename in filename_list: | 113 | for filename in filename_list: |
827 | 114 | try: | 114 | try: |
828 | 115 | self.remote_pathdir.append(filename).delete() | 115 | self.remote_pathdir.append(filename).delete() |
830 | 116 | except Exception, e: | 116 | except Exception as e: |
831 | 117 | self.handle_error(e, 'delete', self.remote_pathdir.append(filename).name) | 117 | self.handle_error(e, 'delete', self.remote_pathdir.append(filename).name) |
832 | 118 | 118 | ||
833 | 119 | def _query_file_info(self, filename): | 119 | def _query_file_info(self, filename): |
834 | @@ -125,7 +125,7 @@ | |||
835 | 125 | target_file.setdata() | 125 | target_file.setdata() |
836 | 126 | size = target_file.getsize() | 126 | size = target_file.getsize() |
837 | 127 | return {'size': size} | 127 | return {'size': size} |
839 | 128 | except Exception, e: | 128 | except Exception as e: |
840 | 129 | self.handle_error(e, 'query', target_file.name) | 129 | self.handle_error(e, 'query', target_file.name) |
841 | 130 | return {'size': None} | 130 | return {'size': None} |
842 | 131 | 131 | ||
843 | 132 | 132 | ||
844 | === modified file 'duplicity/backends/megabackend.py' | |||
845 | --- duplicity/backends/megabackend.py 2013-12-27 06:39:00 +0000 | |||
846 | +++ duplicity/backends/megabackend.py 2014-04-17 22:26:47 +0000 | |||
847 | @@ -80,7 +80,7 @@ | |||
848 | 80 | 80 | ||
849 | 81 | self.client.upload(source_path.get_canonical(), self.folder, dest_filename=remote_filename) | 81 | self.client.upload(source_path.get_canonical(), self.folder, dest_filename=remote_filename) |
850 | 82 | 82 | ||
852 | 83 | except Exception, e: | 83 | except Exception as e: |
853 | 84 | self.__handle_error("Failed to upload file '%s' to remote folder '%s': %s" | 84 | self.__handle_error("Failed to upload file '%s' to remote folder '%s': %s" |
854 | 85 | % (source_path.get_canonical(), self.__get_node_name(self.folder), str(e)), raise_errors) | 85 | % (source_path.get_canonical(), self.__get_node_name(self.folder), str(e)), raise_errors) |
855 | 86 | 86 | ||
856 | @@ -100,7 +100,7 @@ | |||
857 | 100 | else: | 100 | else: |
858 | 101 | self.__handle_error("Failed to find file '%s' in remote folder '%s'" | 101 | self.__handle_error("Failed to find file '%s' in remote folder '%s'" |
859 | 102 | % (remote_filename, self.__get_node_name(self.folder)), raise_errors) | 102 | % (remote_filename, self.__get_node_name(self.folder)), raise_errors) |
861 | 103 | except Exception, e: | 103 | except Exception as e: |
862 | 104 | self.__handle_error("Failed to download file '%s' in remote folder '%s': %s" | 104 | self.__handle_error("Failed to download file '%s' in remote folder '%s': %s" |
863 | 105 | % (remote_filename, self.__get_node_name(self.folder), str(e)), raise_errors) | 105 | % (remote_filename, self.__get_node_name(self.folder), str(e)), raise_errors) |
864 | 106 | 106 | ||
865 | @@ -110,7 +110,7 @@ | |||
866 | 110 | try: | 110 | try: |
867 | 111 | entries = self.client.get_files_in_node(self.folder) | 111 | entries = self.client.get_files_in_node(self.folder) |
868 | 112 | return [ self.client.get_name_from_file({entry:entries[entry]}) for entry in entries] | 112 | return [ self.client.get_name_from_file({entry:entries[entry]}) for entry in entries] |
870 | 113 | except Exception, e: | 113 | except Exception as e: |
871 | 114 | self.__handle_error("Failed to fetch list of files in remote folder '%s': %s" | 114 | self.__handle_error("Failed to fetch list of files in remote folder '%s': %s" |
872 | 115 | % (self.__get_node_name(self.folder), str(e)), raise_errors) | 115 | % (self.__get_node_name(self.folder), str(e)), raise_errors) |
873 | 116 | 116 | ||
874 | @@ -129,7 +129,7 @@ | |||
875 | 129 | else: | 129 | else: |
876 | 130 | log.Warn("Failed to fetch file '%s' in remote folder '%s'" | 130 | log.Warn("Failed to fetch file '%s' in remote folder '%s'" |
877 | 131 | % (filename, self.__get_node_name(self.folder))) | 131 | % (filename, self.__get_node_name(self.folder))) |
879 | 132 | except Exception, e: | 132 | except Exception as e: |
880 | 133 | self.__handle_error("Failed to remove file '%s' in remote folder '%s': %s" | 133 | self.__handle_error("Failed to remove file '%s' in remote folder '%s': %s" |
881 | 134 | % (filename, self.__get_node_name(self.folder), str(e)), raise_errors) | 134 | % (filename, self.__get_node_name(self.folder), str(e)), raise_errors) |
882 | 135 | 135 | ||
883 | @@ -147,7 +147,7 @@ | |||
884 | 147 | def __authorize(self, email, password): | 147 | def __authorize(self, email, password): |
885 | 148 | try: | 148 | try: |
886 | 149 | self.client.login(email, password) | 149 | self.client.login(email, password) |
888 | 150 | except Exception, e: | 150 | except Exception as e: |
889 | 151 | self.__handle_error('Error while authenticating client: %s.' % str(e)) | 151 | self.__handle_error('Error while authenticating client: %s.' % str(e)) |
890 | 152 | 152 | ||
891 | 153 | def __filter_entries(self, entries, parent_id=None, title=None, type=None): | 153 | def __filter_entries(self, entries, parent_id=None, title=None, type=None): |
892 | @@ -169,7 +169,7 @@ | |||
893 | 169 | result.update({k:v}) | 169 | result.update({k:v}) |
894 | 170 | 170 | ||
895 | 171 | return result | 171 | return result |
897 | 172 | except Exception, e: | 172 | except Exception as e: |
898 | 173 | self.__handle_error('Error while fetching remote entries: %s.' % str(e)) | 173 | self.__handle_error('Error while fetching remote entries: %s.' % str(e)) |
899 | 174 | 174 | ||
900 | 175 | duplicity.backend.register_backend('mega', MegaBackend) | 175 | duplicity.backend.register_backend('mega', MegaBackend) |
901 | 176 | 176 | ||
902 | === modified file 'duplicity/backends/sshbackend.py' | |||
903 | --- duplicity/backends/sshbackend.py 2012-05-16 11:03:20 +0000 | |||
904 | +++ duplicity/backends/sshbackend.py 2014-04-17 22:26:47 +0000 | |||
905 | @@ -26,11 +26,11 @@ | |||
906 | 26 | 26 | ||
907 | 27 | if (globals.ssh_backend and | 27 | if (globals.ssh_backend and |
908 | 28 | globals.ssh_backend.lower().strip() == 'pexpect'): | 28 | globals.ssh_backend.lower().strip() == 'pexpect'): |
910 | 29 | import _ssh_pexpect | 29 | from . import _ssh_pexpect |
911 | 30 | else: | 30 | else: |
912 | 31 | # take user by the hand to prevent typo driven bug reports | 31 | # take user by the hand to prevent typo driven bug reports |
913 | 32 | if globals.ssh_backend.lower().strip() != 'paramiko': | 32 | if globals.ssh_backend.lower().strip() != 'paramiko': |
914 | 33 | log.Warn(_("Warning: Selected ssh backend '%s' is neither 'paramiko nor 'pexpect'. Will use default paramiko instead.") % globals.ssh_backend) | 33 | log.Warn(_("Warning: Selected ssh backend '%s' is neither 'paramiko nor 'pexpect'. Will use default paramiko instead.") % globals.ssh_backend) |
915 | 34 | warn_option("--scp-command", globals.scp_command) | 34 | warn_option("--scp-command", globals.scp_command) |
916 | 35 | warn_option("--sftp-command", globals.sftp_command) | 35 | warn_option("--sftp-command", globals.sftp_command) |
918 | 36 | import _ssh_paramiko | 36 | from . import _ssh_paramiko |
919 | 37 | 37 | ||
920 | === modified file 'duplicity/backends/swiftbackend.py' | |||
921 | --- duplicity/backends/swiftbackend.py 2013-12-27 06:39:00 +0000 | |||
922 | +++ duplicity/backends/swiftbackend.py 2014-04-17 22:26:47 +0000 | |||
923 | @@ -44,20 +44,20 @@ | |||
924 | 44 | conn_kwargs = {} | 44 | conn_kwargs = {} |
925 | 45 | 45 | ||
926 | 46 | # if the user has already authenticated | 46 | # if the user has already authenticated |
928 | 47 | if os.environ.has_key('SWIFT_PREAUTHURL') and os.environ.has_key('SWIFT_PREAUTHTOKEN'): | 47 | if 'SWIFT_PREAUTHURL' in os.environ and 'SWIFT_PREAUTHTOKEN' in os.environ: |
929 | 48 | conn_kwargs['preauthurl'] = os.environ['SWIFT_PREAUTHURL'] | 48 | conn_kwargs['preauthurl'] = os.environ['SWIFT_PREAUTHURL'] |
930 | 49 | conn_kwargs['preauthtoken'] = os.environ['SWIFT_PREAUTHTOKEN'] | 49 | conn_kwargs['preauthtoken'] = os.environ['SWIFT_PREAUTHTOKEN'] |
931 | 50 | 50 | ||
932 | 51 | else: | 51 | else: |
934 | 52 | if not os.environ.has_key('SWIFT_USERNAME'): | 52 | if 'SWIFT_USERNAME' not in os.environ: |
935 | 53 | raise BackendException('SWIFT_USERNAME environment variable ' | 53 | raise BackendException('SWIFT_USERNAME environment variable ' |
936 | 54 | 'not set.') | 54 | 'not set.') |
937 | 55 | 55 | ||
939 | 56 | if not os.environ.has_key('SWIFT_PASSWORD'): | 56 | if 'SWIFT_PASSWORD' not in os.environ: |
940 | 57 | raise BackendException('SWIFT_PASSWORD environment variable ' | 57 | raise BackendException('SWIFT_PASSWORD environment variable ' |
941 | 58 | 'not set.') | 58 | 'not set.') |
942 | 59 | 59 | ||
944 | 60 | if not os.environ.has_key('SWIFT_AUTHURL'): | 60 | if 'SWIFT_AUTHURL' not in os.environ: |
945 | 61 | raise BackendException('SWIFT_AUTHURL environment variable ' | 61 | raise BackendException('SWIFT_AUTHURL environment variable ' |
946 | 62 | 'not set.') | 62 | 'not set.') |
947 | 63 | 63 | ||
948 | @@ -65,11 +65,11 @@ | |||
949 | 65 | conn_kwargs['key'] = os.environ['SWIFT_PASSWORD'] | 65 | conn_kwargs['key'] = os.environ['SWIFT_PASSWORD'] |
950 | 66 | conn_kwargs['authurl'] = os.environ['SWIFT_AUTHURL'] | 66 | conn_kwargs['authurl'] = os.environ['SWIFT_AUTHURL'] |
951 | 67 | 67 | ||
953 | 68 | if os.environ.has_key('SWIFT_AUTHVERSION'): | 68 | if 'SWIFT_AUTHVERSION' in os.environ: |
954 | 69 | conn_kwargs['auth_version'] = os.environ['SWIFT_AUTHVERSION'] | 69 | conn_kwargs['auth_version'] = os.environ['SWIFT_AUTHVERSION'] |
955 | 70 | else: | 70 | else: |
956 | 71 | conn_kwargs['auth_version'] = '1' | 71 | conn_kwargs['auth_version'] = '1' |
958 | 72 | if os.environ.has_key('SWIFT_TENANTNAME'): | 72 | if 'SWIFT_TENANTNAME' in os.environ: |
959 | 73 | conn_kwargs['tenant_name'] = os.environ['SWIFT_TENANTNAME'] | 73 | conn_kwargs['tenant_name'] = os.environ['SWIFT_TENANTNAME'] |
960 | 74 | 74 | ||
961 | 75 | self.container = parsed_url.path.lstrip('/') | 75 | self.container = parsed_url.path.lstrip('/') |
962 | @@ -77,7 +77,7 @@ | |||
963 | 77 | try: | 77 | try: |
964 | 78 | self.conn = Connection(**conn_kwargs) | 78 | self.conn = Connection(**conn_kwargs) |
965 | 79 | self.conn.put_container(self.container) | 79 | self.conn.put_container(self.container) |
967 | 80 | except Exception, e: | 80 | except Exception as e: |
968 | 81 | log.FatalError("Connection failed: %s %s" | 81 | log.FatalError("Connection failed: %s %s" |
969 | 82 | % (e.__class__.__name__, str(e)), | 82 | % (e.__class__.__name__, str(e)), |
970 | 83 | log.ErrorCode.connection_failed) | 83 | log.ErrorCode.connection_failed) |
971 | @@ -93,10 +93,10 @@ | |||
972 | 93 | remote_filename, | 93 | remote_filename, |
973 | 94 | file(source_path.name)) | 94 | file(source_path.name)) |
974 | 95 | return | 95 | return |
976 | 96 | except self.resp_exc, error: | 96 | except self.resp_exc as error: |
977 | 97 | log.Warn("Upload of '%s' failed (attempt %d): Swift server returned: %s %s" | 97 | log.Warn("Upload of '%s' failed (attempt %d): Swift server returned: %s %s" |
978 | 98 | % (remote_filename, n, error.http_status, error.message)) | 98 | % (remote_filename, n, error.http_status, error.message)) |
980 | 99 | except Exception, e: | 99 | except Exception as e: |
981 | 100 | log.Warn("Upload of '%s' failed (attempt %s): %s: %s" | 100 | log.Warn("Upload of '%s' failed (attempt %s): %s: %s" |
982 | 101 | % (remote_filename, n, e.__class__.__name__, str(e))) | 101 | % (remote_filename, n, e.__class__.__name__, str(e))) |
983 | 102 | log.Debug("Backtrace of previous error: %s" | 102 | log.Debug("Backtrace of previous error: %s" |
984 | @@ -117,10 +117,10 @@ | |||
985 | 117 | f.write(chunk) | 117 | f.write(chunk) |
986 | 118 | local_path.setdata() | 118 | local_path.setdata() |
987 | 119 | return | 119 | return |
989 | 120 | except self.resp_exc, resperr: | 120 | except self.resp_exc as resperr: |
990 | 121 | log.Warn("Download of '%s' failed (attempt %s): Swift server returned: %s %s" | 121 | log.Warn("Download of '%s' failed (attempt %s): Swift server returned: %s %s" |
991 | 122 | % (remote_filename, n, resperr.http_status, resperr.message)) | 122 | % (remote_filename, n, resperr.http_status, resperr.message)) |
993 | 123 | except Exception, e: | 123 | except Exception as e: |
994 | 124 | log.Warn("Download of '%s' failed (attempt %s): %s: %s" | 124 | log.Warn("Download of '%s' failed (attempt %s): %s: %s" |
995 | 125 | % (remote_filename, n, e.__class__.__name__, str(e))) | 125 | % (remote_filename, n, e.__class__.__name__, str(e))) |
996 | 126 | log.Debug("Backtrace of previous error: %s" | 126 | log.Debug("Backtrace of previous error: %s" |
997 | @@ -139,10 +139,10 @@ | |||
998 | 139 | # to make multiple requests to get them all. | 139 | # to make multiple requests to get them all. |
999 | 140 | headers, objs = self.conn.get_container(self.container) | 140 | headers, objs = self.conn.get_container(self.container) |
1000 | 141 | return [ o['name'] for o in objs ] | 141 | return [ o['name'] for o in objs ] |
1002 | 142 | except self.resp_exc, resperr: | 142 | except self.resp_exc as resperr: |
1003 | 143 | log.Warn("Listing of '%s' failed (attempt %s): Swift server returned: %s %s" | 143 | log.Warn("Listing of '%s' failed (attempt %s): Swift server returned: %s %s" |
1004 | 144 | % (self.container, n, resperr.http_status, resperr.message)) | 144 | % (self.container, n, resperr.http_status, resperr.message)) |
1006 | 145 | except Exception, e: | 145 | except Exception as e: |
1007 | 146 | log.Warn("Listing of '%s' failed (attempt %s): %s: %s" | 146 | log.Warn("Listing of '%s' failed (attempt %s): %s: %s" |
1008 | 147 | % (self.container, n, e.__class__.__name__, str(e))) | 147 | % (self.container, n, e.__class__.__name__, str(e))) |
1009 | 148 | log.Debug("Backtrace of previous error: %s" | 148 | log.Debug("Backtrace of previous error: %s" |
1010 | @@ -159,14 +159,14 @@ | |||
1011 | 159 | try: | 159 | try: |
1012 | 160 | self.conn.delete_object(self.container, remote_filename) | 160 | self.conn.delete_object(self.container, remote_filename) |
1013 | 161 | return | 161 | return |
1015 | 162 | except self.resp_exc, resperr: | 162 | except self.resp_exc as resperr: |
1016 | 163 | if n > 1 and resperr.http_status == 404: | 163 | if n > 1 and resperr.http_status == 404: |
1017 | 164 | # We failed on a timeout, but delete succeeded on the server | 164 | # We failed on a timeout, but delete succeeded on the server |
1018 | 165 | log.Warn("Delete of '%s' missing after retry - must have succeded earlier" % remote_filename ) | 165 | log.Warn("Delete of '%s' missing after retry - must have succeded earlier" % remote_filename ) |
1019 | 166 | return | 166 | return |
1020 | 167 | log.Warn("Delete of '%s' failed (attempt %s): Swift server returned: %s %s" | 167 | log.Warn("Delete of '%s' failed (attempt %s): Swift server returned: %s %s" |
1021 | 168 | % (remote_filename, n, resperr.http_status, resperr.message)) | 168 | % (remote_filename, n, resperr.http_status, resperr.message)) |
1023 | 169 | except Exception, e: | 169 | except Exception as e: |
1024 | 170 | log.Warn("Delete of '%s' failed (attempt %s): %s: %s" | 170 | log.Warn("Delete of '%s' failed (attempt %s): %s: %s" |
1025 | 171 | % (remote_filename, n, e.__class__.__name__, str(e))) | 171 | % (remote_filename, n, e.__class__.__name__, str(e))) |
1026 | 172 | log.Debug("Backtrace of previous error: %s" | 172 | log.Debug("Backtrace of previous error: %s" |
1027 | @@ -186,10 +186,10 @@ | |||
1028 | 186 | def _query_file_info(self, filename, raise_errors=False): | 186 | def _query_file_info(self, filename, raise_errors=False): |
1029 | 187 | try: | 187 | try: |
1030 | 188 | sobject = self.conn.head_object(self.container, filename) | 188 | sobject = self.conn.head_object(self.container, filename) |
1032 | 189 | return {'size': long(sobject['content-length'])} | 189 | return {'size': int(sobject['content-length'])} |
1033 | 190 | except self.resp_exc: | 190 | except self.resp_exc: |
1034 | 191 | return {'size': -1} | 191 | return {'size': -1} |
1036 | 192 | except Exception, e: | 192 | except Exception as e: |
1037 | 193 | log.Warn("Error querying '%s/%s': %s" | 193 | log.Warn("Error querying '%s/%s': %s" |
1038 | 194 | "" % (self.container, | 194 | "" % (self.container, |
1039 | 195 | filename, | 195 | filename, |
1040 | 196 | 196 | ||
1041 | === modified file 'duplicity/backends/webdavbackend.py' | |||
1042 | --- duplicity/backends/webdavbackend.py 2014-04-16 20:45:09 +0000 | |||
1043 | +++ duplicity/backends/webdavbackend.py 2014-04-17 22:26:47 +0000 | |||
1044 | @@ -96,7 +96,7 @@ | |||
1045 | 96 | def request(self, *args, **kwargs): | 96 | def request(self, *args, **kwargs): |
1046 | 97 | try: | 97 | try: |
1047 | 98 | return httplib.HTTPSConnection.request(self, *args, **kwargs) | 98 | return httplib.HTTPSConnection.request(self, *args, **kwargs) |
1049 | 99 | except ssl.SSLError, e: | 99 | except ssl.SSLError as e: |
1050 | 100 | # encapsulate ssl errors | 100 | # encapsulate ssl errors |
1051 | 101 | raise BackendException("SSL failed: %s" % str(e),log.ErrorCode.backend_error) | 101 | raise BackendException("SSL failed: %s" % str(e),log.ErrorCode.backend_error) |
1052 | 102 | 102 | ||
1053 | @@ -293,7 +293,7 @@ | |||
1054 | 293 | if filename: | 293 | if filename: |
1055 | 294 | result.append(filename) | 294 | result.append(filename) |
1056 | 295 | return result | 295 | return result |
1058 | 296 | except Exception, e: | 296 | except Exception as e: |
1059 | 297 | raise e | 297 | raise e |
1060 | 298 | finally: | 298 | finally: |
1061 | 299 | if response: response.close() | 299 | if response: response.close() |
1062 | @@ -383,7 +383,7 @@ | |||
1063 | 383 | reason = response.reason | 383 | reason = response.reason |
1064 | 384 | response.close() | 384 | response.close() |
1065 | 385 | raise BackendException("Bad status code %s reason %s." % (status,reason)) | 385 | raise BackendException("Bad status code %s reason %s." % (status,reason)) |
1067 | 386 | except Exception, e: | 386 | except Exception as e: |
1068 | 387 | raise e | 387 | raise e |
1069 | 388 | finally: | 388 | finally: |
1070 | 389 | if response: response.close() | 389 | if response: response.close() |
1071 | @@ -407,7 +407,7 @@ | |||
1072 | 407 | reason = response.reason | 407 | reason = response.reason |
1073 | 408 | response.close() | 408 | response.close() |
1074 | 409 | raise BackendException("Bad status code %s reason %s." % (status,reason)) | 409 | raise BackendException("Bad status code %s reason %s." % (status,reason)) |
1076 | 410 | except Exception, e: | 410 | except Exception as e: |
1077 | 411 | raise e | 411 | raise e |
1078 | 412 | finally: | 412 | finally: |
1079 | 413 | if response: response.close() | 413 | if response: response.close() |
1080 | @@ -429,7 +429,7 @@ | |||
1081 | 429 | reason = response.reason | 429 | reason = response.reason |
1082 | 430 | response.close() | 430 | response.close() |
1083 | 431 | raise BackendException("Bad status code %s reason %s." % (status,reason)) | 431 | raise BackendException("Bad status code %s reason %s." % (status,reason)) |
1085 | 432 | except Exception, e: | 432 | except Exception as e: |
1086 | 433 | raise e | 433 | raise e |
1087 | 434 | finally: | 434 | finally: |
1088 | 435 | if response: response.close() | 435 | if response: response.close() |
1089 | 436 | 436 | ||
1090 | === modified file 'duplicity/cached_ops.py' | |||
1091 | --- duplicity/cached_ops.py 2012-11-09 03:21:40 +0000 | |||
1092 | +++ duplicity/cached_ops.py 2014-04-17 22:26:47 +0000 | |||
1093 | @@ -34,7 +34,7 @@ | |||
1094 | 34 | def __call__(self, *args): | 34 | def __call__(self, *args): |
1095 | 35 | try: | 35 | try: |
1096 | 36 | return self.cache[args] | 36 | return self.cache[args] |
1098 | 37 | except (KeyError, TypeError), e: | 37 | except (KeyError, TypeError) as e: |
1099 | 38 | result = self.f(*args) | 38 | result = self.f(*args) |
1100 | 39 | if not isinstance(e, TypeError): | 39 | if not isinstance(e, TypeError): |
1101 | 40 | # TypeError most likely means that args is not hashable | 40 | # TypeError most likely means that args is not hashable |
1102 | 41 | 41 | ||
1103 | === modified file 'duplicity/collections.py' | |||
1104 | --- duplicity/collections.py 2014-01-17 16:44:46 +0000 | |||
1105 | +++ duplicity/collections.py 2014-04-17 22:26:47 +0000 | |||
1106 | @@ -96,7 +96,7 @@ | |||
1107 | 96 | self.set_manifest(filename) | 96 | self.set_manifest(filename) |
1108 | 97 | else: | 97 | else: |
1109 | 98 | assert pr.volume_number is not None | 98 | assert pr.volume_number is not None |
1111 | 99 | assert not self.volume_name_dict.has_key(pr.volume_number), \ | 99 | assert pr.volume_number not in self.volume_name_dict, \ |
1112 | 100 | (self.volume_name_dict, filename) | 100 | (self.volume_name_dict, filename) |
1113 | 101 | self.volume_name_dict[pr.volume_number] = filename | 101 | self.volume_name_dict[pr.volume_number] = filename |
1114 | 102 | 102 | ||
1115 | @@ -222,7 +222,7 @@ | |||
1116 | 222 | # public key w/o secret key | 222 | # public key w/o secret key |
1117 | 223 | try: | 223 | try: |
1118 | 224 | manifest_buffer = self.backend.get_data(self.remote_manifest_name) | 224 | manifest_buffer = self.backend.get_data(self.remote_manifest_name) |
1120 | 225 | except GPGError, message: | 225 | except GPGError as message: |
1121 | 226 | #TODO: We check for gpg v1 and v2 messages, should be an error code. | 226 | #TODO: We check for gpg v1 and v2 messages, should be an error code. |
1122 | 227 | if ("secret key not available" in message.args[0] or | 227 | if ("secret key not available" in message.args[0] or |
1123 | 228 | "No secret key" in message.args[0]): | 228 | "No secret key" in message.args[0]): |
1124 | @@ -916,7 +916,7 @@ | |||
1125 | 916 | # Build dictionary from end_times to lists of corresponding chains | 916 | # Build dictionary from end_times to lists of corresponding chains |
1126 | 917 | endtime_chain_dict = {} | 917 | endtime_chain_dict = {} |
1127 | 918 | for chain in chain_list: | 918 | for chain in chain_list: |
1129 | 919 | if endtime_chain_dict.has_key(chain.end_time): | 919 | if chain.end_time in endtime_chain_dict: |
1130 | 920 | endtime_chain_dict[chain.end_time].append(chain) | 920 | endtime_chain_dict[chain.end_time].append(chain) |
1131 | 921 | else: | 921 | else: |
1132 | 922 | endtime_chain_dict[chain.end_time] = [chain] | 922 | endtime_chain_dict[chain.end_time] = [chain] |
1133 | 923 | 923 | ||
1134 | === modified file 'duplicity/commandline.py' | |||
1135 | --- duplicity/commandline.py 2014-04-17 17:45:37 +0000 | |||
1136 | +++ duplicity/commandline.py 2014-04-17 22:26:47 +0000 | |||
1137 | @@ -109,7 +109,7 @@ | |||
1138 | 109 | def check_time(option, opt, value): | 109 | def check_time(option, opt, value): |
1139 | 110 | try: | 110 | try: |
1140 | 111 | return dup_time.genstrtotime(value) | 111 | return dup_time.genstrtotime(value) |
1142 | 112 | except dup_time.TimeException, e: | 112 | except dup_time.TimeException as e: |
1143 | 113 | raise optparse.OptionValueError(str(e)) | 113 | raise optparse.OptionValueError(str(e)) |
1144 | 114 | 114 | ||
1145 | 115 | def check_verbosity(option, opt, value): | 115 | def check_verbosity(option, opt, value): |
1146 | 116 | 116 | ||
1147 | === modified file 'duplicity/diffdir.py' | |||
1148 | --- duplicity/diffdir.py 2013-12-27 06:39:00 +0000 | |||
1149 | +++ duplicity/diffdir.py 2014-04-17 22:26:47 +0000 | |||
1150 | @@ -389,7 +389,7 @@ | |||
1151 | 389 | def read(self, length = -1): | 389 | def read(self, length = -1): |
1152 | 390 | try: | 390 | try: |
1153 | 391 | buf = self.infile.read(length) | 391 | buf = self.infile.read(length) |
1155 | 392 | except IOError, ex: | 392 | except IOError as ex: |
1156 | 393 | buf = "" | 393 | buf = "" |
1157 | 394 | log.Warn(_("Error %s getting delta for %s") % (str(ex), util.ufn(self.infile.name))) | 394 | log.Warn(_("Error %s getting delta for %s") % (str(ex), util.ufn(self.infile.name))) |
1158 | 395 | if stats: | 395 | if stats: |
1159 | @@ -461,7 +461,7 @@ | |||
1160 | 461 | TarBlockIter initializer | 461 | TarBlockIter initializer |
1161 | 462 | """ | 462 | """ |
1162 | 463 | self.input_iter = input_iter | 463 | self.input_iter = input_iter |
1164 | 464 | self.offset = 0l # total length of data read | 464 | self.offset = 0 # total length of data read |
1165 | 465 | self.process_waiting = False # process_continued has more blocks | 465 | self.process_waiting = False # process_continued has more blocks |
1166 | 466 | self.process_next_vol_number = None # next volume number to write in multivol | 466 | self.process_next_vol_number = None # next volume number to write in multivol |
1167 | 467 | self.previous_index = None # holds index of last block returned | 467 | self.previous_index = None # holds index of last block returned |
1168 | @@ -564,7 +564,7 @@ | |||
1169 | 564 | Return closing string for tarfile, reset offset | 564 | Return closing string for tarfile, reset offset |
1170 | 565 | """ | 565 | """ |
1171 | 566 | blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) #@UnusedVariable | 566 | blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) #@UnusedVariable |
1173 | 567 | self.offset = 0l | 567 | self.offset = 0 |
1174 | 568 | return '\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0 | 568 | return '\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0 |
1175 | 569 | 569 | ||
1176 | 570 | def __iter__(self): | 570 | def __iter__(self): |
1177 | @@ -736,5 +736,5 @@ | |||
1178 | 736 | return 512 # set minimum of 512 bytes | 736 | return 512 # set minimum of 512 bytes |
1179 | 737 | else: | 737 | else: |
1180 | 738 | # Split file into about 2000 pieces, rounding to 512 | 738 | # Split file into about 2000 pieces, rounding to 512 |
1182 | 739 | file_blocksize = long((file_len / (2000 * 512)) * 512) | 739 | file_blocksize = int((file_len / (2000 * 512)) * 512) |
1183 | 740 | return min(file_blocksize, globals.max_blocksize) | 740 | return min(file_blocksize, globals.max_blocksize) |
1184 | 741 | 741 | ||
1185 | === modified file 'duplicity/dup_temp.py' | |||
1186 | --- duplicity/dup_temp.py 2013-12-27 06:39:00 +0000 | |||
1187 | +++ duplicity/dup_temp.py 2014-04-17 22:26:47 +0000 | |||
1188 | @@ -179,9 +179,9 @@ | |||
1189 | 179 | tgt = self.dirpath.append(self.remname) | 179 | tgt = self.dirpath.append(self.remname) |
1190 | 180 | src_iter = SrcIter(src) | 180 | src_iter = SrcIter(src) |
1191 | 181 | if pr.compressed: | 181 | if pr.compressed: |
1193 | 182 | gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxint) | 182 | gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxsize) |
1194 | 183 | elif pr.encrypted: | 183 | elif pr.encrypted: |
1196 | 184 | gpg.GPGWriteFile(src_iter, tgt.name, globals.gpg_profile, size = sys.maxint) | 184 | gpg.GPGWriteFile(src_iter, tgt.name, globals.gpg_profile, size = sys.maxsize) |
1197 | 185 | else: | 185 | else: |
1198 | 186 | os.system("cp -p \"%s\" \"%s\"" % (src.name, tgt.name)) | 186 | os.system("cp -p \"%s\" \"%s\"" % (src.name, tgt.name)) |
1199 | 187 | globals.backend.move(tgt) #@UndefinedVariable | 187 | globals.backend.move(tgt) #@UndefinedVariable |
1200 | @@ -195,7 +195,7 @@ | |||
1201 | 195 | src_iter = SrcIter(src) | 195 | src_iter = SrcIter(src) |
1202 | 196 | pr = file_naming.parse(self.permname) | 196 | pr = file_naming.parse(self.permname) |
1203 | 197 | if pr.compressed: | 197 | if pr.compressed: |
1205 | 198 | gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxint) | 198 | gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxsize) |
1206 | 199 | os.unlink(src.name) | 199 | os.unlink(src.name) |
1207 | 200 | else: | 200 | else: |
1208 | 201 | os.rename(src.name, tgt.name) | 201 | os.rename(src.name, tgt.name) |
1209 | 202 | 202 | ||
1210 | === modified file 'duplicity/dup_threading.py' | |||
1211 | --- duplicity/dup_threading.py 2010-07-22 19:15:11 +0000 | |||
1212 | +++ duplicity/dup_threading.py 2014-04-17 22:26:47 +0000 | |||
1213 | @@ -192,7 +192,7 @@ | |||
1214 | 192 | if state['error'] is None: | 192 | if state['error'] is None: |
1215 | 193 | return state['value'] | 193 | return state['value'] |
1216 | 194 | else: | 194 | else: |
1218 | 195 | raise state['error'], None, state['trace'] | 195 | raise state['error'].with_traceback(state['trace']) |
1219 | 196 | finally: | 196 | finally: |
1220 | 197 | cv.release() | 197 | cv.release() |
1221 | 198 | 198 | ||
1222 | @@ -207,7 +207,7 @@ | |||
1223 | 207 | cv.release() | 207 | cv.release() |
1224 | 208 | 208 | ||
1225 | 209 | return (True, waiter) | 209 | return (True, waiter) |
1227 | 210 | except Exception, e: | 210 | except Exception as e: |
1228 | 211 | cv.acquire() | 211 | cv.acquire() |
1229 | 212 | state['done'] = True | 212 | state['done'] = True |
1230 | 213 | state['error'] = e | 213 | state['error'] = e |
1231 | 214 | 214 | ||
1232 | === modified file 'duplicity/dup_time.py' | |||
1233 | --- duplicity/dup_time.py 2011-11-03 11:27:45 +0000 | |||
1234 | +++ duplicity/dup_time.py 2014-04-17 22:26:47 +0000 | |||
1235 | @@ -62,7 +62,7 @@ | |||
1236 | 62 | def setcurtime(time_in_secs = None): | 62 | def setcurtime(time_in_secs = None): |
1237 | 63 | """Sets the current time in curtime and curtimestr""" | 63 | """Sets the current time in curtime and curtimestr""" |
1238 | 64 | global curtime, curtimestr | 64 | global curtime, curtimestr |
1240 | 65 | t = time_in_secs or long(time.time()) | 65 | t = time_in_secs or int(time.time()) |
1241 | 66 | assert type(t) in (types.LongType, types.IntType) | 66 | assert type(t) in (types.LongType, types.IntType) |
1242 | 67 | curtime, curtimestr = t, timetostring(t) | 67 | curtime, curtimestr = t, timetostring(t) |
1243 | 68 | 68 | ||
1244 | @@ -137,9 +137,9 @@ | |||
1245 | 137 | # even when we're not in the same timezone that wrote the | 137 | # even when we're not in the same timezone that wrote the |
1246 | 138 | # string | 138 | # string |
1247 | 139 | if len(timestring) == 16: | 139 | if len(timestring) == 16: |
1249 | 140 | return long(utc_in_secs) | 140 | return int(utc_in_secs) |
1250 | 141 | else: | 141 | else: |
1252 | 142 | return long(utc_in_secs + tzdtoseconds(timestring[19:])) | 142 | return int(utc_in_secs + tzdtoseconds(timestring[19:])) |
1253 | 143 | except (TypeError, ValueError, AssertionError): | 143 | except (TypeError, ValueError, AssertionError): |
1254 | 144 | return None | 144 | return None |
1255 | 145 | 145 | ||
1256 | @@ -169,7 +169,7 @@ | |||
1257 | 169 | if seconds == 1: | 169 | if seconds == 1: |
1258 | 170 | partlist.append("1 second") | 170 | partlist.append("1 second") |
1259 | 171 | elif not partlist or seconds > 1: | 171 | elif not partlist or seconds > 1: |
1261 | 172 | if isinstance(seconds, int) or isinstance(seconds, long): | 172 | if isinstance(seconds, (types.LongType, types.IntType)): |
1262 | 173 | partlist.append("%s seconds" % seconds) | 173 | partlist.append("%s seconds" % seconds) |
1263 | 174 | else: | 174 | else: |
1264 | 175 | partlist.append("%.2f seconds" % seconds) | 175 | partlist.append("%.2f seconds" % seconds) |
1265 | 176 | 176 | ||
1266 | === modified file 'duplicity/file_naming.py' | |||
1267 | --- duplicity/file_naming.py 2014-01-31 12:41:00 +0000 | |||
1268 | +++ duplicity/file_naming.py 2014-04-17 22:26:47 +0000 | |||
1269 | @@ -158,7 +158,7 @@ | |||
1270 | 158 | """ | 158 | """ |
1271 | 159 | Convert string s in base 36 to long int | 159 | Convert string s in base 36 to long int |
1272 | 160 | """ | 160 | """ |
1274 | 161 | total = 0L | 161 | total = 0 |
1275 | 162 | for i in range(len(s)): | 162 | for i in range(len(s)): |
1276 | 163 | total *= 36 | 163 | total *= 36 |
1277 | 164 | digit_ord = ord(s[i]) | 164 | digit_ord = ord(s[i]) |
1278 | 165 | 165 | ||
1279 | === modified file 'duplicity/globals.py' | |||
1280 | --- duplicity/globals.py 2014-04-09 09:22:27 +0000 | |||
1281 | +++ duplicity/globals.py 2014-04-17 22:26:47 +0000 | |||
1282 | @@ -87,7 +87,7 @@ | |||
1283 | 87 | gpg_options = '' | 87 | gpg_options = '' |
1284 | 88 | 88 | ||
1285 | 89 | # Maximum file blocksize | 89 | # Maximum file blocksize |
1287 | 90 | max_blocksize = 2048L | 90 | max_blocksize = 2048 |
1288 | 91 | 91 | ||
1289 | 92 | # If true, filelists and directory statistics will be split on | 92 | # If true, filelists and directory statistics will be split on |
1290 | 93 | # nulls instead of newlines. | 93 | # nulls instead of newlines. |
1291 | 94 | 94 | ||
1292 | === modified file 'duplicity/gpg.py' | |||
1293 | --- duplicity/gpg.py 2013-12-27 06:39:00 +0000 | |||
1294 | +++ duplicity/gpg.py 2014-04-17 22:26:47 +0000 | |||
1295 | @@ -215,7 +215,7 @@ | |||
1296 | 215 | msg += unicode(line.strip(), locale.getpreferredencoding(), 'replace') + u"\n" | 215 | msg += unicode(line.strip(), locale.getpreferredencoding(), 'replace') + u"\n" |
1297 | 216 | msg += u"===== End GnuPG log =====\n" | 216 | msg += u"===== End GnuPG log =====\n" |
1298 | 217 | if not (msg.find(u"invalid packet (ctb=14)") > -1): | 217 | if not (msg.find(u"invalid packet (ctb=14)") > -1): |
1300 | 218 | raise GPGError, msg | 218 | raise GPGError(msg) |
1301 | 219 | else: | 219 | else: |
1302 | 220 | return "" | 220 | return "" |
1303 | 221 | 221 | ||
1304 | 222 | 222 | ||
1305 | === modified file 'duplicity/gpginterface.py' | |||
1306 | --- duplicity/gpginterface.py 2013-12-27 06:39:00 +0000 | |||
1307 | +++ duplicity/gpginterface.py 2014-04-17 22:26:47 +0000 | |||
1308 | @@ -353,14 +353,14 @@ | |||
1309 | 353 | if attach_fhs == None: attach_fhs = {} | 353 | if attach_fhs == None: attach_fhs = {} |
1310 | 354 | 354 | ||
1311 | 355 | for std in _stds: | 355 | for std in _stds: |
1313 | 356 | if not attach_fhs.has_key(std) \ | 356 | if std not in attach_fhs \ |
1314 | 357 | and std not in create_fhs: | 357 | and std not in create_fhs: |
1315 | 358 | attach_fhs.setdefault(std, getattr(sys, std)) | 358 | attach_fhs.setdefault(std, getattr(sys, std)) |
1316 | 359 | 359 | ||
1317 | 360 | handle_passphrase = 0 | 360 | handle_passphrase = 0 |
1318 | 361 | 361 | ||
1319 | 362 | if self.passphrase != None \ | 362 | if self.passphrase != None \ |
1321 | 363 | and not attach_fhs.has_key('passphrase') \ | 363 | and 'passphrase' not in attach_fhs \ |
1322 | 364 | and 'passphrase' not in create_fhs: | 364 | and 'passphrase' not in create_fhs: |
1323 | 365 | handle_passphrase = 1 | 365 | handle_passphrase = 1 |
1324 | 366 | create_fhs.append('passphrase') | 366 | create_fhs.append('passphrase') |
1325 | @@ -384,18 +384,18 @@ | |||
1326 | 384 | process = Process() | 384 | process = Process() |
1327 | 385 | 385 | ||
1328 | 386 | for fh_name in create_fhs + attach_fhs.keys(): | 386 | for fh_name in create_fhs + attach_fhs.keys(): |
1331 | 387 | if not _fd_modes.has_key(fh_name): | 387 | if fh_name not in _fd_modes: |
1332 | 388 | raise KeyError, \ | 388 | raise KeyError( |
1333 | 389 | "unrecognized filehandle name '%s'; must be one of %s" \ | 389 | "unrecognized filehandle name '%s'; must be one of %s" \ |
1335 | 390 | % (fh_name, _fd_modes.keys()) | 390 | % (fh_name, _fd_modes.keys())) |
1336 | 391 | 391 | ||
1337 | 392 | for fh_name in create_fhs: | 392 | for fh_name in create_fhs: |
1338 | 393 | # make sure the user doesn't specify a filehandle | 393 | # make sure the user doesn't specify a filehandle |
1339 | 394 | # to be created *and* attached | 394 | # to be created *and* attached |
1342 | 395 | if attach_fhs.has_key(fh_name): | 395 | if fh_name in attach_fhs: |
1343 | 396 | raise ValueError, \ | 396 | raise ValueError( |
1344 | 397 | "cannot have filehandle '%s' in both create_fhs and attach_fhs" \ | 397 | "cannot have filehandle '%s' in both create_fhs and attach_fhs" \ |
1346 | 398 | % fh_name | 398 | % fh_name) |
1347 | 399 | 399 | ||
1348 | 400 | pipe = os.pipe() | 400 | pipe = os.pipe() |
1349 | 401 | # fix by drt@un.bewaff.net noting | 401 | # fix by drt@un.bewaff.net noting |
1350 | @@ -660,7 +660,7 @@ | |||
1351 | 660 | if self.returned == None: | 660 | if self.returned == None: |
1352 | 661 | self.thread.join() | 661 | self.thread.join() |
1353 | 662 | if self.returned != 0: | 662 | if self.returned != 0: |
1355 | 663 | raise IOError, "GnuPG exited non-zero, with code %d" % (self.returned >> 8) | 663 | raise IOError("GnuPG exited non-zero, with code %d" % (self.returned >> 8)) |
1356 | 664 | 664 | ||
1357 | 665 | 665 | ||
1358 | 666 | def threaded_waitpid(process): | 666 | def threaded_waitpid(process): |
1359 | 667 | 667 | ||
1360 | === modified file 'duplicity/librsync.py' | |||
1361 | --- duplicity/librsync.py 2010-11-20 15:39:00 +0000 | |||
1362 | +++ duplicity/librsync.py 2014-04-17 22:26:47 +0000 | |||
1363 | @@ -26,7 +26,7 @@ | |||
1364 | 26 | 26 | ||
1365 | 27 | """ | 27 | """ |
1366 | 28 | 28 | ||
1368 | 29 | import _librsync | 29 | from . import _librsync |
1369 | 30 | import types, array | 30 | import types, array |
1370 | 31 | 31 | ||
1371 | 32 | blocksize = _librsync.RS_JOB_BLOCKSIZE | 32 | blocksize = _librsync.RS_JOB_BLOCKSIZE |
1372 | @@ -90,7 +90,7 @@ | |||
1373 | 90 | self._add_to_inbuf() | 90 | self._add_to_inbuf() |
1374 | 91 | try: | 91 | try: |
1375 | 92 | self.eof, len_inbuf_read, cycle_out = self.maker.cycle(self.inbuf) | 92 | self.eof, len_inbuf_read, cycle_out = self.maker.cycle(self.inbuf) |
1377 | 93 | except _librsync.librsyncError, e: | 93 | except _librsync.librsyncError as e: |
1378 | 94 | raise librsyncError(str(e)) | 94 | raise librsyncError(str(e)) |
1379 | 95 | self.inbuf = self.inbuf[len_inbuf_read:] | 95 | self.inbuf = self.inbuf[len_inbuf_read:] |
1380 | 96 | self.outbuf.fromstring(cycle_out) | 96 | self.outbuf.fromstring(cycle_out) |
1381 | @@ -126,7 +126,7 @@ | |||
1382 | 126 | LikeFile.__init__(self, infile) | 126 | LikeFile.__init__(self, infile) |
1383 | 127 | try: | 127 | try: |
1384 | 128 | self.maker = _librsync.new_sigmaker(blocksize) | 128 | self.maker = _librsync.new_sigmaker(blocksize) |
1386 | 129 | except _librsync.librsyncError, e: | 129 | except _librsync.librsyncError as e: |
1387 | 130 | raise librsyncError(str(e)) | 130 | raise librsyncError(str(e)) |
1388 | 131 | 131 | ||
1389 | 132 | class DeltaFile(LikeFile): | 132 | class DeltaFile(LikeFile): |
1390 | @@ -148,7 +148,7 @@ | |||
1391 | 148 | assert not signature.close() | 148 | assert not signature.close() |
1392 | 149 | try: | 149 | try: |
1393 | 150 | self.maker = _librsync.new_deltamaker(sig_string) | 150 | self.maker = _librsync.new_deltamaker(sig_string) |
1395 | 151 | except _librsync.librsyncError, e: | 151 | except _librsync.librsyncError as e: |
1396 | 152 | raise librsyncError(str(e)) | 152 | raise librsyncError(str(e)) |
1397 | 153 | 153 | ||
1398 | 154 | 154 | ||
1399 | @@ -167,7 +167,7 @@ | |||
1400 | 167 | raise TypeError("basis_file must be a (true) file") | 167 | raise TypeError("basis_file must be a (true) file") |
1401 | 168 | try: | 168 | try: |
1402 | 169 | self.maker = _librsync.new_patchmaker(basis_file) | 169 | self.maker = _librsync.new_patchmaker(basis_file) |
1404 | 170 | except _librsync.librsyncError, e: | 170 | except _librsync.librsyncError as e: |
1405 | 171 | raise librsyncError(str(e)) | 171 | raise librsyncError(str(e)) |
1406 | 172 | 172 | ||
1407 | 173 | 173 | ||
1408 | @@ -182,7 +182,7 @@ | |||
1409 | 182 | """Return new signature instance""" | 182 | """Return new signature instance""" |
1410 | 183 | try: | 183 | try: |
1411 | 184 | self.sig_maker = _librsync.new_sigmaker(blocksize) | 184 | self.sig_maker = _librsync.new_sigmaker(blocksize) |
1413 | 185 | except _librsync.librsyncError, e: | 185 | except _librsync.librsyncError as e: |
1414 | 186 | raise librsyncError(str(e)) | 186 | raise librsyncError(str(e)) |
1415 | 187 | self.gotsig = None | 187 | self.gotsig = None |
1416 | 188 | self.buffer = "" | 188 | self.buffer = "" |
1417 | @@ -201,7 +201,7 @@ | |||
1418 | 201 | """Run self.buffer through sig_maker, add to self.sig_string""" | 201 | """Run self.buffer through sig_maker, add to self.sig_string""" |
1419 | 202 | try: | 202 | try: |
1420 | 203 | eof, len_buf_read, cycle_out = self.sig_maker.cycle(self.buffer) | 203 | eof, len_buf_read, cycle_out = self.sig_maker.cycle(self.buffer) |
1422 | 204 | except _librsync.librsyncError, e: | 204 | except _librsync.librsyncError as e: |
1423 | 205 | raise librsyncError(str(e)) | 205 | raise librsyncError(str(e)) |
1424 | 206 | self.buffer = self.buffer[len_buf_read:] | 206 | self.buffer = self.buffer[len_buf_read:] |
1425 | 207 | self.sigstring_list.append(cycle_out) | 207 | self.sigstring_list.append(cycle_out) |
1426 | 208 | 208 | ||
1427 | === modified file 'duplicity/patchdir.py' | |||
1428 | --- duplicity/patchdir.py 2013-12-27 06:39:00 +0000 | |||
1429 | +++ duplicity/patchdir.py 2014-04-17 22:26:47 +0000 | |||
1430 | @@ -504,7 +504,7 @@ | |||
1431 | 504 | if final_ropath.exists(): | 504 | if final_ropath.exists(): |
1432 | 505 | # otherwise final patch was delete | 505 | # otherwise final patch was delete |
1433 | 506 | yield final_ropath | 506 | yield final_ropath |
1435 | 507 | except Exception, e: | 507 | except Exception as e: |
1436 | 508 | filename = normalized[-1].get_ropath().get_relative_path() | 508 | filename = normalized[-1].get_ropath().get_relative_path() |
1437 | 509 | log.Warn(_("Error '%s' patching %s") % | 509 | log.Warn(_("Error '%s' patching %s") % |
1438 | 510 | (str(e), filename), | 510 | (str(e), filename), |
1439 | 511 | 511 | ||
1440 | === modified file 'duplicity/path.py' | |||
1441 | --- duplicity/path.py 2013-12-27 06:39:00 +0000 | |||
1442 | +++ duplicity/path.py 2014-04-17 22:26:47 +0000 | |||
1443 | @@ -500,7 +500,7 @@ | |||
1444 | 500 | """Refresh stat cache""" | 500 | """Refresh stat cache""" |
1445 | 501 | try: | 501 | try: |
1446 | 502 | self.stat = os.lstat(self.name) | 502 | self.stat = os.lstat(self.name) |
1448 | 503 | except OSError, e: | 503 | except OSError as e: |
1449 | 504 | err_string = errno.errorcode[e[0]] | 504 | err_string = errno.errorcode[e[0]] |
1450 | 505 | if err_string in ["ENOENT", "ENOTDIR", "ELOOP", "ENOTCONN"]: | 505 | if err_string in ["ENOENT", "ENOTDIR", "ELOOP", "ENOTCONN"]: |
1451 | 506 | self.stat, self.type = None, None # file doesn't exist | 506 | self.stat, self.type = None, None # file doesn't exist |
1452 | 507 | 507 | ||
1453 | === modified file 'duplicity/progress.py' | |||
1454 | --- duplicity/progress.py 2013-04-15 12:10:35 +0000 | |||
1455 | +++ duplicity/progress.py 2014-04-17 22:26:47 +0000 | |||
1456 | @@ -264,7 +264,7 @@ | |||
1457 | 264 | projection = 1.0 | 264 | projection = 1.0 |
1458 | 265 | if self.progress_estimation > 0: | 265 | if self.progress_estimation > 0: |
1459 | 266 | projection = (1.0 - self.progress_estimation) / self.progress_estimation | 266 | projection = (1.0 - self.progress_estimation) / self.progress_estimation |
1461 | 267 | self.time_estimation = long(projection * float(self.elapsed_sum.total_seconds())) | 267 | self.time_estimation = int(projection * float(self.elapsed_sum.total_seconds())) |
1462 | 268 | 268 | ||
1463 | 269 | # Apply values only when monotonic, so the estimates look more consistent to the human eye | 269 | # Apply values only when monotonic, so the estimates look more consistent to the human eye |
1464 | 270 | if self.progress_estimation < last_progress_estimation: | 270 | if self.progress_estimation < last_progress_estimation: |
1465 | @@ -299,7 +299,7 @@ | |||
1466 | 299 | volume and for the current volume | 299 | volume and for the current volume |
1467 | 300 | """ | 300 | """ |
1468 | 301 | changing = max(bytecount - self.last_bytecount, 0) | 301 | changing = max(bytecount - self.last_bytecount, 0) |
1470 | 302 | self.total_bytecount += long(changing) # Annotate only changing bytes since last probe | 302 | self.total_bytecount += int(changing) # Annotate only changing bytes since last probe |
1471 | 303 | self.last_bytecount = bytecount | 303 | self.last_bytecount = bytecount |
1472 | 304 | if changing > 0: | 304 | if changing > 0: |
1473 | 305 | self.stall_last_time = datetime.now() | 305 | self.stall_last_time = datetime.now() |
1474 | 306 | 306 | ||
1475 | === modified file 'duplicity/robust.py' | |||
1476 | --- duplicity/robust.py 2013-12-27 06:39:00 +0000 | |||
1477 | +++ duplicity/robust.py 2014-04-17 22:26:47 +0000 | |||
1478 | @@ -39,7 +39,7 @@ | |||
1479 | 39 | # RPathException, Rdiff.RdiffException, | 39 | # RPathException, Rdiff.RdiffException, |
1480 | 40 | # librsync.librsyncError, C.UnknownFileTypeError), exc: | 40 | # librsync.librsyncError, C.UnknownFileTypeError), exc: |
1481 | 41 | # TracebackArchive.add() | 41 | # TracebackArchive.add() |
1483 | 42 | except (IOError, EnvironmentError, librsync.librsyncError, path.PathException), exc: | 42 | except (IOError, EnvironmentError, librsync.librsyncError, path.PathException) as exc: |
1484 | 43 | if (not isinstance(exc, EnvironmentError) or | 43 | if (not isinstance(exc, EnvironmentError) or |
1485 | 44 | ((exc[0] in errno.errorcode) | 44 | ((exc[0] in errno.errorcode) |
1486 | 45 | and errno.errorcode[exc[0]] in | 45 | and errno.errorcode[exc[0]] in |
1487 | 46 | 46 | ||
1488 | === modified file 'duplicity/selection.py' | |||
1489 | --- duplicity/selection.py 2013-12-27 06:39:00 +0000 | |||
1490 | +++ duplicity/selection.py 2014-04-17 22:26:47 +0000 | |||
1491 | @@ -256,7 +256,7 @@ | |||
1492 | 256 | self.add_selection_func(self.regexp_get_sf(arg, 1)) | 256 | self.add_selection_func(self.regexp_get_sf(arg, 1)) |
1493 | 257 | else: | 257 | else: |
1494 | 258 | assert 0, "Bad selection option %s" % opt | 258 | assert 0, "Bad selection option %s" % opt |
1496 | 259 | except SelectError, e: | 259 | except SelectError as e: |
1497 | 260 | self.parse_catch_error(e) | 260 | self.parse_catch_error(e) |
1498 | 261 | assert filelists_index == len(filelists) | 261 | assert filelists_index == len(filelists) |
1499 | 262 | self.parse_last_excludes() | 262 | self.parse_last_excludes() |
1500 | @@ -351,7 +351,7 @@ | |||
1501 | 351 | continue # skip blanks | 351 | continue # skip blanks |
1502 | 352 | try: | 352 | try: |
1503 | 353 | tuple = self.filelist_parse_line(line, include) | 353 | tuple = self.filelist_parse_line(line, include) |
1505 | 354 | except FilePrefixError, exc: | 354 | except FilePrefixError as exc: |
1506 | 355 | incr_warnings(exc) | 355 | incr_warnings(exc) |
1507 | 356 | continue | 356 | continue |
1508 | 357 | tuple_list.append(tuple) | 357 | tuple_list.append(tuple) |
1509 | 358 | 358 | ||
1510 | === modified file 'duplicity/statistics.py' | |||
1511 | --- duplicity/statistics.py 2010-07-22 19:15:11 +0000 | |||
1512 | +++ duplicity/statistics.py 2014-04-17 22:26:47 +0000 | |||
1513 | @@ -104,7 +104,7 @@ | |||
1514 | 104 | if not index: | 104 | if not index: |
1515 | 105 | filename = "." | 105 | filename = "." |
1516 | 106 | else: | 106 | else: |
1518 | 107 | filename = apply(os.path.join, index) | 107 | filename = os.path.join(*index) |
1519 | 108 | if use_repr: | 108 | if use_repr: |
1520 | 109 | # use repr to quote newlines in relative filename, then | 109 | # use repr to quote newlines in relative filename, then |
1521 | 110 | # take of leading and trailing quote and quote spaces. | 110 | # take of leading and trailing quote and quote spaces. |
1522 | @@ -123,7 +123,7 @@ | |||
1523 | 123 | for attr, val_string in zip(self.stat_file_attrs, | 123 | for attr, val_string in zip(self.stat_file_attrs, |
1524 | 124 | lineparts[-len(self.stat_file_attrs):]): | 124 | lineparts[-len(self.stat_file_attrs):]): |
1525 | 125 | try: | 125 | try: |
1527 | 126 | val = long(val_string) | 126 | val = int(val_string) |
1528 | 127 | except ValueError: | 127 | except ValueError: |
1529 | 128 | try: | 128 | try: |
1530 | 129 | val = float(val_string) | 129 | val = float(val_string) |
1531 | @@ -230,7 +230,7 @@ | |||
1532 | 230 | error(line) | 230 | error(line) |
1533 | 231 | try: | 231 | try: |
1534 | 232 | try: | 232 | try: |
1536 | 233 | val1 = long(value_string) | 233 | val1 = int(value_string) |
1537 | 234 | except ValueError: | 234 | except ValueError: |
1538 | 235 | val1 = None | 235 | val1 = None |
1539 | 236 | val2 = float(value_string) | 236 | val2 = float(value_string) |
1540 | 237 | 237 | ||
1541 | === modified file 'duplicity/tempdir.py' | |||
1542 | --- duplicity/tempdir.py 2013-12-27 06:39:00 +0000 | |||
1543 | +++ duplicity/tempdir.py 2014-04-17 22:26:47 +0000 | |||
1544 | @@ -213,7 +213,7 @@ | |||
1545 | 213 | """ | 213 | """ |
1546 | 214 | self.__lock.acquire() | 214 | self.__lock.acquire() |
1547 | 215 | try: | 215 | try: |
1549 | 216 | if self.__pending.has_key(fname): | 216 | if fname in self.__pending: |
1550 | 217 | log.Debug(_("Forgetting temporary file %s") % util.ufn(fname)) | 217 | log.Debug(_("Forgetting temporary file %s") % util.ufn(fname)) |
1551 | 218 | del(self.__pending[fname]) | 218 | del(self.__pending[fname]) |
1552 | 219 | else: | 219 | else: |
1553 | 220 | 220 | ||
1554 | === modified file 'duplicity/util.py' | |||
1555 | --- duplicity/util.py 2014-01-17 16:44:46 +0000 | |||
1556 | +++ duplicity/util.py 2014-04-17 22:26:47 +0000 | |||
1557 | @@ -80,7 +80,7 @@ | |||
1558 | 80 | """ | 80 | """ |
1559 | 81 | try: | 81 | try: |
1560 | 82 | return fn() | 82 | return fn() |
1562 | 83 | except Exception, e: | 83 | except Exception as e: |
1563 | 84 | if globals.ignore_errors: | 84 | if globals.ignore_errors: |
1564 | 85 | log.Warn(_("IGNORED_ERROR: Warning: ignoring error as requested: %s: %s") | 85 | log.Warn(_("IGNORED_ERROR: Warning: ignoring error as requested: %s: %s") |
1565 | 86 | % (e.__class__.__name__, str(e))) | 86 | % (e.__class__.__name__, str(e))) |
1566 | @@ -131,7 +131,7 @@ | |||
1567 | 131 | """ | 131 | """ |
1568 | 132 | try: | 132 | try: |
1569 | 133 | fn(filename) | 133 | fn(filename) |
1571 | 134 | except OSError, ex: | 134 | except OSError as ex: |
1572 | 135 | if ex.errno == errno.ENOENT: | 135 | if ex.errno == errno.ENOENT: |
1573 | 136 | pass | 136 | pass |
1574 | 137 | else: | 137 | else: |
1575 | 138 | 138 | ||
1576 | === modified file 'testing/gnupg/trustdb.gpg' | |||
1577 | 139 | Binary files testing/gnupg/trustdb.gpg 2011-11-04 12:48:04 +0000 and testing/gnupg/trustdb.gpg 2014-04-17 22:26:47 +0000 differ | 139 | Binary files testing/gnupg/trustdb.gpg 2011-11-04 12:48:04 +0000 and testing/gnupg/trustdb.gpg 2014-04-17 22:26:47 +0000 differ |
1578 | === modified file 'testing/tests/test_badupload.py' | |||
1579 | --- testing/tests/test_badupload.py 2014-04-16 02:43:43 +0000 | |||
1580 | +++ testing/tests/test_badupload.py 2014-04-17 22:26:47 +0000 | |||
1581 | @@ -36,7 +36,7 @@ | |||
1582 | 36 | try: | 36 | try: |
1583 | 37 | self.backup("full", "testfiles/dir1", options=["--skip-volume=1"]) | 37 | self.backup("full", "testfiles/dir1", options=["--skip-volume=1"]) |
1584 | 38 | self.fail() | 38 | self.fail() |
1586 | 39 | except CmdError, e: | 39 | except CmdError as e: |
1587 | 40 | self.assertEqual(e.exit_status, 44) | 40 | self.assertEqual(e.exit_status, 44) |
1588 | 41 | 41 | ||
1589 | 42 | if __name__ == "__main__": | 42 | if __name__ == "__main__": |
1590 | 43 | 43 | ||
1591 | === modified file 'testing/tests/test_collections.py' | |||
1592 | --- testing/tests/test_collections.py 2014-04-16 02:43:43 +0000 | |||
1593 | +++ testing/tests/test_collections.py 2014-04-17 22:26:47 +0000 | |||
1594 | @@ -111,8 +111,8 @@ | |||
1595 | 111 | assert 0 | 111 | assert 0 |
1596 | 112 | 112 | ||
1597 | 113 | chain = chains[0] | 113 | chain = chains[0] |
1600 | 114 | assert chain.end_time == 1029654270L | 114 | assert chain.end_time == 1029654270 |
1601 | 115 | assert chain.fullset.time == 1029626221L | 115 | assert chain.fullset.time == 1029626221 |
1602 | 116 | 116 | ||
1603 | 117 | def test_collections_status(self): | 117 | def test_collections_status(self): |
1604 | 118 | """Test CollectionStatus object's set_values()""" | 118 | """Test CollectionStatus object's set_values()""" |
1605 | @@ -121,7 +121,7 @@ | |||
1606 | 121 | assert cs.values_set | 121 | assert cs.values_set |
1607 | 122 | 122 | ||
1608 | 123 | assert cs.matched_chain_pair | 123 | assert cs.matched_chain_pair |
1610 | 124 | assert cs.matched_chain_pair[0].end_time == 1029826800L | 124 | assert cs.matched_chain_pair[0].end_time == 1029826800 |
1611 | 125 | assert len(cs.all_backup_chains) == 1, cs.all_backup_chains | 125 | assert len(cs.all_backup_chains) == 1, cs.all_backup_chains |
1612 | 126 | 126 | ||
1613 | 127 | cs = collections.CollectionsStatus(self.real_backend, globals.archive_dir).set_values() | 127 | cs = collections.CollectionsStatus(self.real_backend, globals.archive_dir).set_values() |
1614 | @@ -153,7 +153,7 @@ | |||
1615 | 153 | for op in orphaned_paths: print op | 153 | for op in orphaned_paths: print op |
1616 | 154 | assert 0 | 154 | assert 0 |
1617 | 155 | assert len(chains) == 1, chains | 155 | assert len(chains) == 1, chains |
1619 | 156 | assert chains[0].end_time == 1029826800L | 156 | assert chains[0].end_time == 1029826800 |
1620 | 157 | 157 | ||
1621 | 158 | def sigchain_fileobj_get(self, local): | 158 | def sigchain_fileobj_get(self, local): |
1622 | 159 | """Return chain, local if local is true with filenames added""" | 159 | """Return chain, local if local is true with filenames added""" |
1623 | 160 | 160 | ||
1624 | === modified file 'testing/tests/test_filenaming.py' | |||
1625 | --- testing/tests/test_filenaming.py 2014-04-16 02:43:43 +0000 | |||
1626 | +++ testing/tests/test_filenaming.py 2014-04-17 22:26:47 +0000 | |||
1627 | @@ -88,13 +88,13 @@ | |||
1628 | 88 | pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dns.h112bi.h14rg0.st.g") | 88 | pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dns.h112bi.h14rg0.st.g") |
1629 | 89 | assert pr, pr | 89 | assert pr, pr |
1630 | 90 | assert pr.type == "new-sig" | 90 | assert pr.type == "new-sig" |
1632 | 91 | assert pr.end_time == 1029826800L | 91 | assert pr.end_time == 1029826800 |
1633 | 92 | 92 | ||
1634 | 93 | if not globals.short_filenames: | 93 | if not globals.short_filenames: |
1635 | 94 | pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg") | 94 | pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg") |
1636 | 95 | assert pr, pr | 95 | assert pr, pr |
1637 | 96 | assert pr.type == "new-sig" | 96 | assert pr.type == "new-sig" |
1639 | 97 | assert pr.end_time == 1029826800L | 97 | assert pr.end_time == 1029826800 |
1640 | 98 | 98 | ||
1641 | 99 | pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dfs.h5dixs.st.g") | 99 | pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dfs.h5dixs.st.g") |
1642 | 100 | assert pr, pr | 100 | assert pr, pr |
1643 | @@ -108,14 +108,14 @@ | |||
1644 | 108 | assert pr, pr | 108 | assert pr, pr |
1645 | 109 | assert pr.partial | 109 | assert pr.partial |
1646 | 110 | assert pr.type == "new-sig" | 110 | assert pr.type == "new-sig" |
1648 | 111 | assert pr.end_time == 1029826800L | 111 | assert pr.end_time == 1029826800 |
1649 | 112 | 112 | ||
1650 | 113 | if not globals.short_filenames: | 113 | if not globals.short_filenames: |
1651 | 114 | pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg") | 114 | pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg") |
1652 | 115 | assert pr, pr | 115 | assert pr, pr |
1653 | 116 | assert pr.partial | 116 | assert pr.partial |
1654 | 117 | assert pr.type == "new-sig" | 117 | assert pr.type == "new-sig" |
1656 | 118 | assert pr.end_time == 1029826800L | 118 | assert pr.end_time == 1029826800 |
1657 | 119 | 119 | ||
1658 | 120 | pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dfs.h5dixs.st.p.g") | 120 | pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dfs.h5dixs.st.p.g") |
1659 | 121 | assert pr, pr | 121 | assert pr, pr |
1660 | 122 | 122 | ||
1661 | === modified file 'testing/tests/test_lazy.py' | |||
1662 | --- testing/tests/test_lazy.py 2014-04-16 02:43:43 +0000 | |||
1663 | +++ testing/tests/test_lazy.py 2014-04-17 22:26:47 +0000 | |||
1664 | @@ -21,6 +21,7 @@ | |||
1665 | 21 | 21 | ||
1666 | 22 | import helper | 22 | import helper |
1667 | 23 | import unittest, pickle, sys | 23 | import unittest, pickle, sys |
1668 | 24 | from functools import reduce | ||
1669 | 24 | 25 | ||
1670 | 25 | from duplicity.lazy import * #@UnusedWildImport | 26 | from duplicity.lazy import * #@UnusedWildImport |
1671 | 26 | 27 | ||
1672 | @@ -33,7 +34,7 @@ | |||
1673 | 33 | empty = lambda s: iter([]) | 34 | empty = lambda s: iter([]) |
1674 | 34 | 35 | ||
1675 | 35 | def __init__(self, *args): | 36 | def __init__(self, *args): |
1677 | 36 | apply (unittest.TestCase.__init__, (self,) + args) | 37 | unittest.TestCase.__init__(self, *args) |
1678 | 37 | self.falseerror = self.falseerror_maker() | 38 | self.falseerror = self.falseerror_maker() |
1679 | 38 | self.trueerror = self.trueerror_maker() | 39 | self.trueerror = self.trueerror_maker() |
1680 | 39 | self.emptygen = self.emptygen_maker() | 40 | self.emptygen = self.emptygen_maker() |
1681 | 40 | 41 | ||
1682 | === modified file 'testing/tests/test_patchdir.py' | |||
1683 | --- testing/tests/test_patchdir.py 2014-04-16 02:43:43 +0000 | |||
1684 | +++ testing/tests/test_patchdir.py 2014-04-17 22:26:47 +0000 | |||
1685 | @@ -209,12 +209,12 @@ | |||
1686 | 209 | self.out = out | 209 | self.out = out |
1687 | 210 | 210 | ||
1688 | 211 | def snapshot(self): | 211 | def snapshot(self): |
1690 | 212 | """Make a snapshot ROPath, permissions 0600""" | 212 | """Make a snapshot ROPath, permissions 0o600""" |
1691 | 213 | ss = self.out.append("snapshot") | 213 | ss = self.out.append("snapshot") |
1692 | 214 | fout = ss.open("wb") | 214 | fout = ss.open("wb") |
1693 | 215 | fout.write("hello, world!") | 215 | fout.write("hello, world!") |
1694 | 216 | assert not fout.close() | 216 | assert not fout.close() |
1696 | 217 | ss.chmod(0600) | 217 | ss.chmod(0o600) |
1697 | 218 | ss.difftype = "snapshot" | 218 | ss.difftype = "snapshot" |
1698 | 219 | return ss | 219 | return ss |
1699 | 220 | 220 | ||
1700 | @@ -230,24 +230,24 @@ | |||
1701 | 230 | return deltabuf | 230 | return deltabuf |
1702 | 231 | 231 | ||
1703 | 232 | def delta1(self): | 232 | def delta1(self): |
1705 | 233 | """Make a delta ROPath, permissions 0640""" | 233 | """Make a delta ROPath, permissions 0o640""" |
1706 | 234 | delta1 = self.out.append("delta1") | 234 | delta1 = self.out.append("delta1") |
1707 | 235 | fout = delta1.open("wb") | 235 | fout = delta1.open("wb") |
1708 | 236 | fout.write(self.get_delta("hello, world!", | 236 | fout.write(self.get_delta("hello, world!", |
1709 | 237 | "aonseuth aosetnuhaonsuhtansoetuhaoe")) | 237 | "aonseuth aosetnuhaonsuhtansoetuhaoe")) |
1710 | 238 | assert not fout.close() | 238 | assert not fout.close() |
1712 | 239 | delta1.chmod(0640) | 239 | delta1.chmod(0o640) |
1713 | 240 | delta1.difftype = "diff" | 240 | delta1.difftype = "diff" |
1714 | 241 | return delta1 | 241 | return delta1 |
1715 | 242 | 242 | ||
1716 | 243 | def delta2(self): | 243 | def delta2(self): |
1718 | 244 | """Make another delta ROPath, permissions 0644""" | 244 | """Make another delta ROPath, permissions 0o644""" |
1719 | 245 | delta2 = self.out.append("delta1") | 245 | delta2 = self.out.append("delta1") |
1720 | 246 | fout = delta2.open("wb") | 246 | fout = delta2.open("wb") |
1721 | 247 | fout.write(self.get_delta("aonseuth aosetnuhaonsuhtansoetuhaoe", | 247 | fout.write(self.get_delta("aonseuth aosetnuhaonsuhtansoetuhaoe", |
1722 | 248 | "3499 34957839485792357 458348573")) | 248 | "3499 34957839485792357 458348573")) |
1723 | 249 | assert not fout.close() | 249 | assert not fout.close() |
1725 | 250 | delta2.chmod(0644) | 250 | delta2.chmod(0o644) |
1726 | 251 | delta2.difftype = "diff" | 251 | delta2.difftype = "diff" |
1727 | 252 | return delta2 | 252 | return delta2 |
1728 | 253 | 253 | ||
1729 | 254 | 254 | ||
1730 | === added file 'testing/tests/test_python3.py' | |||
1731 | --- testing/tests/test_python3.py 1970-01-01 00:00:00 +0000 | |||
1732 | +++ testing/tests/test_python3.py 2014-04-17 22:26:47 +0000 | |||
1733 | @@ -0,0 +1,61 @@ | |||
1734 | 1 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- | ||
1735 | 2 | # | ||
1736 | 3 | # Copyright 2014 Michael Terry <michael.terry@canonical.com> | ||
1737 | 4 | # | ||
1738 | 5 | # This file is part of duplicity. | ||
1739 | 6 | # | ||
1740 | 7 | # Duplicity is free software; you can redistribute it and/or modify it | ||
1741 | 8 | # under the terms of the GNU General Public License as published by the | ||
1742 | 9 | # Free Software Foundation; either version 2 of the License, or (at your | ||
1743 | 10 | # option) any later version. | ||
1744 | 11 | # | ||
1745 | 12 | # Duplicity is distributed in the hope that it will be useful, but | ||
1746 | 13 | # WITHOUT ANY WARRANTY; without even the implied warranty of | ||
1747 | 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
1748 | 15 | # General Public License for more details. | ||
1749 | 16 | # | ||
1750 | 17 | # You should have received a copy of the GNU General Public License | ||
1751 | 18 | # along with duplicity; if not, write to the Free Software Foundation, | ||
1752 | 19 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | ||
1753 | 20 | |||
1754 | 21 | import helper | ||
1755 | 22 | import os | ||
1756 | 23 | import subprocess | ||
1757 | 24 | import unittest | ||
1758 | 25 | |||
1759 | 26 | helper.setup() | ||
1760 | 27 | |||
1761 | 28 | |||
1762 | 29 | class Python3ReadinessTest(unittest.TestCase): | ||
1763 | 30 | def test_2to3(self): | ||
1764 | 31 | _top_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), | ||
1765 | 32 | "..", "..") | ||
1766 | 33 | |||
1767 | 34 | # As we modernize the source code, we can remove more and more nofixes | ||
1768 | 35 | process = subprocess.Popen(["2to3", | ||
1769 | 36 | "--nofix=dict", | ||
1770 | 37 | "--nofix=filter", | ||
1771 | 38 | "--nofix=map", | ||
1772 | 39 | "--nofix=next", | ||
1773 | 40 | "--nofix=print", | ||
1774 | 41 | "--nofix=types", | ||
1775 | 42 | "--nofix=unicode", | ||
1776 | 43 | "--nofix=xrange", | ||
1777 | 44 | # The following fixes we don't want to remove, since they are false | ||
1778 | 45 | # positives, things we don't care about, or real incompatibilities | ||
1779 | 46 | # but which 2to3 can fix for us better automatically. | ||
1780 | 47 | "--nofix=callable", | ||
1781 | 48 | "--nofix=future", | ||
1782 | 49 | "--nofix=imports", | ||
1783 | 50 | "--nofix=raw_input", | ||
1784 | 51 | "--nofix=urllib", | ||
1785 | 52 | _top_dir], | ||
1786 | 53 | stdout=subprocess.PIPE, | ||
1787 | 54 | stderr=subprocess.PIPE) | ||
1788 | 55 | output = process.communicate()[0] | ||
1789 | 56 | self.assertEqual(0, process.returncode) | ||
1790 | 57 | self.assertEqual("", output, output) | ||
1791 | 58 | |||
1792 | 59 | |||
1793 | 60 | if __name__ == "__main__": | ||
1794 | 61 | unittest.main() | ||
1795 | 0 | 62 | ||
1796 | === modified file 'testing/tests/test_restart.py' | |||
1797 | --- testing/tests/test_restart.py 2014-04-16 02:43:43 +0000 | |||
1798 | +++ testing/tests/test_restart.py 2014-04-17 22:26:47 +0000 | |||
1799 | @@ -326,7 +326,7 @@ | |||
1800 | 326 | self.backup("full", "testfiles/blocktartest") | 326 | self.backup("full", "testfiles/blocktartest") |
1801 | 327 | # Create an exact clone of the snapshot folder in the sigtar already. | 327 | # Create an exact clone of the snapshot folder in the sigtar already. |
1802 | 328 | # Permissions and mtime must match. | 328 | # Permissions and mtime must match. |
1804 | 329 | os.mkdir("testfiles/snapshot", 0755) | 329 | os.mkdir("testfiles/snapshot", 0o755) |
1805 | 330 | os.utime("testfiles/snapshot", (1030384548, 1030384548)) | 330 | os.utime("testfiles/snapshot", (1030384548, 1030384548)) |
1806 | 331 | # Adjust the sigtar.gz file to have a bogus second snapshot/ entry | 331 | # Adjust the sigtar.gz file to have a bogus second snapshot/ entry |
1807 | 332 | # at the beginning. | 332 | # at the beginning. |