Merge lp:~mterry/duplicity/2.6isms into lp:duplicity/0.6

Proposed by Michael Terry
Status: Merged
Merged at revision: 975
Proposed branch: lp:~mterry/duplicity/2.6isms
Merge into: lp:duplicity/0.6
Diff against target: 1807 lines (+259/-196)
46 files modified
bin/duplicity (+7/-7)
bin/rdiffdir (+1/-1)
duplicity/backend.py (+4/-4)
duplicity/backends/_boto_multi.py (+4/-4)
duplicity/backends/_boto_single.py (+8/-8)
duplicity/backends/_cf_cloudfiles.py (+13/-13)
duplicity/backends/_cf_pyrax.py (+13/-13)
duplicity/backends/_ssh_paramiko.py (+20/-20)
duplicity/backends/botobackend.py (+2/-2)
duplicity/backends/cfbackend.py (+2/-2)
duplicity/backends/dpbxbackend.py (+6/-5)
duplicity/backends/gdocsbackend.py (+7/-7)
duplicity/backends/giobackend.py (+6/-6)
duplicity/backends/imapbackend.py (+1/-1)
duplicity/backends/localbackend.py (+6/-6)
duplicity/backends/megabackend.py (+6/-6)
duplicity/backends/sshbackend.py (+2/-2)
duplicity/backends/swiftbackend.py (+17/-17)
duplicity/backends/webdavbackend.py (+5/-5)
duplicity/cached_ops.py (+1/-1)
duplicity/collections.py (+3/-3)
duplicity/commandline.py (+1/-1)
duplicity/diffdir.py (+4/-4)
duplicity/dup_temp.py (+3/-3)
duplicity/dup_threading.py (+2/-2)
duplicity/dup_time.py (+4/-4)
duplicity/file_naming.py (+1/-1)
duplicity/globals.py (+1/-1)
duplicity/gpg.py (+1/-1)
duplicity/gpginterface.py (+9/-9)
duplicity/librsync.py (+7/-7)
duplicity/patchdir.py (+1/-1)
duplicity/path.py (+1/-1)
duplicity/progress.py (+2/-2)
duplicity/robust.py (+1/-1)
duplicity/selection.py (+2/-2)
duplicity/statistics.py (+3/-3)
duplicity/tempdir.py (+1/-1)
duplicity/util.py (+2/-2)
testing/tests/test_badupload.py (+1/-1)
testing/tests/test_collections.py (+4/-4)
testing/tests/test_filenaming.py (+4/-4)
testing/tests/test_lazy.py (+2/-1)
testing/tests/test_patchdir.py (+6/-6)
testing/tests/test_python3.py (+61/-0)
testing/tests/test_restart.py (+1/-1)
To merge this branch: bzr merge lp:~mterry/duplicity/2.6isms
Reviewer: duplicity-team, review pending
Review via email: mp+216404@code.launchpad.net

Description of the change

Here's a whole stack of minor syntax modernizations that will become necessary in Python 3. They all work in Python 2.6.
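
For illustration, these are the kinds of dual-compatible spellings the branch moves to (a hedged sketch assembled from the patterns visible in the diff below, not an excerpt of it; the BLOCKSIZE environment variable is just a placeholder):

    # Spellings accepted by both Python 2.6 and Python 3.
    import os
    import sys

    # "except ... as e" replaces the old "except ..., e" syntax.
    try:
        blocksize = int(os.environ['BLOCKSIZE'])   # int() replaces long()
    except (KeyError, ValueError) as e:
        blocksize = 2048

    # "'KEY' in os.environ" replaces os.environ.has_key('KEY').
    if 'CLOUDFILES_USERNAME' not in os.environ:
        username = None
    else:
        username = os.environ['CLOUDFILES_USERNAME']

    # 0o600 is the octal literal form; 0600 is a syntax error in Python 3.
    mode = 0o600

    # sys.maxsize replaces sys.maxint, which is gone in Python 3.
    size_limit = sys.maxsize

    # "raise SomeError(msg)" replaces "raise SomeError, msg".
    def require(value):
        if value is None:
            raise ValueError("no value given")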

I've added a new test to keep us honest and prevent backsliding on these modernizations. It runs 2to3 and fails if 2to3 finds anything that still needs fixing (with a specific set of exceptions carved out).
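
The new test is testing/tests/test_python3.py in the file list above; its contents are not reproduced in this excerpt. As a rough sketch of the general approach (assuming the 2to3 console script is on the PATH, and with a hypothetical list of excluded fixers), such a guard test might look like:

    import subprocess
    import unittest

    class Python3ReadinessTest(unittest.TestCase):
        # Hypothetical carve-outs: fixers whose findings are tolerated for now.
        EXCLUDED_FIXERS = ['callable', 'dict', 'filter']

        def test_2to3_finds_nothing(self):
            args = ['2to3']
            for fixer in self.EXCLUDED_FIXERS:
                args.extend(['--nofix', fixer])
            args.append('duplicity')
            proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            out, err = proc.communicate()
            # 2to3 writes a unified diff to stdout for every change it would
            # still make; an empty stdout means there is nothing left to fix.
            self.assertFalse(out.strip(), out)

    if __name__ == '__main__':
        unittest.main()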

This branch has most of the easy 2to3 fixes, the ones with obvious and safe syntax changes.

We could just let 2to3 make these changes for us, but ideally we rely on 2to3 as little as possible, since it doesn't always know how to solve a given problem. I will propose a later branch that does use 2to3 to generate Python 3 versions of duplicity on request. But this is a first step toward cleaning up the code base.


Preview Diff

1=== modified file 'bin/duplicity'
2--- bin/duplicity 2014-04-16 02:43:43 +0000
3+++ bin/duplicity 2014-04-17 22:26:47 +0000
4@@ -1042,7 +1042,7 @@
5 log.Notice(_("Deleting local %s (not authoritative at backend).") % util.ufn(del_name))
6 try:
7 util.ignore_missing(os.unlink, del_name)
8- except Exception, e:
9+ except Exception as e:
10 log.Warn(_("Unable to delete %s: %s") % (util.ufn(del_name), str(e)))
11
12 def copy_to_local(fn):
13@@ -1505,18 +1505,18 @@
14 # sys.exit() function. Python handles this by
15 # raising the SystemExit exception. Cleanup code
16 # goes here, if needed.
17- except SystemExit, e:
18+ except SystemExit as e:
19 # No traceback, just get out
20 util.release_lockfile()
21 sys.exit(e)
22
23- except KeyboardInterrupt, e:
24+ except KeyboardInterrupt as e:
25 # No traceback, just get out
26 log.Info(_("INT intercepted...exiting."))
27 util.release_lockfile()
28 sys.exit(4)
29
30- except gpg.GPGError, e:
31+ except gpg.GPGError as e:
32 # For gpg errors, don't show an ugly stack trace by
33 # default. But do with sufficient verbosity.
34 util.release_lockfile()
35@@ -1526,7 +1526,7 @@
36 log.ErrorCode.gpg_failed,
37 e.__class__.__name__)
38
39- except duplicity.errors.UserError, e:
40+ except duplicity.errors.UserError as e:
41 util.release_lockfile()
42 # For user errors, don't show an ugly stack trace by
43 # default. But do with sufficient verbosity.
44@@ -1536,7 +1536,7 @@
45 log.ErrorCode.user_error,
46 e.__class__.__name__)
47
48- except duplicity.errors.BackendException, e:
49+ except duplicity.errors.BackendException as e:
50 util.release_lockfile()
51 # For backend errors, don't show an ugly stack trace by
52 # default. But do with sufficient verbosity.
53@@ -1546,7 +1546,7 @@
54 log.ErrorCode.user_error,
55 e.__class__.__name__)
56
57- except Exception, e:
58+ except Exception as e:
59 util.release_lockfile()
60 if "Forced assertion for testing" in str(e):
61 log.FatalError(u"%s: %s" % (e.__class__.__name__, unicode(e)),
62
63=== modified file 'bin/rdiffdir'
64--- bin/rdiffdir 2014-02-05 02:57:01 +0000
65+++ bin/rdiffdir 2014-04-17 22:26:47 +0000
66@@ -64,7 +64,7 @@
67 "include-filelist-stdin", "include-globbing-filelist",
68 "include-regexp=", "max-blocksize", "null-separator",
69 "verbosity=", "write-sig-to="])
70- except getopt.error, e:
71+ except getopt.error as e:
72 command_line_error("Bad command line option: %s" % (str(e),))
73
74 for opt, arg in optlist:
75
76=== modified file 'duplicity/backend.py'
77--- duplicity/backend.py 2014-04-17 19:02:22 +0000
78+++ duplicity/backend.py 2014-04-17 22:26:47 +0000
79@@ -306,7 +306,7 @@
80 try:
81 kwargs = {"raise_errors" : True}
82 return fn(*args, **kwargs)
83- except Exception, e:
84+ except Exception as e:
85 log.Warn(_("Attempt %s failed: %s: %s")
86 % (n, e.__class__.__name__, str(e)))
87 log.Debug(_("Backtrace of previous error: %s")
88@@ -332,10 +332,10 @@
89 try:
90 self.retry_count = n
91 return fn(self, *args)
92- except FatalBackendError, e:
93+ except FatalBackendError as e:
94 # die on fatal errors
95 raise e
96- except Exception, e:
97+ except Exception as e:
98 # retry on anything else
99 log.Warn(_("Attempt %s failed. %s: %s")
100 % (n, e.__class__.__name__, str(e)))
101@@ -345,7 +345,7 @@
102 # final trial, die on exception
103 self.retry_count = n+1
104 return fn(self, *args)
105- except Exception, e:
106+ except Exception as e:
107 log.Debug(_("Backtrace of previous error: %s")
108 % exception_traceback())
109 log.FatalError(_("Giving up after %s attempts. %s: %s")
110
111=== modified file 'duplicity/backends/_boto_multi.py'
112--- duplicity/backends/_boto_multi.py 2014-04-09 09:22:27 +0000
113+++ duplicity/backends/_boto_multi.py 2014-04-17 22:26:47 +0000
114@@ -33,8 +33,8 @@
115 from duplicity.filechunkio import FileChunkIO
116 from duplicity import progress
117
118-from _boto_single import BotoBackend as BotoSingleBackend
119-from _boto_single import get_connection
120+from ._boto_single import BotoBackend as BotoSingleBackend
121+from ._boto_single import get_connection
122
123 BOTO_MIN_VERSION = "2.1.1"
124
125@@ -63,7 +63,7 @@
126 try:
127 args = self.queue.get(True, 1)
128 progress.report_transfer(args[0], args[1])
129- except Queue.Empty, e:
130+ except Queue.Empty as e:
131 pass
132
133
134@@ -210,7 +210,7 @@
135 conn = None
136 bucket = None
137 del conn
138- except Exception, e:
139+ except Exception as e:
140 traceback.print_exc()
141 if num_retries:
142 log.Debug("%s: Upload of chunk %d failed. Retrying %d more times..." % (
143
144=== modified file 'duplicity/backends/_boto_single.py'
145--- duplicity/backends/_boto_single.py 2014-04-09 09:55:21 +0000
146+++ duplicity/backends/_boto_single.py 2014-04-17 22:26:47 +0000
147@@ -202,7 +202,7 @@
148 try:
149 try:
150 self.bucket = self.conn.get_bucket(self.bucket_name, validate=True)
151- except Exception, e:
152+ except Exception as e:
153 if "NoSuchBucket" in str(e):
154 if globals.s3_european_buckets:
155 self.bucket = self.conn.create_bucket(self.bucket_name,
156@@ -211,7 +211,7 @@
157 self.bucket = self.conn.create_bucket(self.bucket_name)
158 else:
159 raise e
160- except Exception, e:
161+ except Exception as e:
162 log.Warn("Failed to create bucket (attempt #%d) '%s' failed (reason: %s: %s)"
163 "" % (n, self.bucket_name,
164 e.__class__.__name__,
165@@ -252,7 +252,7 @@
166 self.resetConnection()
167 log.Debug("Uploaded %s/%s to %s Storage at roughly %f bytes/second" % (self.straight_url, remote_filename, storage_class, rough_upload_speed))
168 return
169- except Exception, e:
170+ except Exception as e:
171 log.Warn("Upload '%s/%s' failed (attempt #%d, reason: %s: %s)"
172 "" % (self.straight_url,
173 remote_filename,
174@@ -279,7 +279,7 @@
175 key.get_contents_to_filename(local_path.name)
176 local_path.setdata()
177 return
178- except Exception, e:
179+ except Exception as e:
180 log.Warn("Download %s/%s failed (attempt #%d, reason: %s: %s)"
181 "" % (self.straight_url,
182 remote_filename,
183@@ -304,7 +304,7 @@
184 log.Info("Listing %s" % self.straight_url)
185 try:
186 return self._list_filenames_in_bucket()
187- except Exception, e:
188+ except Exception as e:
189 log.Warn("List %s failed (attempt #%d, reason: %s: %s)"
190 "" % (self.straight_url,
191 n,
192@@ -348,7 +348,7 @@
193 if key is None:
194 return {'size': -1}
195 return {'size': key.size}
196- except Exception, e:
197+ except Exception as e:
198 log.Warn("Query %s/%s failed: %s"
199 "" % (self.straight_url,
200 filename,
201@@ -368,7 +368,7 @@
202
203 def pre_process_download(self, files_to_download, wait=False):
204 # Used primarily to move files in Glacier to S3
205- if isinstance(files_to_download, basestring):
206+ if isinstance(files_to_download, (bytes, str, unicode)):
207 files_to_download = [files_to_download]
208
209 for remote_filename in files_to_download:
210@@ -397,7 +397,7 @@
211 log.Info("File %s was successfully restored from Glacier" % remote_filename)
212 success = True
213 break
214- except Exception, e:
215+ except Exception as e:
216 log.Warn("Restoration from Glacier for file %s/%s failed (attempt #%d, reason: %s: %s)"
217 "" % (self.straight_url,
218 remote_filename,
219
220=== modified file 'duplicity/backends/_cf_cloudfiles.py'
221--- duplicity/backends/_cf_cloudfiles.py 2013-12-27 06:39:00 +0000
222+++ duplicity/backends/_cf_cloudfiles.py 2014-04-17 22:26:47 +0000
223@@ -44,17 +44,17 @@
224 self.resp_exc = ResponseError
225 conn_kwargs = {}
226
227- if not os.environ.has_key('CLOUDFILES_USERNAME'):
228+ if 'CLOUDFILES_USERNAME' not in os.environ:
229 raise BackendException('CLOUDFILES_USERNAME environment variable'
230 'not set.')
231
232- if not os.environ.has_key('CLOUDFILES_APIKEY'):
233+ if 'CLOUDFILES_APIKEY' not in os.environ:
234 raise BackendException('CLOUDFILES_APIKEY environment variable not set.')
235
236 conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME']
237 conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY']
238
239- if os.environ.has_key('CLOUDFILES_AUTHURL'):
240+ if 'CLOUDFILES_AUTHURL' in os.environ:
241 conn_kwargs['authurl'] = os.environ['CLOUDFILES_AUTHURL']
242 else:
243 conn_kwargs['authurl'] = consts.default_authurl
244@@ -63,7 +63,7 @@
245
246 try:
247 conn = Connection(**conn_kwargs)
248- except Exception, e:
249+ except Exception as e:
250 log.FatalError("Connection failed, please check your credentials: %s %s"
251 % (e.__class__.__name__, str(e)),
252 log.ErrorCode.connection_failed)
253@@ -79,10 +79,10 @@
254 sobject = self.container.create_object(remote_filename)
255 sobject.load_from_filename(source_path.name)
256 return
257- except self.resp_exc, error:
258+ except self.resp_exc as error:
259 log.Warn("Upload of '%s' failed (attempt %d): CloudFiles returned: %s %s"
260 % (remote_filename, n, error.status, error.reason))
261- except Exception, e:
262+ except Exception as e:
263 log.Warn("Upload of '%s' failed (attempt %s): %s: %s"
264 % (remote_filename, n, e.__class__.__name__, str(e)))
265 log.Debug("Backtrace of previous error: %s"
266@@ -102,10 +102,10 @@
267 f.write(chunk)
268 local_path.setdata()
269 return
270- except self.resp_exc, resperr:
271+ except self.resp_exc as resperr:
272 log.Warn("Download of '%s' failed (attempt %s): CloudFiles returned: %s %s"
273 % (remote_filename, n, resperr.status, resperr.reason))
274- except Exception, e:
275+ except Exception as e:
276 log.Warn("Download of '%s' failed (attempt %s): %s: %s"
277 % (remote_filename, n, e.__class__.__name__, str(e)))
278 log.Debug("Backtrace of previous error: %s"
279@@ -128,10 +128,10 @@
280 objs = self.container.list_objects(marker=keys[-1])
281 keys += objs
282 return keys
283- except self.resp_exc, resperr:
284+ except self.resp_exc as resperr:
285 log.Warn("Listing of '%s' failed (attempt %s): CloudFiles returned: %s %s"
286 % (self.container, n, resperr.status, resperr.reason))
287- except Exception, e:
288+ except Exception as e:
289 log.Warn("Listing of '%s' failed (attempt %s): %s: %s"
290 % (self.container, n, e.__class__.__name__, str(e)))
291 log.Debug("Backtrace of previous error: %s"
292@@ -148,14 +148,14 @@
293 try:
294 self.container.delete_object(remote_filename)
295 return
296- except self.resp_exc, resperr:
297+ except self.resp_exc as resperr:
298 if n > 1 and resperr.status == 404:
299 # We failed on a timeout, but delete succeeded on the server
300 log.Warn("Delete of '%s' missing after retry - must have succeded earler" % remote_filename )
301 return
302 log.Warn("Delete of '%s' failed (attempt %s): CloudFiles returned: %s %s"
303 % (remote_filename, n, resperr.status, resperr.reason))
304- except Exception, e:
305+ except Exception as e:
306 log.Warn("Delete of '%s' failed (attempt %s): %s: %s"
307 % (remote_filename, n, e.__class__.__name__, str(e)))
308 log.Debug("Backtrace of previous error: %s"
309@@ -179,7 +179,7 @@
310 return {'size': sobject.size}
311 except NoSuchObject:
312 return {'size': -1}
313- except Exception, e:
314+ except Exception as e:
315 log.Warn("Error querying '%s/%s': %s"
316 "" % (self.container,
317 filename,
318
319=== modified file 'duplicity/backends/_cf_pyrax.py'
320--- duplicity/backends/_cf_pyrax.py 2013-12-27 06:39:00 +0000
321+++ duplicity/backends/_cf_pyrax.py 2014-04-17 22:26:47 +0000
322@@ -45,24 +45,24 @@
323
324 conn_kwargs = {}
325
326- if not os.environ.has_key('CLOUDFILES_USERNAME'):
327+ if 'CLOUDFILES_USERNAME' not in os.environ:
328 raise BackendException('CLOUDFILES_USERNAME environment variable'
329 'not set.')
330
331- if not os.environ.has_key('CLOUDFILES_APIKEY'):
332+ if 'CLOUDFILES_APIKEY' not in os.environ:
333 raise BackendException('CLOUDFILES_APIKEY environment variable not set.')
334
335 conn_kwargs['username'] = os.environ['CLOUDFILES_USERNAME']
336 conn_kwargs['api_key'] = os.environ['CLOUDFILES_APIKEY']
337
338- if os.environ.has_key('CLOUDFILES_REGION'):
339+ if 'CLOUDFILES_REGION' in os.environ:
340 conn_kwargs['region'] = os.environ['CLOUDFILES_REGION']
341
342 container = parsed_url.path.lstrip('/')
343
344 try:
345 pyrax.set_credentials(**conn_kwargs)
346- except Exception, e:
347+ except Exception as e:
348 log.FatalError("Connection failed, please check your credentials: %s %s"
349 % (e.__class__.__name__, str(e)),
350 log.ErrorCode.connection_failed)
351@@ -81,10 +81,10 @@
352 try:
353 self.container.upload_file(source_path.name, remote_filename)
354 return
355- except self.client_exc, error:
356+ except self.client_exc as error:
357 log.Warn("Upload of '%s' failed (attempt %d): pyrax returned: %s %s"
358 % (remote_filename, n, error.__class__.__name__, error.message))
359- except Exception, e:
360+ except Exception as e:
361 log.Warn("Upload of '%s' failed (attempt %s): %s: %s"
362 % (remote_filename, n, e.__class__.__name__, str(e)))
363 log.Debug("Backtrace of previous error: %s"
364@@ -105,10 +105,10 @@
365 return
366 except self.nso_exc:
367 return
368- except self.client_exc, resperr:
369+ except self.client_exc as resperr:
370 log.Warn("Download of '%s' failed (attempt %s): pyrax returned: %s %s"
371 % (remote_filename, n, resperr.__class__.__name__, resperr.message))
372- except Exception, e:
373+ except Exception as e:
374 log.Warn("Download of '%s' failed (attempt %s): %s: %s"
375 % (remote_filename, n, e.__class__.__name__, str(e)))
376 log.Debug("Backtrace of previous error: %s"
377@@ -131,10 +131,10 @@
378 objs = self.container.get_object_names(marker = keys[-1])
379 keys += objs
380 return keys
381- except self.client_exc, resperr:
382+ except self.client_exc as resperr:
383 log.Warn("Listing of '%s' failed (attempt %s): pyrax returned: %s %s"
384 % (self.container, n, resperr.__class__.__name__, resperr.message))
385- except Exception, e:
386+ except Exception as e:
387 log.Warn("Listing of '%s' failed (attempt %s): %s: %s"
388 % (self.container, n, e.__class__.__name__, str(e)))
389 log.Debug("Backtrace of previous error: %s"
390@@ -151,14 +151,14 @@
391 try:
392 self.container.delete_object(remote_filename)
393 return
394- except self.client_exc, resperr:
395+ except self.client_exc as resperr:
396 if n > 1 and resperr.status == 404:
397 # We failed on a timeout, but delete succeeded on the server
398 log.Warn("Delete of '%s' missing after retry - must have succeded earler" % remote_filename)
399 return
400 log.Warn("Delete of '%s' failed (attempt %s): pyrax returned: %s %s"
401 % (remote_filename, n, resperr.__class__.__name__, resperr.message))
402- except Exception, e:
403+ except Exception as e:
404 log.Warn("Delete of '%s' failed (attempt %s): %s: %s"
405 % (remote_filename, n, e.__class__.__name__, str(e)))
406 log.Debug("Backtrace of previous error: %s"
407@@ -181,7 +181,7 @@
408 return {'size': sobject.total_bytes}
409 except self.nso_exc:
410 return {'size': -1}
411- except Exception, e:
412+ except Exception as e:
413 log.Warn("Error querying '%s/%s': %s"
414 "" % (self.container,
415 filename,
416
417=== modified file 'duplicity/backends/_ssh_paramiko.py'
418--- duplicity/backends/_ssh_paramiko.py 2013-12-30 16:01:49 +0000
419+++ duplicity/backends/_ssh_paramiko.py 2014-04-17 22:26:47 +0000
420@@ -134,7 +134,7 @@
421 try:
422 if os.path.isfile("/etc/ssh/ssh_known_hosts"):
423 self.client.load_system_host_keys("/etc/ssh/ssh_known_hosts")
424- except Exception, e:
425+ except Exception as e:
426 raise BackendException("could not load /etc/ssh/ssh_known_hosts, maybe corrupt?")
427 try:
428 # use load_host_keys() to signal it's writable to paramiko
429@@ -144,7 +144,7 @@
430 self.client.load_host_keys(file)
431 else:
432 self.client._host_keys_filename = file
433- except Exception, e:
434+ except Exception as e:
435 raise BackendException("could not load ~/.ssh/known_hosts, maybe corrupt?")
436
437 """ the next block reorganizes all host parameters into a
438@@ -211,7 +211,7 @@
439 allow_agent=True,
440 look_for_keys=True,
441 key_filename=self.config['identityfile'])
442- except Exception, e:
443+ except Exception as e:
444 raise BackendException("ssh connection to %s@%s:%d failed: %s" % (
445 self.config['user'],
446 self.config['hostname'],
447@@ -229,7 +229,7 @@
448 else:
449 try:
450 self.sftp=self.client.open_sftp()
451- except Exception, e:
452+ except Exception as e:
453 raise BackendException("sftp negotiation failed: %s" % e)
454
455
456@@ -244,17 +244,17 @@
457 continue
458 try:
459 attrs=self.sftp.stat(d)
460- except IOError, e:
461+ except IOError as e:
462 if e.errno == errno.ENOENT:
463 try:
464 self.sftp.mkdir(d)
465- except Exception, e:
466+ except Exception as e:
467 raise BackendException("sftp mkdir %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e))
468 else:
469 raise BackendException("sftp stat %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e))
470 try:
471 self.sftp.chdir(d)
472- except Exception, e:
473+ except Exception as e:
474 raise BackendException("sftp chdir to %s failed: %s" % (self.sftp.normalize(".")+"/"+d,e))
475
476 def put(self, source_path, remote_filename = None):
477@@ -275,7 +275,7 @@
478 chan=self.client.get_transport().open_session()
479 chan.settimeout(globals.timeout)
480 chan.exec_command("scp -t '%s'" % self.remote_dir) # scp in sink mode uses the arg as base directory
481- except Exception, e:
482+ except Exception as e:
483 raise BackendException("scp execution failed: %s" % e)
484 # scp protocol: one 0x0 after startup, one after the Create meta, one after saving
485 # if there's a problem: 0x1 or 0x02 and some error text
486@@ -298,9 +298,9 @@
487 try:
488 self.sftp.put(source_path.name,remote_filename)
489 return
490- except Exception, e:
491+ except Exception as e:
492 raise BackendException("sftp put of %s (as %s) failed: %s" % (source_path.name,remote_filename,e))
493- except Exception, e:
494+ except Exception as e:
495 log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay))
496 raise BackendException("Giving up trying to upload '%s' after %d attempts" % (remote_filename,n))
497
498@@ -320,7 +320,7 @@
499 chan=self.client.get_transport().open_session()
500 chan.settimeout(globals.timeout)
501 chan.exec_command("scp -f '%s/%s'" % (self.remote_dir,remote_filename))
502- except Exception, e:
503+ except Exception as e:
504 raise BackendException("scp execution failed: %s" % e)
505
506 chan.send('\0') # overall ready indicator
507@@ -343,7 +343,7 @@
508 buff=chan.recv(blocksize)
509 f.write(buff)
510 togo-=len(buff)
511- except Exception, e:
512+ except Exception as e:
513 raise BackendException("scp get %s failed: %s" % (remote_filename,e))
514
515 msg=chan.recv(1) # check the final status
516@@ -357,10 +357,10 @@
517 try:
518 self.sftp.get(remote_filename,local_path.name)
519 return
520- except Exception, e:
521+ except Exception as e:
522 raise BackendException("sftp get of %s (to %s) failed: %s" % (remote_filename,local_path.name,e))
523 local_path.setdata()
524- except Exception, e:
525+ except Exception as e:
526 log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay))
527 raise BackendException("Giving up trying to download '%s' after %d attempts" % (remote_filename,n))
528
529@@ -379,9 +379,9 @@
530 else:
531 try:
532 return self.sftp.listdir()
533- except Exception, e:
534+ except Exception as e:
535 raise BackendException("sftp listing of %s failed: %s" % (self.sftp.getcwd(),e))
536- except Exception, e:
537+ except Exception as e:
538 log.Warn("%s (Try %d of %d) Will retry in %d seconds." % (e,n,globals.num_retries,self.retry_delay))
539 raise BackendException("Giving up trying to list '%s' after %d attempts" % (self.remote_dir,n))
540
541@@ -397,12 +397,12 @@
542 else:
543 try:
544 self.sftp.remove(fn)
545- except Exception, e:
546+ except Exception as e:
547 raise BackendException("sftp rm %s failed: %s" % (fn,e))
548
549 # If we get here, we deleted this file successfully. Move on to the next one.
550 break
551- except Exception, e:
552+ except Exception as e:
553 if n == globals.num_retries:
554 log.FatalError(str(e), log.ErrorCode.backend_error)
555 else:
556@@ -416,7 +416,7 @@
557 chan=self.client.get_transport().open_session()
558 chan.settimeout(globals.timeout)
559 chan.exec_command(cmd)
560- except Exception, e:
561+ except Exception as e:
562 raise BackendException("%sexecution failed: %s" % (errorprefix,e))
563 output=chan.recv(-1)
564 res=chan.recv_exit_status()
565@@ -434,7 +434,7 @@
566 sshconfig = paramiko.SSHConfig()
567 try:
568 sshconfig.parse(open(file))
569- except Exception, e:
570+ except Exception as e:
571 raise BackendException("could not load '%s', maybe corrupt?" % (file))
572
573 return sshconfig.lookup(host)
574
575=== modified file 'duplicity/backends/botobackend.py'
576--- duplicity/backends/botobackend.py 2014-04-16 20:45:09 +0000
577+++ duplicity/backends/botobackend.py 2014-04-17 22:26:47 +0000
578@@ -22,8 +22,8 @@
579
580 import duplicity.backend
581 from duplicity import globals
582-from _boto_multi import BotoBackend as BotoMultiUploadBackend
583-from _boto_single import BotoBackend as BotoSingleUploadBackend
584+from ._boto_multi import BotoBackend as BotoMultiUploadBackend
585+from ._boto_single import BotoBackend as BotoSingleUploadBackend
586
587 if globals.s3_use_multiprocessing:
588 duplicity.backend.register_backend("gs", BotoMultiUploadBackend)
589
590=== modified file 'duplicity/backends/cfbackend.py'
591--- duplicity/backends/cfbackend.py 2013-11-24 16:49:57 +0000
592+++ duplicity/backends/cfbackend.py 2014-04-17 22:26:47 +0000
593@@ -22,6 +22,6 @@
594
595 if (globals.cf_backend and
596 globals.cf_backend.lower().strip() == 'pyrax'):
597- import _cf_pyrax
598+ from . import _cf_pyrax
599 else:
600- import _cf_cloudfiles
601+ from . import _cf_cloudfiles
602
603=== modified file 'duplicity/backends/dpbxbackend.py'
604--- duplicity/backends/dpbxbackend.py 2014-03-05 17:05:04 +0000
605+++ duplicity/backends/dpbxbackend.py 2014-04-17 22:26:47 +0000
606@@ -29,6 +29,7 @@
607 import urllib
608 import re
609 import locale, sys
610+from functools import reduce
611
612 import traceback, StringIO
613 from exceptions import Exception
614@@ -80,14 +81,14 @@
615
616 try:
617 return f(self, *args)
618- except TypeError, e:
619+ except TypeError as e:
620 log_exception(e)
621 log.FatalError('dpbx type error "%s"' % (e,), log.ErrorCode.backend_code_error)
622- except rest.ErrorResponse, e:
623+ except rest.ErrorResponse as e:
624 msg = e.user_error_msg or str(e)
625 log.Error('dpbx error: %s' % (msg,), log.ErrorCode.backend_command_error)
626 raise e
627- except Exception, e:
628+ except Exception as e:
629 log_exception(e)
630 log.Error('dpbx code error "%s"' % (e,), log.ErrorCode.backend_code_error)
631 raise e
632@@ -119,7 +120,7 @@
633
634 def write_creds(self, token):
635 open(self.TOKEN_FILE, 'w').close() # create/reset file
636- os.chmod(self.TOKEN_FILE,0600) # set it -rw------ (NOOP in Windows?)
637+ os.chmod(self.TOKEN_FILE, 0o600) # set it -rw------ (NOOP in Windows?)
638 # now write the content
639 f = open(self.TOKEN_FILE, 'w')
640 f.write("|".join([token.key, token.secret]))
641@@ -159,7 +160,7 @@
642 if not self.sess.is_linked():
643 try: # to login to the box
644 self.sess.link()
645- except rest.ErrorResponse, e:
646+ except rest.ErrorResponse as e:
647 log.FatalError('dpbx Error: %s\n' % str(e), log.ErrorCode.dpbx_nologin)
648 if not self.sess.is_linked(): # stil not logged in
649 log.FatalError("dpbx Cannot login: check your credentials",log.ErrorCode.dpbx_nologin)
650
651=== modified file 'duplicity/backends/gdocsbackend.py'
652--- duplicity/backends/gdocsbackend.py 2014-01-03 10:37:54 +0000
653+++ duplicity/backends/gdocsbackend.py 2014-04-17 22:26:47 +0000
654@@ -113,7 +113,7 @@
655 self.__handle_error("Failed to initialize upload of file '%s' to remote folder '%s'"
656 % (source_path.get_filename(), self.folder.title.text), raise_errors)
657 assert not file.close()
658- except Exception, e:
659+ except Exception as e:
660 self.__handle_error("Failed to upload file '%s' to remote folder '%s': %s"
661 % (source_path.get_filename(), self.folder.title.text, str(e)), raise_errors)
662
663@@ -132,7 +132,7 @@
664 else:
665 self.__handle_error("Failed to find file '%s' in remote folder '%s'"
666 % (remote_filename, self.folder.title.text), raise_errors)
667- except Exception, e:
668+ except Exception as e:
669 self.__handle_error("Failed to download file '%s' in remote folder '%s': %s"
670 % (remote_filename, self.folder.title.text, str(e)), raise_errors)
671
672@@ -143,7 +143,7 @@
673 entries = self.__fetch_entries(self.folder.resource_id.text,
674 GDocsBackend.BACKUP_DOCUMENT_TYPE)
675 return [entry.title.text for entry in entries]
676- except Exception, e:
677+ except Exception as e:
678 self.__handle_error("Failed to fetch list of files in remote folder '%s': %s"
679 % (self.folder.title.text, str(e)), raise_errors)
680
681@@ -166,7 +166,7 @@
682 else:
683 log.Warn("Failed to fetch file '%s' in remote folder '%s'"
684 % (filename, self.folder.title.text))
685- except Exception, e:
686+ except Exception as e:
687 self.__handle_error("Failed to remove file '%s' in remote folder '%s': %s"
688 % (filename, self.folder.title.text, str(e)), raise_errors)
689
690@@ -184,7 +184,7 @@
691 service='writely',
692 captcha_token=captcha_token,
693 captcha_response=captcha_response)
694- except gdata.client.CaptchaChallenge, challenge:
695+ except gdata.client.CaptchaChallenge as challenge:
696 print('A captcha challenge in required. Please visit ' + challenge.captcha_url)
697 answer = None
698 while not answer:
699@@ -196,7 +196,7 @@
700 'access code for using this Duplicity backend. Follow the instrucction in '
701 'http://www.google.com/support/accounts/bin/static.py?page=guide.cs&guide=1056283&topic=1056286 '
702 'and create your application-specific password to run duplicity backups.')
703- except Exception, e:
704+ except Exception as e:
705 self.__handle_error('Error while authenticating client: %s.' % str(e))
706
707 def __fetch_entries(self, folder_id, type, title=None):
708@@ -238,7 +238,7 @@
709
710 # Done!
711 return result
712- except Exception, e:
713+ except Exception as e:
714 self.__handle_error('Error while fetching remote entries: %s.' % str(e))
715
716 duplicity.backend.register_backend('gdocs', GDocsBackend)
717
718=== modified file 'duplicity/backends/giobackend.py'
719--- duplicity/backends/giobackend.py 2013-12-27 06:39:00 +0000
720+++ duplicity/backends/giobackend.py 2014-04-17 22:26:47 +0000
721@@ -93,14 +93,14 @@
722 # Now make the directory if it doesn't exist
723 try:
724 self.remote_file.make_directory_with_parents(None)
725- except GLib.GError, e:
726+ except GLib.GError as e:
727 if e.code != Gio.IOErrorEnum.EXISTS:
728 raise
729
730 def done_with_mount(self, fileobj, result, loop):
731 try:
732 fileobj.mount_enclosing_volume_finish(result)
733- except GLib.GError, e:
734+ except GLib.GError as e:
735 # check for NOT_SUPPORTED because some schemas (e.g. file://) validly don't
736 if e.code != Gio.IOErrorEnum.ALREADY_MOUNTED and e.code != Gio.IOErrorEnum.NOT_SUPPORTED:
737 log.FatalError(_("Connection failed, please check your password: %s")
738@@ -132,7 +132,7 @@
739 source.copy(target,
740 Gio.FileCopyFlags.OVERWRITE | Gio.FileCopyFlags.NOFOLLOW_SYMLINKS,
741 None, self.copy_progress, None)
742- except Exception, e:
743+ except Exception as e:
744 self.handle_error(raise_errors, e, op, source.get_parse_name(),
745 target.get_parse_name())
746
747@@ -163,7 +163,7 @@
748 while info:
749 files.append(info.get_name())
750 info = enum.next_file(None)
751- except Exception, e:
752+ except Exception as e:
753 self.handle_error(raise_errors, e, 'list',
754 self.remote_file.get_parse_name())
755 return files
756@@ -176,7 +176,7 @@
757 target_file = self.remote_file.get_child(filename)
758 try:
759 target_file.delete(None)
760- except Exception, e:
761+ except Exception as e:
762 if isinstance(e, GLib.GError):
763 if e.code == Gio.IOErrorEnum.NOT_FOUND:
764 continue
765@@ -193,7 +193,7 @@
766 info = target_file.query_info(attrs, Gio.FileQueryInfoFlags.NONE,
767 None)
768 return {'size': info.get_size()}
769- except Exception, e:
770+ except Exception as e:
771 if isinstance(e, GLib.GError):
772 if e.code == Gio.IOErrorEnum.NOT_FOUND:
773 return {'size': -1} # early exit, no need to retry
774
775=== modified file 'duplicity/backends/imapbackend.py'
776--- duplicity/backends/imapbackend.py 2013-12-27 06:39:00 +0000
777+++ duplicity/backends/imapbackend.py 2014-04-17 22:26:47 +0000
778@@ -54,7 +54,7 @@
779
780 # Set the password
781 if ( not parsed_url.password ):
782- if os.environ.has_key('IMAP_PASSWORD'):
783+ if 'IMAP_PASSWORD' in os.environ:
784 password = os.environ.get('IMAP_PASSWORD')
785 else:
786 password = getpass.getpass("Enter account password: ")
787
788=== modified file 'duplicity/backends/localbackend.py'
789--- duplicity/backends/localbackend.py 2013-12-27 06:39:00 +0000
790+++ duplicity/backends/localbackend.py 2014-04-17 22:26:47 +0000
791@@ -74,13 +74,13 @@
792 source_path.rename(target_path)
793 except OSError:
794 pass
795- except Exception, e:
796+ except Exception as e:
797 self.handle_error(e, 'put', source_path.name, target_path.name)
798 else:
799 return
800 try:
801 target_path.writefileobj(source_path.open("rb"))
802- except Exception, e:
803+ except Exception as e:
804 self.handle_error(e, 'put', source_path.name, target_path.name)
805
806 """If we get here, renaming failed previously"""
807@@ -93,7 +93,7 @@
808 source_path = self.remote_pathdir.append(filename)
809 try:
810 local_path.writefileobj(source_path.open("rb"))
811- except Exception, e:
812+ except Exception as e:
813 self.handle_error(e, 'get', source_path.name, local_path.name)
814
815 def _list(self):
816@@ -104,7 +104,7 @@
817 pass
818 try:
819 return self.remote_pathdir.listdir()
820- except Exception, e:
821+ except Exception as e:
822 self.handle_error(e, 'list', self.remote_pathdir.name)
823
824 def delete(self, filename_list):
825@@ -113,7 +113,7 @@
826 for filename in filename_list:
827 try:
828 self.remote_pathdir.append(filename).delete()
829- except Exception, e:
830+ except Exception as e:
831 self.handle_error(e, 'delete', self.remote_pathdir.append(filename).name)
832
833 def _query_file_info(self, filename):
834@@ -125,7 +125,7 @@
835 target_file.setdata()
836 size = target_file.getsize()
837 return {'size': size}
838- except Exception, e:
839+ except Exception as e:
840 self.handle_error(e, 'query', target_file.name)
841 return {'size': None}
842
843
844=== modified file 'duplicity/backends/megabackend.py'
845--- duplicity/backends/megabackend.py 2013-12-27 06:39:00 +0000
846+++ duplicity/backends/megabackend.py 2014-04-17 22:26:47 +0000
847@@ -80,7 +80,7 @@
848
849 self.client.upload(source_path.get_canonical(), self.folder, dest_filename=remote_filename)
850
851- except Exception, e:
852+ except Exception as e:
853 self.__handle_error("Failed to upload file '%s' to remote folder '%s': %s"
854 % (source_path.get_canonical(), self.__get_node_name(self.folder), str(e)), raise_errors)
855
856@@ -100,7 +100,7 @@
857 else:
858 self.__handle_error("Failed to find file '%s' in remote folder '%s'"
859 % (remote_filename, self.__get_node_name(self.folder)), raise_errors)
860- except Exception, e:
861+ except Exception as e:
862 self.__handle_error("Failed to download file '%s' in remote folder '%s': %s"
863 % (remote_filename, self.__get_node_name(self.folder), str(e)), raise_errors)
864
865@@ -110,7 +110,7 @@
866 try:
867 entries = self.client.get_files_in_node(self.folder)
868 return [ self.client.get_name_from_file({entry:entries[entry]}) for entry in entries]
869- except Exception, e:
870+ except Exception as e:
871 self.__handle_error("Failed to fetch list of files in remote folder '%s': %s"
872 % (self.__get_node_name(self.folder), str(e)), raise_errors)
873
874@@ -129,7 +129,7 @@
875 else:
876 log.Warn("Failed to fetch file '%s' in remote folder '%s'"
877 % (filename, self.__get_node_name(self.folder)))
878- except Exception, e:
879+ except Exception as e:
880 self.__handle_error("Failed to remove file '%s' in remote folder '%s': %s"
881 % (filename, self.__get_node_name(self.folder), str(e)), raise_errors)
882
883@@ -147,7 +147,7 @@
884 def __authorize(self, email, password):
885 try:
886 self.client.login(email, password)
887- except Exception, e:
888+ except Exception as e:
889 self.__handle_error('Error while authenticating client: %s.' % str(e))
890
891 def __filter_entries(self, entries, parent_id=None, title=None, type=None):
892@@ -169,7 +169,7 @@
893 result.update({k:v})
894
895 return result
896- except Exception, e:
897+ except Exception as e:
898 self.__handle_error('Error while fetching remote entries: %s.' % str(e))
899
900 duplicity.backend.register_backend('mega', MegaBackend)
901
902=== modified file 'duplicity/backends/sshbackend.py'
903--- duplicity/backends/sshbackend.py 2012-05-16 11:03:20 +0000
904+++ duplicity/backends/sshbackend.py 2014-04-17 22:26:47 +0000
905@@ -26,11 +26,11 @@
906
907 if (globals.ssh_backend and
908 globals.ssh_backend.lower().strip() == 'pexpect'):
909- import _ssh_pexpect
910+ from . import _ssh_pexpect
911 else:
912 # take user by the hand to prevent typo driven bug reports
913 if globals.ssh_backend.lower().strip() != 'paramiko':
914 log.Warn(_("Warning: Selected ssh backend '%s' is neither 'paramiko nor 'pexpect'. Will use default paramiko instead.") % globals.ssh_backend)
915 warn_option("--scp-command", globals.scp_command)
916 warn_option("--sftp-command", globals.sftp_command)
917- import _ssh_paramiko
918+ from . import _ssh_paramiko
919
920=== modified file 'duplicity/backends/swiftbackend.py'
921--- duplicity/backends/swiftbackend.py 2013-12-27 06:39:00 +0000
922+++ duplicity/backends/swiftbackend.py 2014-04-17 22:26:47 +0000
923@@ -44,20 +44,20 @@
924 conn_kwargs = {}
925
926 # if the user has already authenticated
927- if os.environ.has_key('SWIFT_PREAUTHURL') and os.environ.has_key('SWIFT_PREAUTHTOKEN'):
928+ if 'SWIFT_PREAUTHURL' in os.environ and 'SWIFT_PREAUTHTOKEN' in os.environ:
929 conn_kwargs['preauthurl'] = os.environ['SWIFT_PREAUTHURL']
930 conn_kwargs['preauthtoken'] = os.environ['SWIFT_PREAUTHTOKEN']
931
932 else:
933- if not os.environ.has_key('SWIFT_USERNAME'):
934+ if 'SWIFT_USERNAME' not in os.environ:
935 raise BackendException('SWIFT_USERNAME environment variable '
936 'not set.')
937
938- if not os.environ.has_key('SWIFT_PASSWORD'):
939+ if 'SWIFT_PASSWORD' not in os.environ:
940 raise BackendException('SWIFT_PASSWORD environment variable '
941 'not set.')
942
943- if not os.environ.has_key('SWIFT_AUTHURL'):
944+ if 'SWIFT_AUTHURL' not in os.environ:
945 raise BackendException('SWIFT_AUTHURL environment variable '
946 'not set.')
947
948@@ -65,11 +65,11 @@
949 conn_kwargs['key'] = os.environ['SWIFT_PASSWORD']
950 conn_kwargs['authurl'] = os.environ['SWIFT_AUTHURL']
951
952- if os.environ.has_key('SWIFT_AUTHVERSION'):
953+ if 'SWIFT_AUTHVERSION' in os.environ:
954 conn_kwargs['auth_version'] = os.environ['SWIFT_AUTHVERSION']
955 else:
956 conn_kwargs['auth_version'] = '1'
957- if os.environ.has_key('SWIFT_TENANTNAME'):
958+ if 'SWIFT_TENANTNAME' in os.environ:
959 conn_kwargs['tenant_name'] = os.environ['SWIFT_TENANTNAME']
960
961 self.container = parsed_url.path.lstrip('/')
962@@ -77,7 +77,7 @@
963 try:
964 self.conn = Connection(**conn_kwargs)
965 self.conn.put_container(self.container)
966- except Exception, e:
967+ except Exception as e:
968 log.FatalError("Connection failed: %s %s"
969 % (e.__class__.__name__, str(e)),
970 log.ErrorCode.connection_failed)
971@@ -93,10 +93,10 @@
972 remote_filename,
973 file(source_path.name))
974 return
975- except self.resp_exc, error:
976+ except self.resp_exc as error:
977 log.Warn("Upload of '%s' failed (attempt %d): Swift server returned: %s %s"
978 % (remote_filename, n, error.http_status, error.message))
979- except Exception, e:
980+ except Exception as e:
981 log.Warn("Upload of '%s' failed (attempt %s): %s: %s"
982 % (remote_filename, n, e.__class__.__name__, str(e)))
983 log.Debug("Backtrace of previous error: %s"
984@@ -117,10 +117,10 @@
985 f.write(chunk)
986 local_path.setdata()
987 return
988- except self.resp_exc, resperr:
989+ except self.resp_exc as resperr:
990 log.Warn("Download of '%s' failed (attempt %s): Swift server returned: %s %s"
991 % (remote_filename, n, resperr.http_status, resperr.message))
992- except Exception, e:
993+ except Exception as e:
994 log.Warn("Download of '%s' failed (attempt %s): %s: %s"
995 % (remote_filename, n, e.__class__.__name__, str(e)))
996 log.Debug("Backtrace of previous error: %s"
997@@ -139,10 +139,10 @@
998 # to make multiple requests to get them all.
999 headers, objs = self.conn.get_container(self.container)
1000 return [ o['name'] for o in objs ]
1001- except self.resp_exc, resperr:
1002+ except self.resp_exc as resperr:
1003 log.Warn("Listing of '%s' failed (attempt %s): Swift server returned: %s %s"
1004 % (self.container, n, resperr.http_status, resperr.message))
1005- except Exception, e:
1006+ except Exception as e:
1007 log.Warn("Listing of '%s' failed (attempt %s): %s: %s"
1008 % (self.container, n, e.__class__.__name__, str(e)))
1009 log.Debug("Backtrace of previous error: %s"
1010@@ -159,14 +159,14 @@
1011 try:
1012 self.conn.delete_object(self.container, remote_filename)
1013 return
1014- except self.resp_exc, resperr:
1015+ except self.resp_exc as resperr:
1016 if n > 1 and resperr.http_status == 404:
1017 # We failed on a timeout, but delete succeeded on the server
1018 log.Warn("Delete of '%s' missing after retry - must have succeded earlier" % remote_filename )
1019 return
1020 log.Warn("Delete of '%s' failed (attempt %s): Swift server returned: %s %s"
1021 % (remote_filename, n, resperr.http_status, resperr.message))
1022- except Exception, e:
1023+ except Exception as e:
1024 log.Warn("Delete of '%s' failed (attempt %s): %s: %s"
1025 % (remote_filename, n, e.__class__.__name__, str(e)))
1026 log.Debug("Backtrace of previous error: %s"
1027@@ -186,10 +186,10 @@
1028 def _query_file_info(self, filename, raise_errors=False):
1029 try:
1030 sobject = self.conn.head_object(self.container, filename)
1031- return {'size': long(sobject['content-length'])}
1032+ return {'size': int(sobject['content-length'])}
1033 except self.resp_exc:
1034 return {'size': -1}
1035- except Exception, e:
1036+ except Exception as e:
1037 log.Warn("Error querying '%s/%s': %s"
1038 "" % (self.container,
1039 filename,
1040
1041=== modified file 'duplicity/backends/webdavbackend.py'
1042--- duplicity/backends/webdavbackend.py 2014-04-16 20:45:09 +0000
1043+++ duplicity/backends/webdavbackend.py 2014-04-17 22:26:47 +0000
1044@@ -96,7 +96,7 @@
1045 def request(self, *args, **kwargs):
1046 try:
1047 return httplib.HTTPSConnection.request(self, *args, **kwargs)
1048- except ssl.SSLError, e:
1049+ except ssl.SSLError as e:
1050 # encapsulate ssl errors
1051 raise BackendException("SSL failed: %s" % str(e),log.ErrorCode.backend_error)
1052
1053@@ -293,7 +293,7 @@
1054 if filename:
1055 result.append(filename)
1056 return result
1057- except Exception, e:
1058+ except Exception as e:
1059 raise e
1060 finally:
1061 if response: response.close()
1062@@ -383,7 +383,7 @@
1063 reason = response.reason
1064 response.close()
1065 raise BackendException("Bad status code %s reason %s." % (status,reason))
1066- except Exception, e:
1067+ except Exception as e:
1068 raise e
1069 finally:
1070 if response: response.close()
1071@@ -407,7 +407,7 @@
1072 reason = response.reason
1073 response.close()
1074 raise BackendException("Bad status code %s reason %s." % (status,reason))
1075- except Exception, e:
1076+ except Exception as e:
1077 raise e
1078 finally:
1079 if response: response.close()
1080@@ -429,7 +429,7 @@
1081 reason = response.reason
1082 response.close()
1083 raise BackendException("Bad status code %s reason %s." % (status,reason))
1084- except Exception, e:
1085+ except Exception as e:
1086 raise e
1087 finally:
1088 if response: response.close()
1089
1090=== modified file 'duplicity/cached_ops.py'
1091--- duplicity/cached_ops.py 2012-11-09 03:21:40 +0000
1092+++ duplicity/cached_ops.py 2014-04-17 22:26:47 +0000
1093@@ -34,7 +34,7 @@
1094 def __call__(self, *args):
1095 try:
1096 return self.cache[args]
1097- except (KeyError, TypeError), e:
1098+ except (KeyError, TypeError) as e:
1099 result = self.f(*args)
1100 if not isinstance(e, TypeError):
1101 # TypeError most likely means that args is not hashable
1102
1103=== modified file 'duplicity/collections.py'
1104--- duplicity/collections.py 2014-01-17 16:44:46 +0000
1105+++ duplicity/collections.py 2014-04-17 22:26:47 +0000
1106@@ -96,7 +96,7 @@
1107 self.set_manifest(filename)
1108 else:
1109 assert pr.volume_number is not None
1110- assert not self.volume_name_dict.has_key(pr.volume_number), \
1111+ assert pr.volume_number not in self.volume_name_dict, \
1112 (self.volume_name_dict, filename)
1113 self.volume_name_dict[pr.volume_number] = filename
1114
1115@@ -222,7 +222,7 @@
1116 # public key w/o secret key
1117 try:
1118 manifest_buffer = self.backend.get_data(self.remote_manifest_name)
1119- except GPGError, message:
1120+ except GPGError as message:
1121 #TODO: We check for gpg v1 and v2 messages, should be an error code.
1122 if ("secret key not available" in message.args[0] or
1123 "No secret key" in message.args[0]):
1124@@ -916,7 +916,7 @@
1125 # Build dictionary from end_times to lists of corresponding chains
1126 endtime_chain_dict = {}
1127 for chain in chain_list:
1128- if endtime_chain_dict.has_key(chain.end_time):
1129+ if chain.end_time in endtime_chain_dict:
1130 endtime_chain_dict[chain.end_time].append(chain)
1131 else:
1132 endtime_chain_dict[chain.end_time] = [chain]
1133
1134=== modified file 'duplicity/commandline.py'
1135--- duplicity/commandline.py 2014-04-17 17:45:37 +0000
1136+++ duplicity/commandline.py 2014-04-17 22:26:47 +0000
1137@@ -109,7 +109,7 @@
1138 def check_time(option, opt, value):
1139 try:
1140 return dup_time.genstrtotime(value)
1141- except dup_time.TimeException, e:
1142+ except dup_time.TimeException as e:
1143 raise optparse.OptionValueError(str(e))
1144
1145 def check_verbosity(option, opt, value):
1146
1147=== modified file 'duplicity/diffdir.py'
1148--- duplicity/diffdir.py 2013-12-27 06:39:00 +0000
1149+++ duplicity/diffdir.py 2014-04-17 22:26:47 +0000
1150@@ -389,7 +389,7 @@
1151 def read(self, length = -1):
1152 try:
1153 buf = self.infile.read(length)
1154- except IOError, ex:
1155+ except IOError as ex:
1156 buf = ""
1157 log.Warn(_("Error %s getting delta for %s") % (str(ex), util.ufn(self.infile.name)))
1158 if stats:
1159@@ -461,7 +461,7 @@
1160 TarBlockIter initializer
1161 """
1162 self.input_iter = input_iter
1163- self.offset = 0l # total length of data read
1164+ self.offset = 0 # total length of data read
1165 self.process_waiting = False # process_continued has more blocks
1166 self.process_next_vol_number = None # next volume number to write in multivol
1167 self.previous_index = None # holds index of last block returned
1168@@ -564,7 +564,7 @@
1169 Return closing string for tarfile, reset offset
1170 """
1171 blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) #@UnusedVariable
1172- self.offset = 0l
1173+ self.offset = 0
1174 return '\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0
1175
1176 def __iter__(self):
1177@@ -736,5 +736,5 @@
1178 return 512 # set minimum of 512 bytes
1179 else:
1180 # Split file into about 2000 pieces, rounding to 512
1181- file_blocksize = long((file_len / (2000 * 512)) * 512)
1182+ file_blocksize = int((file_len / (2000 * 512)) * 512)
1183 return min(file_blocksize, globals.max_blocksize)
1184
1185=== modified file 'duplicity/dup_temp.py'
1186--- duplicity/dup_temp.py 2013-12-27 06:39:00 +0000
1187+++ duplicity/dup_temp.py 2014-04-17 22:26:47 +0000
1188@@ -179,9 +179,9 @@
1189 tgt = self.dirpath.append(self.remname)
1190 src_iter = SrcIter(src)
1191 if pr.compressed:
1192- gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxint)
1193+ gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxsize)
1194 elif pr.encrypted:
1195- gpg.GPGWriteFile(src_iter, tgt.name, globals.gpg_profile, size = sys.maxint)
1196+ gpg.GPGWriteFile(src_iter, tgt.name, globals.gpg_profile, size = sys.maxsize)
1197 else:
1198 os.system("cp -p \"%s\" \"%s\"" % (src.name, tgt.name))
1199 globals.backend.move(tgt) #@UndefinedVariable
1200@@ -195,7 +195,7 @@
1201 src_iter = SrcIter(src)
1202 pr = file_naming.parse(self.permname)
1203 if pr.compressed:
1204- gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxint)
1205+ gpg.GzipWriteFile(src_iter, tgt.name, size = sys.maxsize)
1206 os.unlink(src.name)
1207 else:
1208 os.rename(src.name, tgt.name)
1209
1210=== modified file 'duplicity/dup_threading.py'
1211--- duplicity/dup_threading.py 2010-07-22 19:15:11 +0000
1212+++ duplicity/dup_threading.py 2014-04-17 22:26:47 +0000
1213@@ -192,7 +192,7 @@
1214 if state['error'] is None:
1215 return state['value']
1216 else:
1217- raise state['error'], None, state['trace']
1218+ raise state['error'].with_traceback(state['trace'])
1219 finally:
1220 cv.release()
1221
1222@@ -207,7 +207,7 @@
1223 cv.release()
1224
1225 return (True, waiter)
1226- except Exception, e:
1227+ except Exception as e:
1228 cv.acquire()
1229 state['done'] = True
1230 state['error'] = e
1231
1232=== modified file 'duplicity/dup_time.py'
1233--- duplicity/dup_time.py 2011-11-03 11:27:45 +0000
1234+++ duplicity/dup_time.py 2014-04-17 22:26:47 +0000
1235@@ -62,7 +62,7 @@
1236 def setcurtime(time_in_secs = None):
1237 """Sets the current time in curtime and curtimestr"""
1238 global curtime, curtimestr
1239- t = time_in_secs or long(time.time())
1240+ t = time_in_secs or int(time.time())
1241 assert type(t) in (types.LongType, types.IntType)
1242 curtime, curtimestr = t, timetostring(t)
1243
1244@@ -137,9 +137,9 @@
1245 # even when we're not in the same timezone that wrote the
1246 # string
1247 if len(timestring) == 16:
1248- return long(utc_in_secs)
1249+ return int(utc_in_secs)
1250 else:
1251- return long(utc_in_secs + tzdtoseconds(timestring[19:]))
1252+ return int(utc_in_secs + tzdtoseconds(timestring[19:]))
1253 except (TypeError, ValueError, AssertionError):
1254 return None
1255
1256@@ -169,7 +169,7 @@
1257 if seconds == 1:
1258 partlist.append("1 second")
1259 elif not partlist or seconds > 1:
1260- if isinstance(seconds, int) or isinstance(seconds, long):
1261+ if isinstance(seconds, (types.LongType, types.IntType)):
1262 partlist.append("%s seconds" % seconds)
1263 else:
1264 partlist.append("%.2f seconds" % seconds)
1265
1266=== modified file 'duplicity/file_naming.py'
1267--- duplicity/file_naming.py 2014-01-31 12:41:00 +0000
1268+++ duplicity/file_naming.py 2014-04-17 22:26:47 +0000
1269@@ -158,7 +158,7 @@
1270 """
1271 Convert string s in base 36 to long int
1272 """
1273- total = 0L
1274+ total = 0
1275 for i in range(len(s)):
1276 total *= 36
1277 digit_ord = ord(s[i])
1278
1279=== modified file 'duplicity/globals.py'
1280--- duplicity/globals.py 2014-04-09 09:22:27 +0000
1281+++ duplicity/globals.py 2014-04-17 22:26:47 +0000
1282@@ -87,7 +87,7 @@
1283 gpg_options = ''
1284
1285 # Maximum file blocksize
1286-max_blocksize = 2048L
1287+max_blocksize = 2048
1288
1289 # If true, filelists and directory statistics will be split on
1290 # nulls instead of newlines.
1291
1292=== modified file 'duplicity/gpg.py'
1293--- duplicity/gpg.py 2013-12-27 06:39:00 +0000
1294+++ duplicity/gpg.py 2014-04-17 22:26:47 +0000
1295@@ -215,7 +215,7 @@
1296 msg += unicode(line.strip(), locale.getpreferredencoding(), 'replace') + u"\n"
1297 msg += u"===== End GnuPG log =====\n"
1298 if not (msg.find(u"invalid packet (ctb=14)") > -1):
1299- raise GPGError, msg
1300+ raise GPGError(msg)
1301 else:
1302 return ""
1303
1304
1305=== modified file 'duplicity/gpginterface.py'
1306--- duplicity/gpginterface.py 2013-12-27 06:39:00 +0000
1307+++ duplicity/gpginterface.py 2014-04-17 22:26:47 +0000
1308@@ -353,14 +353,14 @@
1309 if attach_fhs == None: attach_fhs = {}
1310
1311 for std in _stds:
1312- if not attach_fhs.has_key(std) \
1313+ if std not in attach_fhs \
1314 and std not in create_fhs:
1315 attach_fhs.setdefault(std, getattr(sys, std))
1316
1317 handle_passphrase = 0
1318
1319 if self.passphrase != None \
1320- and not attach_fhs.has_key('passphrase') \
1321+ and 'passphrase' not in attach_fhs \
1322 and 'passphrase' not in create_fhs:
1323 handle_passphrase = 1
1324 create_fhs.append('passphrase')
1325@@ -384,18 +384,18 @@
1326 process = Process()
1327
1328 for fh_name in create_fhs + attach_fhs.keys():
1329- if not _fd_modes.has_key(fh_name):
1330- raise KeyError, \
1331+ if fh_name not in _fd_modes:
1332+ raise KeyError(
1333 "unrecognized filehandle name '%s'; must be one of %s" \
1334- % (fh_name, _fd_modes.keys())
1335+ % (fh_name, _fd_modes.keys()))
1336
1337 for fh_name in create_fhs:
1338 # make sure the user doesn't specify a filehandle
1339 # to be created *and* attached
1340- if attach_fhs.has_key(fh_name):
1341- raise ValueError, \
1342+ if fh_name in attach_fhs:
1343+ raise ValueError(
1344 "cannot have filehandle '%s' in both create_fhs and attach_fhs" \
1345- % fh_name
1346+ % fh_name)
1347
1348 pipe = os.pipe()
1349 # fix by drt@un.bewaff.net noting
1350@@ -660,7 +660,7 @@
1351 if self.returned == None:
1352 self.thread.join()
1353 if self.returned != 0:
1354- raise IOError, "GnuPG exited non-zero, with code %d" % (self.returned >> 8)
1355+ raise IOError("GnuPG exited non-zero, with code %d" % (self.returned >> 8))
1356
1357
1358 def threaded_waitpid(process):
1359
1360=== modified file 'duplicity/librsync.py'
1361--- duplicity/librsync.py 2010-11-20 15:39:00 +0000
1362+++ duplicity/librsync.py 2014-04-17 22:26:47 +0000
1363@@ -26,7 +26,7 @@
1364
1365 """
1366
1367-import _librsync
1368+from . import _librsync
1369 import types, array
1370
1371 blocksize = _librsync.RS_JOB_BLOCKSIZE
1372@@ -90,7 +90,7 @@
1373 self._add_to_inbuf()
1374 try:
1375 self.eof, len_inbuf_read, cycle_out = self.maker.cycle(self.inbuf)
1376- except _librsync.librsyncError, e:
1377+ except _librsync.librsyncError as e:
1378 raise librsyncError(str(e))
1379 self.inbuf = self.inbuf[len_inbuf_read:]
1380 self.outbuf.fromstring(cycle_out)
1381@@ -126,7 +126,7 @@
1382 LikeFile.__init__(self, infile)
1383 try:
1384 self.maker = _librsync.new_sigmaker(blocksize)
1385- except _librsync.librsyncError, e:
1386+ except _librsync.librsyncError as e:
1387 raise librsyncError(str(e))
1388
1389 class DeltaFile(LikeFile):
1390@@ -148,7 +148,7 @@
1391 assert not signature.close()
1392 try:
1393 self.maker = _librsync.new_deltamaker(sig_string)
1394- except _librsync.librsyncError, e:
1395+ except _librsync.librsyncError as e:
1396 raise librsyncError(str(e))
1397
1398
1399@@ -167,7 +167,7 @@
1400 raise TypeError("basis_file must be a (true) file")
1401 try:
1402 self.maker = _librsync.new_patchmaker(basis_file)
1403- except _librsync.librsyncError, e:
1404+ except _librsync.librsyncError as e:
1405 raise librsyncError(str(e))
1406
1407
1408@@ -182,7 +182,7 @@
1409 """Return new signature instance"""
1410 try:
1411 self.sig_maker = _librsync.new_sigmaker(blocksize)
1412- except _librsync.librsyncError, e:
1413+ except _librsync.librsyncError as e:
1414 raise librsyncError(str(e))
1415 self.gotsig = None
1416 self.buffer = ""
1417@@ -201,7 +201,7 @@
1418 """Run self.buffer through sig_maker, add to self.sig_string"""
1419 try:
1420 eof, len_buf_read, cycle_out = self.sig_maker.cycle(self.buffer)
1421- except _librsync.librsyncError, e:
1422+ except _librsync.librsyncError as e:
1423 raise librsyncError(str(e))
1424 self.buffer = self.buffer[len_buf_read:]
1425 self.sigstring_list.append(cycle_out)
1426
1427=== modified file 'duplicity/patchdir.py'
1428--- duplicity/patchdir.py 2013-12-27 06:39:00 +0000
1429+++ duplicity/patchdir.py 2014-04-17 22:26:47 +0000
1430@@ -504,7 +504,7 @@
1431 if final_ropath.exists():
1432 # otherwise final patch was delete
1433 yield final_ropath
1434- except Exception, e:
1435+ except Exception as e:
1436 filename = normalized[-1].get_ropath().get_relative_path()
1437 log.Warn(_("Error '%s' patching %s") %
1438 (str(e), filename),
1439
1440=== modified file 'duplicity/path.py'
1441--- duplicity/path.py 2013-12-27 06:39:00 +0000
1442+++ duplicity/path.py 2014-04-17 22:26:47 +0000
1443@@ -500,7 +500,7 @@
1444 """Refresh stat cache"""
1445 try:
1446 self.stat = os.lstat(self.name)
1447- except OSError, e:
1448+ except OSError as e:
1449 err_string = errno.errorcode[e[0]]
1450 if err_string in ["ENOENT", "ENOTDIR", "ELOOP", "ENOTCONN"]:
1451 self.stat, self.type = None, None # file doesn't exist
1452
1453=== modified file 'duplicity/progress.py'
1454--- duplicity/progress.py 2013-04-15 12:10:35 +0000
1455+++ duplicity/progress.py 2014-04-17 22:26:47 +0000
1456@@ -264,7 +264,7 @@
1457 projection = 1.0
1458 if self.progress_estimation > 0:
1459 projection = (1.0 - self.progress_estimation) / self.progress_estimation
1460- self.time_estimation = long(projection * float(self.elapsed_sum.total_seconds()))
1461+ self.time_estimation = int(projection * float(self.elapsed_sum.total_seconds()))
1462
1463 # Apply values only when monotonic, so the estimates look more consistent to the human eye
1464 if self.progress_estimation < last_progress_estimation:
1465@@ -299,7 +299,7 @@
1466 volume and for the current volume
1467 """
1468 changing = max(bytecount - self.last_bytecount, 0)
1469- self.total_bytecount += long(changing) # Annotate only changing bytes since last probe
1470+ self.total_bytecount += int(changing) # Annotate only changing bytes since last probe
1471 self.last_bytecount = bytecount
1472 if changing > 0:
1473 self.stall_last_time = datetime.now()
1474
1475=== modified file 'duplicity/robust.py'
1476--- duplicity/robust.py 2013-12-27 06:39:00 +0000
1477+++ duplicity/robust.py 2014-04-17 22:26:47 +0000
1478@@ -39,7 +39,7 @@
1479 # RPathException, Rdiff.RdiffException,
1480 # librsync.librsyncError, C.UnknownFileTypeError), exc:
1481 # TracebackArchive.add()
1482- except (IOError, EnvironmentError, librsync.librsyncError, path.PathException), exc:
1483+ except (IOError, EnvironmentError, librsync.librsyncError, path.PathException) as exc:
1484 if (not isinstance(exc, EnvironmentError) or
1485 ((exc[0] in errno.errorcode)
1486 and errno.errorcode[exc[0]] in
1487
1488=== modified file 'duplicity/selection.py'
1489--- duplicity/selection.py 2013-12-27 06:39:00 +0000
1490+++ duplicity/selection.py 2014-04-17 22:26:47 +0000
1491@@ -256,7 +256,7 @@
1492 self.add_selection_func(self.regexp_get_sf(arg, 1))
1493 else:
1494 assert 0, "Bad selection option %s" % opt
1495- except SelectError, e:
1496+ except SelectError as e:
1497 self.parse_catch_error(e)
1498 assert filelists_index == len(filelists)
1499 self.parse_last_excludes()
1500@@ -351,7 +351,7 @@
1501 continue # skip blanks
1502 try:
1503 tuple = self.filelist_parse_line(line, include)
1504- except FilePrefixError, exc:
1505+ except FilePrefixError as exc:
1506 incr_warnings(exc)
1507 continue
1508 tuple_list.append(tuple)
1509
1510=== modified file 'duplicity/statistics.py'
1511--- duplicity/statistics.py 2010-07-22 19:15:11 +0000
1512+++ duplicity/statistics.py 2014-04-17 22:26:47 +0000
1513@@ -104,7 +104,7 @@
1514 if not index:
1515 filename = "."
1516 else:
1517- filename = apply(os.path.join, index)
1518+ filename = os.path.join(*index)
1519 if use_repr:
1520 # use repr to quote newlines in relative filename, then
1521 # take of leading and trailing quote and quote spaces.
1522@@ -123,7 +123,7 @@
1523 for attr, val_string in zip(self.stat_file_attrs,
1524 lineparts[-len(self.stat_file_attrs):]):
1525 try:
1526- val = long(val_string)
1527+ val = int(val_string)
1528 except ValueError:
1529 try:
1530 val = float(val_string)
1531@@ -230,7 +230,7 @@
1532 error(line)
1533 try:
1534 try:
1535- val1 = long(value_string)
1536+ val1 = int(value_string)
1537 except ValueError:
1538 val1 = None
1539 val2 = float(value_string)
1540
1541=== modified file 'duplicity/tempdir.py'
1542--- duplicity/tempdir.py 2013-12-27 06:39:00 +0000
1543+++ duplicity/tempdir.py 2014-04-17 22:26:47 +0000
1544@@ -213,7 +213,7 @@
1545 """
1546 self.__lock.acquire()
1547 try:
1548- if self.__pending.has_key(fname):
1549+ if fname in self.__pending:
1550 log.Debug(_("Forgetting temporary file %s") % util.ufn(fname))
1551 del(self.__pending[fname])
1552 else:
1553
1554=== modified file 'duplicity/util.py'
1555--- duplicity/util.py 2014-01-17 16:44:46 +0000
1556+++ duplicity/util.py 2014-04-17 22:26:47 +0000
1557@@ -80,7 +80,7 @@
1558 """
1559 try:
1560 return fn()
1561- except Exception, e:
1562+ except Exception as e:
1563 if globals.ignore_errors:
1564 log.Warn(_("IGNORED_ERROR: Warning: ignoring error as requested: %s: %s")
1565 % (e.__class__.__name__, str(e)))
1566@@ -131,7 +131,7 @@
1567 """
1568 try:
1569 fn(filename)
1570- except OSError, ex:
1571+ except OSError as ex:
1572 if ex.errno == errno.ENOENT:
1573 pass
1574 else:
1575
1576=== modified file 'testing/gnupg/trustdb.gpg'
1577Binary files testing/gnupg/trustdb.gpg 2011-11-04 12:48:04 +0000 and testing/gnupg/trustdb.gpg 2014-04-17 22:26:47 +0000 differ
1578=== modified file 'testing/tests/test_badupload.py'
1579--- testing/tests/test_badupload.py 2014-04-16 02:43:43 +0000
1580+++ testing/tests/test_badupload.py 2014-04-17 22:26:47 +0000
1581@@ -36,7 +36,7 @@
1582 try:
1583 self.backup("full", "testfiles/dir1", options=["--skip-volume=1"])
1584 self.fail()
1585- except CmdError, e:
1586+ except CmdError as e:
1587 self.assertEqual(e.exit_status, 44)
1588
1589 if __name__ == "__main__":
1590
1591=== modified file 'testing/tests/test_collections.py'
1592--- testing/tests/test_collections.py 2014-04-16 02:43:43 +0000
1593+++ testing/tests/test_collections.py 2014-04-17 22:26:47 +0000
1594@@ -111,8 +111,8 @@
1595 assert 0
1596
1597 chain = chains[0]
1598- assert chain.end_time == 1029654270L
1599- assert chain.fullset.time == 1029626221L
1600+ assert chain.end_time == 1029654270
1601+ assert chain.fullset.time == 1029626221
1602
1603 def test_collections_status(self):
1604 """Test CollectionStatus object's set_values()"""
1605@@ -121,7 +121,7 @@
1606 assert cs.values_set
1607
1608 assert cs.matched_chain_pair
1609- assert cs.matched_chain_pair[0].end_time == 1029826800L
1610+ assert cs.matched_chain_pair[0].end_time == 1029826800
1611 assert len(cs.all_backup_chains) == 1, cs.all_backup_chains
1612
1613 cs = collections.CollectionsStatus(self.real_backend, globals.archive_dir).set_values()
1614@@ -153,7 +153,7 @@
1615 for op in orphaned_paths: print op
1616 assert 0
1617 assert len(chains) == 1, chains
1618- assert chains[0].end_time == 1029826800L
1619+ assert chains[0].end_time == 1029826800
1620
1621 def sigchain_fileobj_get(self, local):
1622 """Return chain, local if local is true with filenames added"""
1623
1624=== modified file 'testing/tests/test_filenaming.py'
1625--- testing/tests/test_filenaming.py 2014-04-16 02:43:43 +0000
1626+++ testing/tests/test_filenaming.py 2014-04-17 22:26:47 +0000
1627@@ -88,13 +88,13 @@
1628 pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dns.h112bi.h14rg0.st.g")
1629 assert pr, pr
1630 assert pr.type == "new-sig"
1631- assert pr.end_time == 1029826800L
1632+ assert pr.end_time == 1029826800
1633
1634 if not globals.short_filenames:
1635 pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.gpg")
1636 assert pr, pr
1637 assert pr.type == "new-sig"
1638- assert pr.end_time == 1029826800L
1639+ assert pr.end_time == 1029826800
1640
1641 pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dfs.h5dixs.st.g")
1642 assert pr, pr
1643@@ -108,14 +108,14 @@
1644 assert pr, pr
1645 assert pr.partial
1646 assert pr.type == "new-sig"
1647- assert pr.end_time == 1029826800L
1648+ assert pr.end_time == 1029826800
1649
1650 if not globals.short_filenames:
1651 pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "duplicity-new-signatures.2002-08-18T00:04:30-07:00.to.2002-08-20T00:00:00-07:00.sigtar.part.gpg")
1652 assert pr, pr
1653 assert pr.partial
1654 assert pr.type == "new-sig"
1655- assert pr.end_time == 1029826800L
1656+ assert pr.end_time == 1029826800
1657
1658 pr = file_naming.parse(globals.file_prefix + globals.file_prefix_signature + "dfs.h5dixs.st.p.g")
1659 assert pr, pr
1660
1661=== modified file 'testing/tests/test_lazy.py'
1662--- testing/tests/test_lazy.py 2014-04-16 02:43:43 +0000
1663+++ testing/tests/test_lazy.py 2014-04-17 22:26:47 +0000
1664@@ -21,6 +21,7 @@
1665
1666 import helper
1667 import unittest, pickle, sys
1668+from functools import reduce
1669
1670 from duplicity.lazy import * #@UnusedWildImport
1671
1672@@ -33,7 +34,7 @@
1673 empty = lambda s: iter([])
1674
1675 def __init__(self, *args):
1676- apply (unittest.TestCase.__init__, (self,) + args)
1677+ unittest.TestCase.__init__(self, *args)
1678 self.falseerror = self.falseerror_maker()
1679 self.trueerror = self.trueerror_maker()
1680 self.emptygen = self.emptygen_maker()
1681
1682=== modified file 'testing/tests/test_patchdir.py'
1683--- testing/tests/test_patchdir.py 2014-04-16 02:43:43 +0000
1684+++ testing/tests/test_patchdir.py 2014-04-17 22:26:47 +0000
1685@@ -209,12 +209,12 @@
1686 self.out = out
1687
1688 def snapshot(self):
1689- """Make a snapshot ROPath, permissions 0600"""
1690+ """Make a snapshot ROPath, permissions 0o600"""
1691 ss = self.out.append("snapshot")
1692 fout = ss.open("wb")
1693 fout.write("hello, world!")
1694 assert not fout.close()
1695- ss.chmod(0600)
1696+ ss.chmod(0o600)
1697 ss.difftype = "snapshot"
1698 return ss
1699
1700@@ -230,24 +230,24 @@
1701 return deltabuf
1702
1703 def delta1(self):
1704- """Make a delta ROPath, permissions 0640"""
1705+ """Make a delta ROPath, permissions 0o640"""
1706 delta1 = self.out.append("delta1")
1707 fout = delta1.open("wb")
1708 fout.write(self.get_delta("hello, world!",
1709 "aonseuth aosetnuhaonsuhtansoetuhaoe"))
1710 assert not fout.close()
1711- delta1.chmod(0640)
1712+ delta1.chmod(0o640)
1713 delta1.difftype = "diff"
1714 return delta1
1715
1716 def delta2(self):
1717- """Make another delta ROPath, permissions 0644"""
1718+ """Make another delta ROPath, permissions 0o644"""
1719 delta2 = self.out.append("delta1")
1720 fout = delta2.open("wb")
1721 fout.write(self.get_delta("aonseuth aosetnuhaonsuhtansoetuhaoe",
1722 "3499 34957839485792357 458348573"))
1723 assert not fout.close()
1724- delta2.chmod(0644)
1725+ delta2.chmod(0o644)
1726 delta2.difftype = "diff"
1727 return delta2
1728
1729
1730=== added file 'testing/tests/test_python3.py'
1731--- testing/tests/test_python3.py 1970-01-01 00:00:00 +0000
1732+++ testing/tests/test_python3.py 2014-04-17 22:26:47 +0000
1733@@ -0,0 +1,61 @@
1734+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1735+#
1736+# Copyright 2014 Michael Terry <michael.terry@canonical.com>
1737+#
1738+# This file is part of duplicity.
1739+#
1740+# Duplicity is free software; you can redistribute it and/or modify it
1741+# under the terms of the GNU General Public License as published by the
1742+# Free Software Foundation; either version 2 of the License, or (at your
1743+# option) any later version.
1744+#
1745+# Duplicity is distributed in the hope that it will be useful, but
1746+# WITHOUT ANY WARRANTY; without even the implied warranty of
1747+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
1748+# General Public License for more details.
1749+#
1750+# You should have received a copy of the GNU General Public License
1751+# along with duplicity; if not, write to the Free Software Foundation,
1752+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
1753+
1754+import helper
1755+import os
1756+import subprocess
1757+import unittest
1758+
1759+helper.setup()
1760+
1761+
1762+class Python3ReadinessTest(unittest.TestCase):
1763+ def test_2to3(self):
1764+ _top_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
1765+ "..", "..")
1766+
1767+ # As we modernize the source code, we can remove more and more nofixes
1768+ process = subprocess.Popen(["2to3",
1769+ "--nofix=dict",
1770+ "--nofix=filter",
1771+ "--nofix=map",
1772+ "--nofix=next",
1773+ "--nofix=print",
1774+ "--nofix=types",
1775+ "--nofix=unicode",
1776+ "--nofix=xrange",
1777+ # The following fixes we don't want to remove, since they are false
1778+ # positives, things we don't care about, or real incompatibilities
1779+ # but which 2to3 can fix for us better automatically.
1780+ "--nofix=callable",
1781+ "--nofix=future",
1782+ "--nofix=imports",
1783+ "--nofix=raw_input",
1784+ "--nofix=urllib",
1785+ _top_dir],
1786+ stdout=subprocess.PIPE,
1787+ stderr=subprocess.PIPE)
1788+ output = process.communicate()[0]
1789+ self.assertEqual(0, process.returncode)
1790+ self.assertEqual("", output, output)
1791+
1792+
1793+if __name__ == "__main__":
1794+ unittest.main()
1795
1796=== modified file 'testing/tests/test_restart.py'
1797--- testing/tests/test_restart.py 2014-04-16 02:43:43 +0000
1798+++ testing/tests/test_restart.py 2014-04-17 22:26:47 +0000
1799@@ -326,7 +326,7 @@
1800 self.backup("full", "testfiles/blocktartest")
1801 # Create an exact clone of the snapshot folder in the sigtar already.
1802 # Permissions and mtime must match.
1803- os.mkdir("testfiles/snapshot", 0755)
1804+ os.mkdir("testfiles/snapshot", 0o755)
1805 os.utime("testfiles/snapshot", (1030384548, 1030384548))
1806 # Adjust the sigtar.gz file to have a bogus second snapshot/ entry
1807 # at the beginning.
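
For quick reference, the hunks above are mechanical applications of a handful of 2to3 patterns: "except E, e" becomes "except E as e", apply() becomes argument unpacking, dict.has_key() becomes the in operator, long() becomes int(), and octal literals gain the 0o prefix. The snippet below is a minimal before/after sketch of those patterns only; the names in it are made up for illustration and are not taken from the duplicity source.

    # Illustrative only -- none of these names come from duplicity itself.
    import errno
    import os

    def remove_if_present(path):
        """'except E, e' becomes 'except E as e' (valid on 2.6+, required on 3)."""
        try:
            os.unlink(path)
        except OSError as e:          # was: except OSError, e:
            if e.errno != errno.ENOENT:
                raise

    def join_index(index):
        """apply(f, args) becomes f(*args)."""
        return os.path.join(*index)   # was: apply(os.path.join, index)

    def forget(pending, fname):
        """d.has_key(k) becomes 'k in d'."""
        if fname in pending:          # was: pending.has_key(fname)
            del pending[fname]

    MODE = 0o600                      # was: 0600 (octal literals need the 0o prefix)
    SIZE = int("1029826800")          # was: long("1029826800"); Python 3 has only int

All of these forms are accepted by Python 2.6, so nothing here changes runtime behaviour on the currently supported interpreters. Assuming the test suite is run through normal unittest discovery over testing/tests, the new test_python3.py will fail the build whenever 2to3 reports a fixable construct outside the carved-out nofix list.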
