Merge lp:~mgorse/duplicity/0.8-series into lp:~duplicity-team/duplicity/0.8-series
- 0.8-series
- Merge into 0.8-series
Proposed by
Mgorse
Status: Merged
Merged at revision: 1335
Proposed branch: lp:~mgorse/duplicity/0.8-series
Merge into: lp:~duplicity-team/duplicity/0.8-series
Diff against target: 2983 lines (+560/-566), 8 files modified:
duplicity/diffdir.py (+100/-100), duplicity/manifest.py (+109/-109),
duplicity/patchdir.py (+75/-75), duplicity/path.py (+154/-154),
duplicity/statistics.py (+96/-96), duplicity/util.py (+25/-25),
po/duplicity.pot (+1/-1), testing/test_code.py (+0/-6)
To merge this branch: bzr merge lp:~mgorse/duplicity/0.8-series
Related bugs: none
Related blueprints: Python 3 Support (High)
Reviewer: duplicity-team — Date requested: (not set) — Status: Pending
Review via email: mp+355568@code.launchpad.net
Commit message
Description of the change
Annotate more strings in duplicity/*.py
To post a comment you must log in.
Revision history for this message
Mgorse (mgorse) wrote:
lp:~mgorse/duplicity/0.8-series
updated
1335. By Kenneth Loafman

* Merged in lp:~mgorse/duplicity/0.8-series
  - Adorn more strings in duplicity/*.py
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'duplicity/diffdir.py' |
2 | --- duplicity/diffdir.py 2018-07-27 02:18:12 +0000 |
3 | +++ duplicity/diffdir.py 2018-09-24 21:19:45 +0000 |
4 | @@ -19,7 +19,7 @@ |
5 | # along with duplicity; if not, write to the Free Software Foundation, |
6 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
7 | |
8 | -""" |
9 | +u""" |
10 | Functions for producing signatures and deltas of directories |
11 | |
12 | Note that the main processes of this module have two parts. In the |
13 | @@ -49,14 +49,14 @@ |
14 | |
15 | |
16 | def DirSig(path_iter): |
17 | - """ |
18 | + u""" |
19 | Alias for SigTarBlockIter below |
20 | """ |
21 | return SigTarBlockIter(path_iter) |
22 | |
23 | |
24 | def DirFull(path_iter): |
25 | - """ |
26 | + u""" |
27 | Return a tarblock full backup of items in path_iter |
28 | |
29 | A full backup is just a diff starting from nothing (it may be less |
30 | @@ -64,18 +64,18 @@ |
31 | will be easy to split up the tar and make the volumes the same |
32 | sizes). |
33 | """ |
34 | - return DirDelta(path_iter, cStringIO.StringIO("")) |
35 | + return DirDelta(path_iter, cStringIO.StringIO(u"")) |
36 | |
37 | |
38 | def DirFull_WriteSig(path_iter, sig_outfp): |
39 | - """ |
40 | + u""" |
41 | Return full backup like above, but also write signature to sig_outfp |
42 | """ |
43 | - return DirDelta_WriteSig(path_iter, cStringIO.StringIO(""), sig_outfp) |
44 | + return DirDelta_WriteSig(path_iter, cStringIO.StringIO(u""), sig_outfp) |
45 | |
46 | |
47 | def DirDelta(path_iter, dirsig_fileobj_list): |
48 | - """ |
49 | + u""" |
50 | Produce tarblock diff given dirsig_fileobj_list and pathiter |
51 | |
52 | dirsig_fileobj_list should either be a tar fileobj or a list of |
53 | @@ -96,7 +96,7 @@ |
54 | |
55 | |
56 | def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None): |
57 | - """ |
58 | + u""" |
59 | Called by get_delta_iter, report error in getting delta |
60 | """ |
61 | if new_path: |
62 | @@ -104,13 +104,13 @@ |
63 | elif sig_path: |
64 | index_string = sig_path.get_relative_path() |
65 | else: |
66 | - assert 0, "Both new and sig are None for some reason" |
67 | - log.Warn(_("Error %s getting delta for %s") % (str(exc), util.fsdecode(index_string))) |
68 | + assert 0, u"Both new and sig are None for some reason" |
69 | + log.Warn(_(u"Error %s getting delta for %s") % (str(exc), util.fsdecode(index_string))) |
70 | return None |
71 | |
72 | |
73 | def get_delta_path(new_path, sig_path, sigTarFile=None): |
74 | - """ |
75 | + u""" |
76 | Return new delta_path which, when read, writes sig to sig_fileobj, |
77 | if sigTarFile is not None |
78 | """ |
79 | @@ -119,35 +119,35 @@ |
80 | ti = new_path.get_tarinfo() |
81 | index = new_path.index |
82 | delta_path = new_path.get_ropath() |
83 | - log.Debug(_("Getting delta of %s and %s") % (new_path, sig_path)) |
84 | + log.Debug(_(u"Getting delta of %s and %s") % (new_path, sig_path)) |
85 | |
86 | def callback(sig_string): |
87 | - """ |
88 | + u""" |
89 | Callback activated when FileWithSignature read to end |
90 | """ |
91 | ti.size = len(sig_string) |
92 | - ti.name = "signature/" + "/".join(index) |
93 | + ti.name = b"signature/" + b"/".join(index) |
94 | sigTarFile.addfile(ti, cStringIO.StringIO(sig_string)) |
95 | |
96 | - if new_path.isreg() and sig_path and sig_path.isreg() and sig_path.difftype == "signature": |
97 | - delta_path.difftype = "diff" |
98 | - old_sigfp = sig_path.open("rb") |
99 | - newfp = FileWithReadCounter(new_path.open("rb")) |
100 | + if new_path.isreg() and sig_path and sig_path.isreg() and sig_path.difftype == u"signature": |
101 | + delta_path.difftype = u"diff" |
102 | + old_sigfp = sig_path.open(u"rb") |
103 | + newfp = FileWithReadCounter(new_path.open(u"rb")) |
104 | if sigTarFile: |
105 | newfp = FileWithSignature(newfp, callback, |
106 | new_path.getsize()) |
107 | delta_path.setfileobj(librsync.DeltaFile(old_sigfp, newfp)) |
108 | else: |
109 | - delta_path.difftype = "snapshot" |
110 | + delta_path.difftype = u"snapshot" |
111 | if sigTarFile: |
112 | - ti.name = "snapshot/" + "/".join(index) |
113 | + ti.name = b"snapshot/" + b"/".join(index) |
114 | if not new_path.isreg(): |
115 | if sigTarFile: |
116 | sigTarFile.addfile(ti) |
117 | if stats: |
118 | stats.SourceFileSize += delta_path.getsize() |
119 | else: |
120 | - newfp = FileWithReadCounter(new_path.open("rb")) |
121 | + newfp = FileWithReadCounter(new_path.open(u"rb")) |
122 | if sigTarFile: |
123 | newfp = FileWithSignature(newfp, callback, |
124 | new_path.getsize()) |
125 | @@ -158,27 +158,27 @@ |
126 | |
127 | |
128 | def log_delta_path(delta_path, new_path=None, stats=None): |
129 | - """ |
130 | + u""" |
131 | Look at delta path and log delta. Add stats if new_path is set |
132 | """ |
133 | - if delta_path.difftype == "snapshot": |
134 | + if delta_path.difftype == u"snapshot": |
135 | if new_path and stats: |
136 | stats.add_new_file(new_path) |
137 | - log.Info(_("A %s") % |
138 | + log.Info(_(u"A %s") % |
139 | (util.fsdecode(delta_path.get_relative_path())), |
140 | log.InfoCode.diff_file_new, |
141 | util.escape(delta_path.get_relative_path())) |
142 | else: |
143 | if new_path and stats: |
144 | stats.add_changed_file(new_path) |
145 | - log.Info(_("M %s") % |
146 | + log.Info(_(u"M %s") % |
147 | (util.fsdecode(delta_path.get_relative_path())), |
148 | log.InfoCode.diff_file_changed, |
149 | util.escape(delta_path.get_relative_path())) |
150 | |
151 | |
152 | def get_delta_iter(new_iter, sig_iter, sig_fileobj=None): |
153 | - """ |
154 | + u""" |
155 | Generate delta iter from new Path iter and sig Path iter. |
156 | |
157 | For each delta path of regular file type, path.difftype with be |
158 | @@ -189,25 +189,25 @@ |
159 | """ |
160 | collated = collate2iters(new_iter, sig_iter) |
161 | if sig_fileobj: |
162 | - sigTarFile = util.make_tarfile("w", sig_fileobj) |
163 | + sigTarFile = util.make_tarfile(u"w", sig_fileobj) |
164 | else: |
165 | sigTarFile = None |
166 | for new_path, sig_path in collated: |
167 | - log.Debug(_("Comparing %s and %s") % (new_path and util.uindex(new_path.index), |
168 | - sig_path and util.uindex(sig_path.index))) |
169 | + log.Debug(_(u"Comparing %s and %s") % (new_path and util.uindex(new_path.index), |
170 | + sig_path and util.uindex(sig_path.index))) |
171 | if not new_path or not new_path.type: |
172 | # File doesn't exist (but ignore attempts to delete base dir; |
173 | # old versions of duplicity could have written out the sigtar in |
174 | # such a way as to fool us; LP: #929067) |
175 | if sig_path and sig_path.exists() and sig_path.index != (): |
176 | # but signature says it did |
177 | - log.Info(_("D %s") % |
178 | + log.Info(_(u"D %s") % |
179 | (util.fsdecode(sig_path.get_relative_path())), |
180 | log.InfoCode.diff_file_deleted, |
181 | util.escape(sig_path.get_relative_path())) |
182 | if sigTarFile: |
183 | ti = ROPath(sig_path.index).get_tarinfo() |
184 | - ti.name = "deleted/" + "/".join(sig_path.index) |
185 | + ti.name = u"deleted/" + u"/".join(sig_path.index) |
186 | sigTarFile.addfile(ti) |
187 | stats.add_deleted_file(sig_path) |
188 | yield ROPath(sig_path.index) |
189 | @@ -231,28 +231,28 @@ |
190 | |
191 | |
192 | def sigtar2path_iter(sigtarobj): |
193 | - """ |
194 | + u""" |
195 | Convert signature tar file object open for reading into path iter |
196 | """ |
197 | - tf = util.make_tarfile("r", sigtarobj) |
198 | + tf = util.make_tarfile(u"r", sigtarobj) |
199 | tf.debug = 1 |
200 | for tarinfo in tf: |
201 | tiname = util.get_tarinfo_name(tarinfo) |
202 | - for prefix in ["signature/", "snapshot/", "deleted/"]: |
203 | + for prefix in [b"signature/", b"snapshot/", b"deleted/"]: |
204 | if tiname.startswith(prefix): |
205 | # strip prefix and '/' from name and set it to difftype |
206 | name, difftype = tiname[len(prefix):], prefix[:-1] |
207 | break |
208 | else: |
209 | - raise DiffDirException("Bad tarinfo name %s" % (tiname,)) |
210 | + raise DiffDirException(u"Bad tarinfo name %s" % (tiname,)) |
211 | |
212 | - index = tuple(name.split("/")) |
213 | + index = tuple(name.split(u"/")) |
214 | if not index[-1]: |
215 | index = index[:-1] # deal with trailing /, "" |
216 | |
217 | ropath = ROPath(index) |
218 | ropath.difftype = difftype |
219 | - if difftype == "signature" or difftype == "snapshot": |
220 | + if difftype == u"signature" or difftype == u"snapshot": |
221 | ropath.init_from_tarinfo(tarinfo) |
222 | if ropath.isreg(): |
223 | ropath.setfileobj(tf.extractfile(tarinfo)) |
224 | @@ -261,7 +261,7 @@ |
225 | |
226 | |
227 | def collate2iters(riter1, riter2): |
228 | - """ |
229 | + u""" |
230 | Collate two iterators. |
231 | |
232 | The elements yielded by each iterator must be have an index |
233 | @@ -305,7 +305,7 @@ |
234 | |
235 | |
236 | def combine_path_iters(path_iter_list): |
237 | - """ |
238 | + u""" |
239 | Produce new iterator by combining the iterators in path_iter_list |
240 | |
241 | This new iter will iterate every path that is in path_iter_list in |
242 | @@ -320,7 +320,7 @@ |
243 | path_iter_list.reverse() |
244 | |
245 | def get_triple(iter_index): |
246 | - """ |
247 | + u""" |
248 | Represent the next element as a triple, to help sorting |
249 | """ |
250 | try: |
251 | @@ -330,7 +330,7 @@ |
252 | return (path.index, iter_index, path) |
253 | |
254 | def refresh_triple_list(triple_list): |
255 | - """ |
256 | + u""" |
257 | Update all elements with path_index same as first element |
258 | """ |
259 | path_index = triple_list[0][0] |
260 | @@ -355,7 +355,7 @@ |
261 | |
262 | |
263 | def DirDelta_WriteSig(path_iter, sig_infp_list, newsig_outfp): |
264 | - """ |
265 | + u""" |
266 | Like DirDelta but also write signature into sig_fileobj |
267 | |
268 | Like DirDelta, sig_infp_list can be a tar fileobj or a sorted list |
269 | @@ -376,26 +376,26 @@ |
270 | |
271 | |
272 | def get_combined_path_iter(sig_infp_list): |
273 | - """ |
274 | + u""" |
275 | Return path iter combining signatures in list of open sig files |
276 | """ |
277 | return combine_path_iters([sigtar2path_iter(x) for x in sig_infp_list]) |
278 | |
279 | |
280 | class FileWithReadCounter: |
281 | - """ |
282 | + u""" |
283 | File-like object which also computes amount read as it is read |
284 | """ |
285 | def __init__(self, infile): |
286 | - """FileWithReadCounter initializer""" |
287 | + u"""FileWithReadCounter initializer""" |
288 | self.infile = infile |
289 | |
290 | def read(self, length=-1): |
291 | try: |
292 | buf = self.infile.read(length) |
293 | except IOError as ex: |
294 | - buf = "" |
295 | - log.Warn(_("Error %s getting delta for %s") % (str(ex), self.infile.uc_name)) |
296 | + buf = u"" |
297 | + log.Warn(_(u"Error %s getting delta for %s") % (str(ex), self.infile.uc_name)) |
298 | if stats: |
299 | stats.SourceFileSize += len(buf) |
300 | return buf |
301 | @@ -405,13 +405,13 @@ |
302 | |
303 | |
304 | class FileWithSignature: |
305 | - """ |
306 | + u""" |
307 | File-like object which also computes signature as it is read |
308 | """ |
309 | blocksize = 32 * 1024 |
310 | |
311 | def __init__(self, infile, callback, filelen, *extra_args): |
312 | - """ |
313 | + u""" |
314 | FileTee initializer |
315 | |
316 | The object will act like infile, but whenever it is read it |
317 | @@ -442,11 +442,11 @@ |
318 | |
319 | |
320 | class TarBlock: |
321 | - """ |
322 | + u""" |
323 | Contain information to add next file to tar |
324 | """ |
325 | def __init__(self, index, data): |
326 | - """ |
327 | + u""" |
328 | TarBlock initializer - just store data |
329 | """ |
330 | self.index = index |
331 | @@ -454,7 +454,7 @@ |
332 | |
333 | |
334 | class TarBlockIter: |
335 | - """ |
336 | + u""" |
337 | A bit like an iterator, yield tar blocks given input iterator |
338 | |
339 | Unlike an iterator, however, control over the maximum size of a |
340 | @@ -462,7 +462,7 @@ |
341 | get_footer() is available. |
342 | """ |
343 | def __init__(self, input_iter): |
344 | - """ |
345 | + u""" |
346 | TarBlockIter initializer |
347 | """ |
348 | self.input_iter = input_iter |
349 | @@ -476,28 +476,28 @@ |
350 | self.remember_block = None # holds block of next block |
351 | self.queued_data = None # data to return in next next() call |
352 | |
353 | - def tarinfo2tarblock(self, index, tarinfo, file_data=""): |
354 | - """ |
355 | + def tarinfo2tarblock(self, index, tarinfo, file_data=b""): |
356 | + u""" |
357 | Make tarblock out of tarinfo and file data |
358 | """ |
359 | tarinfo.size = len(file_data) |
360 | - headers = tarinfo.tobuf(errors='replace') |
361 | + headers = tarinfo.tobuf(errors=u'replace') |
362 | blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE) # @UnusedVariable |
363 | if remainder > 0: |
364 | - filler_data = "\0" * (tarfile.BLOCKSIZE - remainder) |
365 | + filler_data = b"\0" * (tarfile.BLOCKSIZE - remainder) |
366 | else: |
367 | - filler_data = "" |
368 | - return TarBlock(index, "%s%s%s" % (headers, file_data, filler_data)) |
369 | + filler_data = b"" |
370 | + return TarBlock(index, b"%s%s%s" % (headers, file_data, filler_data)) |
371 | |
372 | def process(self, val): |
373 | - """ |
374 | + u""" |
375 | Turn next value of input_iter into a TarBlock |
376 | """ |
377 | assert not self.process_waiting |
378 | XXX # Override in subclass @UndefinedVariable |
379 | |
380 | def process_continued(self): |
381 | - """ |
382 | + u""" |
383 | Get more tarblocks |
384 | |
385 | If processing val above would produce more than one TarBlock, |
386 | @@ -507,7 +507,7 @@ |
387 | XXX # Override in subclass @UndefinedVariable |
388 | |
389 | def next(self): |
390 | - """ |
391 | + u""" |
392 | Return next block and update offset |
393 | """ |
394 | if self.queued_data is not None: |
395 | @@ -539,19 +539,19 @@ |
396 | return 64 * 1024 |
397 | |
398 | def get_previous_index(self): |
399 | - """ |
400 | + u""" |
401 | Return index of last tarblock, or None if no previous index |
402 | """ |
403 | return self.previous_index, self.previous_block |
404 | |
405 | def queue_index_data(self, data): |
406 | - """ |
407 | + u""" |
408 | Next time next() is called, we will return data instead of processing |
409 | """ |
410 | self.queued_data = data |
411 | |
412 | def remember_next_index(self): |
413 | - """ |
414 | + u""" |
415 | When called, remember the index of the next block iterated |
416 | """ |
417 | self.remember_next = True |
418 | @@ -559,29 +559,29 @@ |
419 | self.remember_block = None |
420 | |
421 | def recall_index(self): |
422 | - """ |
423 | + u""" |
424 | Retrieve index remembered with remember_next_index |
425 | """ |
426 | return self.remember_value, self.remember_block |
427 | |
428 | def get_footer(self): |
429 | - """ |
430 | + u""" |
431 | Return closing string for tarfile, reset offset |
432 | """ |
433 | blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) # @UnusedVariable |
434 | self.offset = 0 |
435 | - return '\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0 |
436 | + return u'\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0 |
437 | |
438 | def __iter__(self): |
439 | return self |
440 | |
441 | |
442 | class DummyBlockIter(TarBlockIter): |
443 | - """ |
444 | + u""" |
445 | TarBlockIter that does no file reading |
446 | """ |
447 | def process(self, delta_ropath): |
448 | - """ |
449 | + u""" |
450 | Get a fake tarblock from delta_ropath |
451 | """ |
452 | ti = delta_ropath.get_tarinfo() |
453 | @@ -601,28 +601,28 @@ |
454 | |
455 | |
456 | class SigTarBlockIter(TarBlockIter): |
457 | - """ |
458 | + u""" |
459 | TarBlockIter that yields blocks of a signature tar from path_iter |
460 | """ |
461 | def process(self, path): |
462 | - """ |
463 | + u""" |
464 | Return associated signature TarBlock from path |
465 | """ |
466 | ti = path.get_tarinfo() |
467 | if path.isreg(): |
468 | - sfp = librsync.SigFile(path.open("rb"), |
469 | + sfp = librsync.SigFile(path.open(u"rb"), |
470 | get_block_size(path.getsize())) |
471 | sigbuf = sfp.read() |
472 | sfp.close() |
473 | - ti.name = "signature/" + "/".join(path.index) |
474 | + ti.name = u"signature/" + u"/".join(path.index) |
475 | return self.tarinfo2tarblock(path.index, ti, sigbuf) |
476 | else: |
477 | - ti.name = "snapshot/" + "/".join(path.index) |
478 | + ti.name = u"snapshot/" + u"/".join(path.index) |
479 | return self.tarinfo2tarblock(path.index, ti) |
480 | |
481 | |
482 | class DeltaTarBlockIter(TarBlockIter): |
483 | - """ |
484 | + u""" |
485 | TarBlockIter that yields parts of a deltatar file |
486 | |
487 | Unlike SigTarBlockIter, the argument to __init__ is a |
488 | @@ -630,15 +630,15 @@ |
489 | calculated. |
490 | """ |
491 | def process(self, delta_ropath): |
492 | - """ |
493 | + u""" |
494 | Get a tarblock from delta_ropath |
495 | """ |
496 | def add_prefix(tarinfo, prefix): |
497 | - """Add prefix to the name of a tarinfo file""" |
498 | - if tarinfo.name == ".": |
499 | - tarinfo.name = prefix + "/" |
500 | + u"""Add prefix to the name of a tarinfo file""" |
501 | + if tarinfo.name == b".": |
502 | + tarinfo.name = prefix + b"/" |
503 | else: |
504 | - tarinfo.name = "%s/%s" % (prefix, tarinfo.name) |
505 | + tarinfo.name = b"%s/%s" % (prefix, tarinfo.name) |
506 | |
507 | ti = delta_ropath.get_tarinfo() |
508 | index = delta_ropath.index |
509 | @@ -646,29 +646,29 @@ |
510 | # Return blocks of deleted files or fileless snapshots |
511 | if not delta_ropath.type or not delta_ropath.fileobj: |
512 | if not delta_ropath.type: |
513 | - add_prefix(ti, "deleted") |
514 | + add_prefix(ti, u"deleted") |
515 | else: |
516 | - assert delta_ropath.difftype == "snapshot" |
517 | - add_prefix(ti, "snapshot") |
518 | + assert delta_ropath.difftype == u"snapshot" |
519 | + add_prefix(ti, b"snapshot") |
520 | return self.tarinfo2tarblock(index, ti) |
521 | |
522 | # Now handle single volume block case |
523 | - fp = delta_ropath.open("rb") |
524 | + fp = delta_ropath.open(u"rb") |
525 | data, last_block = self.get_data_block(fp) |
526 | if stats: |
527 | stats.RawDeltaSize += len(data) |
528 | if last_block: |
529 | - if delta_ropath.difftype == "snapshot": |
530 | - add_prefix(ti, "snapshot") |
531 | - elif delta_ropath.difftype == "diff": |
532 | - add_prefix(ti, "diff") |
533 | + if delta_ropath.difftype == u"snapshot": |
534 | + add_prefix(ti, b"snapshot") |
535 | + elif delta_ropath.difftype == u"diff": |
536 | + add_prefix(ti, b"diff") |
537 | else: |
538 | - assert 0, "Unknown difftype" |
539 | + assert 0, u"Unknown difftype" |
540 | return self.tarinfo2tarblock(index, ti, data) |
541 | |
542 | # Finally, do multivol snapshot or diff case |
543 | - full_name = "multivol_%s/%s" % (delta_ropath.difftype, ti.name) |
544 | - ti.name = full_name + "/1" |
545 | + full_name = u"multivol_%s/%s" % (delta_ropath.difftype, ti.name) |
546 | + ti.name = full_name + u"/1" |
547 | self.process_prefix = full_name |
548 | self.process_fp = fp |
549 | self.process_ropath = delta_ropath |
550 | @@ -677,26 +677,26 @@ |
551 | return self.tarinfo2tarblock(index, ti, data) |
552 | |
553 | def get_data_block(self, fp): |
554 | - """ |
555 | + u""" |
556 | Return pair (next data block, boolean last data block) |
557 | """ |
558 | read_size = self.get_read_size() |
559 | buf = fp.read(read_size) |
560 | if len(buf) < read_size: |
561 | if fp.close(): |
562 | - raise DiffDirException("Error closing file") |
563 | + raise DiffDirException(u"Error closing file") |
564 | return (buf, True) |
565 | else: |
566 | return (buf, False) |
567 | |
568 | def process_continued(self): |
569 | - """ |
570 | + u""" |
571 | Return next volume in multivol diff or snapshot |
572 | """ |
573 | assert self.process_waiting |
574 | ropath = self.process_ropath |
575 | ti, index = ropath.get_tarinfo(), ropath.index |
576 | - ti.name = "%s/%d" % (self.process_prefix, self.process_next_vol_number) |
577 | + ti.name = u"%s/%d" % (self.process_prefix, self.process_next_vol_number) |
578 | data, last_block = self.get_data_block(self.process_fp) |
579 | if stats: |
580 | stats.RawDeltaSize += len(data) |
581 | @@ -712,13 +712,13 @@ |
582 | |
583 | |
584 | def write_block_iter(block_iter, out_obj): |
585 | - """ |
586 | + u""" |
587 | Write block_iter to filename, path, or file object |
588 | """ |
589 | if isinstance(out_obj, Path): |
590 | - fp = open(out_obj.name, "wb") |
591 | + fp = open(out_obj.name, u"wb") |
592 | elif isinstance(out_obj, types.StringTypes): |
593 | - fp = open(out_obj, "wb") |
594 | + fp = open(out_obj, u"wb") |
595 | else: |
596 | fp = out_obj |
597 | for block in block_iter: |
598 | @@ -730,7 +730,7 @@ |
599 | |
600 | |
601 | def get_block_size(file_len): |
602 | - """ |
603 | + u""" |
604 | Return a reasonable block size to use on files of length file_len |
605 | |
606 | If the block size is too big, deltas will be bigger than is |
607 | |
608 | === modified file 'duplicity/manifest.py' |
609 | --- duplicity/manifest.py 2018-09-06 11:14:11 +0000 |
610 | +++ duplicity/manifest.py 2018-09-24 21:19:45 +0000 |
611 | @@ -19,7 +19,7 @@ |
612 | # along with duplicity; if not, write to the Free Software Foundation, |
613 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
614 | |
615 | -"""Create and edit manifest for session contents""" |
616 | +u"""Create and edit manifest for session contents""" |
617 | |
618 | from future_builtins import filter |
619 | |
620 | @@ -32,18 +32,18 @@ |
621 | |
622 | |
623 | class ManifestError(Exception): |
624 | - """ |
625 | + u""" |
626 | Exception raised when problem with manifest |
627 | """ |
628 | pass |
629 | |
630 | |
631 | class Manifest: |
632 | - """ |
633 | + u""" |
634 | List of volumes and information about each one |
635 | """ |
636 | def __init__(self, fh=None): |
637 | - """ |
638 | + u""" |
639 | Create blank Manifest |
640 | |
641 | @param fh: fileobj for manifest |
642 | @@ -59,7 +59,7 @@ |
643 | self.files_changed = [] |
644 | |
645 | def set_dirinfo(self): |
646 | - """ |
647 | + u""" |
648 | Set information about directory from globals, |
649 | and write to manifest file. |
650 | |
651 | @@ -70,13 +70,13 @@ |
652 | self.local_dirname = globals.local_path.name # @UndefinedVariable |
653 | if self.fh: |
654 | if self.hostname: |
655 | - self.fh.write("Hostname %s\n" % self.hostname) |
656 | + self.fh.write(u"Hostname %s\n" % self.hostname) |
657 | if self.local_dirname: |
658 | - self.fh.write("Localdir %s\n" % Quote(self.local_dirname)) |
659 | + self.fh.write(u"Localdir %s\n" % Quote(self.local_dirname)) |
660 | return self |
661 | |
662 | def check_dirinfo(self): |
663 | - """ |
664 | + u""" |
665 | Return None if dirinfo is the same, otherwise error message |
666 | |
667 | Does not raise an error message if hostname or local_dirname |
668 | @@ -89,41 +89,41 @@ |
669 | return |
670 | |
671 | if self.hostname and self.hostname != globals.hostname: |
672 | - errmsg = _("Fatal Error: Backup source host has changed.\n" |
673 | - "Current hostname: %s\n" |
674 | - "Previous hostname: %s") % (globals.hostname, self.hostname) |
675 | + errmsg = _(u"Fatal Error: Backup source host has changed.\n" |
676 | + u"Current hostname: %s\n" |
677 | + u"Previous hostname: %s") % (globals.hostname, self.hostname) |
678 | code = log.ErrorCode.hostname_mismatch |
679 | - code_extra = "%s %s" % (util.escape(globals.hostname), util.escape(self.hostname)) |
680 | + code_extra = u"%s %s" % (util.escape(globals.hostname), util.escape(self.hostname)) |
681 | |
682 | elif (self.local_dirname and self.local_dirname != globals.local_path.name): # @UndefinedVariable |
683 | - errmsg = _("Fatal Error: Backup source directory has changed.\n" |
684 | - "Current directory: %s\n" |
685 | - "Previous directory: %s") % (globals.local_path.name, self.local_dirname) # @UndefinedVariable |
686 | + errmsg = _(u"Fatal Error: Backup source directory has changed.\n" |
687 | + u"Current directory: %s\n" |
688 | + u"Previous directory: %s") % (globals.local_path.name, self.local_dirname) # @UndefinedVariable |
689 | code = log.ErrorCode.source_dir_mismatch |
690 | - code_extra = "%s %s" % (util.escape(globals.local_path.name), |
691 | - util.escape(self.local_dirname)) # @UndefinedVariable |
692 | + code_extra = u"%s %s" % (util.escape(globals.local_path.name), |
693 | + util.escape(self.local_dirname)) # @UndefinedVariable |
694 | else: |
695 | return |
696 | |
697 | - log.FatalError(errmsg + "\n\n" + |
698 | - _("Aborting because you may have accidentally tried to " |
699 | - "backup two different data sets to the same remote " |
700 | - "location, or using the same archive directory. If " |
701 | - "this is not a mistake, use the " |
702 | - "--allow-source-mismatch switch to avoid seeing this " |
703 | - "message"), code, code_extra) |
704 | + log.FatalError(errmsg + u"\n\n" + |
705 | + _(u"Aborting because you may have accidentally tried to " |
706 | + u"backup two different data sets to the same remote " |
707 | + u"location, or using the same archive directory. If " |
708 | + u"this is not a mistake, use the " |
709 | + u"--allow-source-mismatch switch to avoid seeing this " |
710 | + u"message"), code, code_extra) |
711 | |
712 | def set_files_changed_info(self, files_changed): |
713 | if files_changed: |
714 | self.files_changed = files_changed |
715 | |
716 | if self.fh: |
717 | - self.fh.write("Filelist %d\n" % len(self.files_changed)) |
718 | + self.fh.write(u"Filelist %d\n" % len(self.files_changed)) |
719 | for fileinfo in self.files_changed: |
720 | - self.fh.write(" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0]))) |
721 | + self.fh.write(b" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0]))) |
722 | |
723 | def add_volume_info(self, vi): |
724 | - """ |
725 | + u""" |
726 | Add volume info vi to manifest and write to manifest |
727 | |
728 | @param vi: volume info to add |
729 | @@ -134,10 +134,10 @@ |
730 | vol_num = vi.volume_number |
731 | self.volume_info_dict[vol_num] = vi |
732 | if self.fh: |
733 | - self.fh.write(vi.to_string() + "\n") |
734 | + self.fh.write(vi.to_string() + b"\n") |
735 | |
736 | def del_volume_info(self, vol_num): |
737 | - """ |
738 | + u""" |
739 | Remove volume vol_num from the manifest |
740 | |
741 | @param vol_num: volume number to delete |
742 | @@ -148,87 +148,87 @@ |
743 | try: |
744 | del self.volume_info_dict[vol_num] |
745 | except Exception: |
746 | - raise ManifestError("Volume %d not present in manifest" % (vol_num,)) |
747 | + raise ManifestError(u"Volume %d not present in manifest" % (vol_num,)) |
748 | |
749 | def to_string(self): |
750 | - """ |
751 | + u""" |
752 | Return string version of self (just concatenate vi strings) |
753 | |
754 | @rtype: string |
755 | @return: self in string form |
756 | """ |
757 | - result = "" |
758 | + result = u"" |
759 | if self.hostname: |
760 | - result += "Hostname %s\n" % self.hostname |
761 | + result += b"Hostname %s\n" % self.hostname |
762 | if self.local_dirname: |
763 | - result += "Localdir %s\n" % Quote(self.local_dirname) |
764 | + result += b"Localdir %s\n" % Quote(self.local_dirname) |
765 | |
766 | - result += "Filelist %d\n" % len(self.files_changed) |
767 | + result += b"Filelist %d\n" % len(self.files_changed) |
768 | for fileinfo in self.files_changed: |
769 | - result += " %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0])) |
770 | + result += b" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0])) |
771 | |
772 | vol_num_list = self.volume_info_dict.keys() |
773 | vol_num_list.sort() |
774 | |
775 | def vol_num_to_string(vol_num): |
776 | return self.volume_info_dict[vol_num].to_string() |
777 | - result = "%s%s\n" % (result, |
778 | - "\n".join(map(vol_num_to_string, vol_num_list))) |
779 | + result = b"%s%s\n" % (result, |
780 | + b"\n".join(map(vol_num_to_string, vol_num_list))) |
781 | return result |
782 | |
783 | __str__ = to_string |
784 | |
785 | def from_string(self, s): |
786 | - """ |
787 | + u""" |
788 | Initialize self from string s, return self |
789 | """ |
790 | def get_field(fieldname): |
791 | - """ |
792 | + u""" |
793 | Return the value of a field by parsing s, or None if no field |
794 | """ |
795 | - m = re.search("(^|\\n)%s\\s(.*?)\n" % fieldname, s, re.I) |
796 | + m = re.search(u"(^|\\n)%s\\s(.*?)\n" % fieldname, s, re.I) |
797 | if not m: |
798 | return None |
799 | else: |
800 | return Unquote(m.group(2)) |
801 | - self.hostname = get_field("hostname") |
802 | - self.local_dirname = get_field("localdir") |
803 | + self.hostname = get_field(u"hostname") |
804 | + self.local_dirname = get_field(u"localdir") |
805 | |
806 | highest_vol = 0 |
807 | latest_vol = 0 |
808 | - vi_regexp = re.compile("(?:^|\\n)(volume\\s.*(?:\\n.*)*?)(?=\\nvolume\\s|$)", re.I) |
809 | + vi_regexp = re.compile(u"(?:^|\\n)(volume\\s.*(?:\\n.*)*?)(?=\\nvolume\\s|$)", re.I) |
810 | vi_iterator = vi_regexp.finditer(s) |
811 | for match in vi_iterator: |
812 | vi = VolumeInfo().from_string(match.group(1)) |
813 | self.add_volume_info(vi) |
814 | latest_vol = vi.volume_number |
815 | highest_vol = max(highest_vol, latest_vol) |
816 | - log.Debug(_("Found manifest volume %s") % latest_vol) |
817 | + log.Debug(_(u"Found manifest volume %s") % latest_vol) |
818 | # If we restarted after losing some remote volumes, the highest volume |
819 | # seen may be higher than the last volume recorded. That is, the |
820 | # manifest could contain "vol1, vol2, vol3, vol2." If so, we don't |
821 | # want to keep vol3's info. |
822 | for i in range(latest_vol + 1, highest_vol + 1): |
823 | self.del_volume_info(i) |
824 | - log.Info(_("Found %s volumes in manifest") % latest_vol) |
825 | + log.Info(_(u"Found %s volumes in manifest") % latest_vol) |
826 | |
827 | # Get file changed list - not needed if --file-changed not present |
828 | filecount = 0 |
829 | if globals.file_changed is not None: |
830 | - filelist_regexp = re.compile("(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S) |
831 | + filelist_regexp = re.compile(u"(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S) |
832 | match = filelist_regexp.search(s) |
833 | if match: |
834 | filecount = int(match.group(2)) |
835 | if filecount > 0: |
836 | def parse_fileinfo(line): |
837 | fileinfo = line.strip().split() |
838 | - return (fileinfo[0], ''.join(fileinfo[1:])) |
839 | + return (fileinfo[0], u''.join(fileinfo[1:])) |
840 | |
841 | - self.files_changed = list(map(parse_fileinfo, match.group(3).split('\n'))) |
842 | + self.files_changed = list(map(parse_fileinfo, match.group(3).split(u'\n'))) |
843 | |
844 | if filecount != len(self.files_changed): |
845 | - log.Error(_("Manifest file '%s' is corrupt: File count says %d, File list contains %d" % |
846 | - (self.fh.base if self.fh else "", filecount, len(self.files_changed)))) |
847 | + log.Error(_(u"Manifest file '%s' is corrupt: File count says %d, File list contains %d" % |
848 | + (self.fh.base if self.fh else u"", filecount, len(self.files_changed)))) |
849 | self.corrupt_filelist = True |
850 | |
851 | return self |
852 | @@ -237,7 +237,7 @@ |
853 | return self.files_changed |
854 | |
855 | def __eq__(self, other): |
856 | - """ |
857 | + u""" |
858 | Two manifests are equal if they contain the same volume infos |
859 | """ |
860 | vi_list1 = self.volume_info_dict.keys() |
861 | @@ -246,39 +246,39 @@ |
862 | vi_list2.sort() |
863 | |
864 | if vi_list1 != vi_list2: |
865 | - log.Notice(_("Manifests not equal because different volume numbers")) |
866 | + log.Notice(_(u"Manifests not equal because different volume numbers")) |
867 | return False |
868 | |
869 | for i in range(len(vi_list1)): |
870 | if not vi_list1[i] == vi_list2[i]: |
871 | - log.Notice(_("Manifests not equal because volume lists differ")) |
872 | + log.Notice(_(u"Manifests not equal because volume lists differ")) |
873 | return False |
874 | |
875 | if (self.hostname != other.hostname or |
876 | self.local_dirname != other.local_dirname): |
877 | - log.Notice(_("Manifests not equal because hosts or directories differ")) |
878 | + log.Notice(_(u"Manifests not equal because hosts or directories differ")) |
879 | return False |
880 | |
881 | return True |
882 | |
883 | def __ne__(self, other): |
884 | - """ |
885 | + u""" |
886 | Defines !=. Not doing this always leads to annoying bugs... |
887 | """ |
888 | return not self.__eq__(other) |
889 | |
890 | def write_to_path(self, path): |
891 | - """ |
892 | + u""" |
893 | Write string version of manifest to given path |
894 | """ |
895 | assert not path.exists() |
896 | - fout = path.open("wb") |
897 | + fout = path.open(u"wb") |
898 | fout.write(self.to_string()) |
899 | assert not fout.close() |
900 | path.setdata() |
901 | |
902 | def get_containing_volumes(self, index_prefix): |
903 | - """ |
904 | + u""" |
905 | Return list of volume numbers that may contain index_prefix |
906 | """ |
907 | return filter(lambda vol_num: |
908 | @@ -287,18 +287,18 @@ |
909 | |
910 | |
911 | class VolumeInfoError(Exception): |
912 | - """ |
913 | + u""" |
914 | Raised when there is a problem initializing a VolumeInfo from string |
915 | """ |
916 | pass |
917 | |
918 | |
919 | class VolumeInfo: |
920 | - """ |
921 | + u""" |
922 | Information about a single volume |
923 | """ |
924 | def __init__(self): |
925 | - """VolumeInfo initializer""" |
926 | + u"""VolumeInfo initializer""" |
927 | self.volume_number = None |
928 | self.start_index = None |
929 | self.start_block = None |
930 | @@ -309,7 +309,7 @@ |
931 | def set_info(self, vol_number, |
932 | start_index, start_block, |
933 | end_index, end_block): |
934 | - """ |
935 | + u""" |
936 | Set essential VolumeInfo information, return self |
937 | |
938 | Call with starting and ending paths stored in the volume. If |
939 | @@ -325,13 +325,13 @@ |
940 | return self |
941 | |
942 | def set_hash(self, hash_name, data): |
943 | - """ |
944 | + u""" |
945 | Set the value of hash hash_name (e.g. "MD5") to data |
946 | """ |
947 | self.hashes[hash_name] = data |
948 | |
949 | def get_best_hash(self): |
950 | - """ |
951 | + u""" |
952 | Return pair (hash_type, hash_data) |
953 | |
954 | SHA1 is the best hash, and MD5 is the second best hash. None |
955 | @@ -340,59 +340,59 @@ |
956 | if not self.hashes: |
957 | return None |
958 | try: |
959 | - return ("SHA1", self.hashes['SHA1']) |
960 | + return (u"SHA1", self.hashes[u'SHA1']) |
961 | except KeyError: |
962 | pass |
963 | try: |
964 | - return ("MD5", self.hashes['MD5']) |
965 | + return (u"MD5", self.hashes[u'MD5']) |
966 | except KeyError: |
967 | pass |
968 | return self.hashes.items()[0] |
969 | |
970 | def to_string(self): |
971 | - """ |
972 | + u""" |
973 | Return nicely formatted string reporting all information |
974 | """ |
975 | def index_to_string(index): |
976 | - """Return printable version of index without any whitespace""" |
977 | + u"""Return printable version of index without any whitespace""" |
978 | if index: |
979 | - s = "/".join(index) |
980 | + s = b"/".join(index) |
981 | return Quote(s) |
982 | else: |
983 | - return "." |
984 | + return b"." |
985 | |
986 | - slist = ["Volume %d:" % self.volume_number] |
987 | - whitespace = " " |
988 | - slist.append("%sStartingPath %s %s" % |
989 | - (whitespace, index_to_string(self.start_index), (self.start_block or " "))) |
990 | - slist.append("%sEndingPath %s %s" % |
991 | - (whitespace, index_to_string(self.end_index), (self.end_block or " "))) |
992 | + slist = [b"Volume %d:" % self.volume_number] |
993 | + whitespace = b" " |
994 | + slist.append(b"%sStartingPath %s %s" % |
995 | + (whitespace, index_to_string(self.start_index), (self.start_block or b" "))) |
996 | + slist.append(b"%sEndingPath %s %s" % |
997 | + (whitespace, index_to_string(self.end_index), (self.end_block or b" "))) |
998 | for key in self.hashes: |
999 | - slist.append("%sHash %s %s" % |
1000 | + slist.append(b"%sHash %s %s" % |
1001 | (whitespace, key.encode(), self.hashes[key])) |
1002 | - return "\n".join(slist) |
1003 | + return b"\n".join(slist) |
1004 | |
1005 | __str__ = to_string |
1006 | |
1007 | def from_string(self, s): |
1008 | - """ |
1009 | + u""" |
1010 | Initialize self from string s as created by to_string |
1011 | """ |
1012 | def string_to_index(s): |
1013 | - """ |
1014 | + u""" |
1015 | Return tuple index from string |
1016 | """ |
1017 | s = Unquote(s) |
1018 | - if s == ".": |
1019 | + if s == b".": |
1020 | return () |
1021 | - return tuple(s.split("/")) |
1022 | + return tuple(s.split(b"/")) |
1023 | |
1024 | - linelist = s.strip().split("\n") |
1025 | + linelist = s.strip().split(b"\n") |
1026 | |
1027 | # Set volume number |
1028 | - m = re.search("^Volume ([0-9]+):", linelist[0], re.I) |
1029 | + m = re.search(u"^Volume ([0-9]+):", linelist[0], re.I) |
1030 | if not m: |
1031 | - raise VolumeInfoError("Bad first line '%s'" % (linelist[0],)) |
1032 | + raise VolumeInfoError(u"Bad first line '%s'" % (linelist[0],)) |
1033 | self.volume_number = int(m.group(1)) |
1034 | |
1035 | # Set other fields |
1036 | @@ -402,61 +402,61 @@ |
1037 | line_split = line.strip().split() |
1038 | field_name = line_split[0].lower() |
1039 | other_fields = line_split[1:] |
1040 | - if field_name == "Volume": |
1041 | - log.Warn(_("Warning, found extra Volume identifier")) |
1042 | + if field_name == u"Volume": |
1043 | + log.Warn(_(u"Warning, found extra Volume identifier")) |
1044 | break |
1045 | - elif field_name == "startingpath": |
1046 | + elif field_name == u"startingpath": |
1047 | self.start_index = string_to_index(other_fields[0]) |
1048 | if len(other_fields) > 1: |
1049 | self.start_block = int(other_fields[1]) |
1050 | else: |
1051 | self.start_block = None |
1052 | - elif field_name == "endingpath": |
1053 | + elif field_name == u"endingpath": |
1054 | self.end_index = string_to_index(other_fields[0]) |
1055 | if len(other_fields) > 1: |
1056 | self.end_block = int(other_fields[1]) |
1057 | else: |
1058 | self.end_block = None |
1059 | - elif field_name == "hash": |
1060 | + elif field_name == u"hash": |
1061 | self.set_hash(other_fields[0], other_fields[1]) |
1062 | |
1063 | if self.start_index is None or self.end_index is None: |
1064 | - raise VolumeInfoError("Start or end index not set") |
1065 | + raise VolumeInfoError(u"Start or end index not set") |
1066 | return self |
1067 | |
1068 | def __eq__(self, other): |
1069 | - """ |
1070 | + u""" |
1071 | Used in test suite |
1072 | """ |
1073 | if not isinstance(other, VolumeInfo): |
1074 | - log.Notice(_("Other is not VolumeInfo")) |
1075 | + log.Notice(_(u"Other is not VolumeInfo")) |
1076 | return None |
1077 | if self.volume_number != other.volume_number: |
1078 | - log.Notice(_("Volume numbers don't match")) |
1079 | + log.Notice(_(u"Volume numbers don't match")) |
1080 | return None |
1081 | if self.start_index != other.start_index: |
1082 | - log.Notice(_("start_indicies don't match")) |
1083 | + log.Notice(_(u"start_indicies don't match")) |
1084 | return None |
1085 | if self.end_index != other.end_index: |
1086 | - log.Notice(_("end_index don't match")) |
1087 | + log.Notice(_(u"end_index don't match")) |
1088 | return None |
1089 | hash_list1 = self.hashes.items() |
1090 | hash_list1.sort() |
1091 | hash_list2 = other.hashes.items() |
1092 | hash_list2.sort() |
1093 | if hash_list1 != hash_list2: |
1094 | - log.Notice(_("Hashes don't match")) |
1095 | + log.Notice(_(u"Hashes don't match")) |
1096 | return None |
1097 | return 1 |
1098 | |
1099 | def __ne__(self, other): |
1100 | - """ |
1101 | + u""" |
1102 | Defines != |
1103 | """ |
1104 | return not self.__eq__(other) |
1105 | |
1106 | def contains(self, index_prefix, recursive=1): |
1107 | - """ |
1108 | + u""" |
1109 | Return true if volume might contain index |
1110 | |
1111 | If recursive is true, then return true if any index starting |
1112 | @@ -471,11 +471,11 @@ |
1113 | return self.start_index <= index_prefix <= self.end_index |
1114 | |
1115 | |
1116 | -nonnormal_char_re = re.compile("(\\s|[\\\\\"'])") |
1117 | +nonnormal_char_re = re.compile(u"(\\s|[\\\\\"'])") |
1118 | |
1119 | |
1120 | def Quote(s): |
1121 | - """ |
1122 | + u""" |
1123 | Return quoted version of s safe to put in a manifest or volume info |
1124 | """ |
1125 | if not nonnormal_char_re.search(s): |
1126 | @@ -483,29 +483,29 @@ |
1127 | slist = [] |
1128 | for char in s: |
1129 | if nonnormal_char_re.search(char): |
1130 | - slist.append("\\x%02x" % ord(char)) |
1131 | + slist.append(b"\\x%02x" % ord(char)) |
1132 | else: |
1133 | slist.append(char) |
1134 | - return '"%s"' % "".join(slist) |
1135 | + return b'"%s"' % u"".join(slist) |
1136 | |
1137 | |
1138 | def Unquote(quoted_string): |
1139 | - """ |
1140 | + u""" |
1141 | Return original string from quoted_string produced by above |
1142 | """ |
1143 | - if not quoted_string[0] == '"' or quoted_string[0] == "'": |
1144 | + if not quoted_string[0] == b'"' or quoted_string[0] == b"'": |
1145 | return quoted_string |
1146 | assert quoted_string[0] == quoted_string[-1] |
1147 | return_list = [] |
1148 | i = 1 # skip initial char |
1149 | while i < len(quoted_string) - 1: |
1150 | char = quoted_string[i] |
1151 | - if char == "\\": |
1152 | + if char == b"\\": |
1153 | # quoted section |
1154 | - assert quoted_string[i + 1] == "x" |
1155 | + assert quoted_string[i + 1] == b"x" |
1156 | return_list.append(chr(int(quoted_string[i + 2:i + 4], 16))) |
1157 | i += 4 |
1158 | else: |
1159 | return_list.append(char) |
1160 | i += 1 |
1161 | - return "".join(return_list) |
1162 | + return b"".join(return_list) |
1163 | |
1164 | === modified file 'duplicity/patchdir.py' |
1165 | --- duplicity/patchdir.py 2018-07-24 11:52:33 +0000 |
1166 | +++ duplicity/patchdir.py 2018-09-24 21:19:45 +0000 |
1167 | @@ -37,7 +37,7 @@ |
1168 | from duplicity.path import * # @UnusedWildImport |
1169 | from duplicity.lazy import * # @UnusedWildImport |
1170 | |
1171 | -"""Functions for patching of directories""" |
1172 | +u"""Functions for patching of directories""" |
1173 | |
1174 | |
1175 | class PatchDirException(Exception): |
1176 | @@ -45,20 +45,20 @@ |
1177 | |
1178 | |
1179 | def Patch(base_path, difftar_fileobj): |
1180 | - """Patch given base_path and file object containing delta""" |
1181 | - diff_tarfile = tarfile.TarFile("arbitrary", "r", difftar_fileobj) |
1182 | + u"""Patch given base_path and file object containing delta""" |
1183 | + diff_tarfile = tarfile.TarFile(u"arbitrary", u"r", difftar_fileobj) |
1184 | patch_diff_tarfile(base_path, diff_tarfile) |
1185 | assert not difftar_fileobj.close() |
1186 | |
1187 | |
1188 | def Patch_from_iter(base_path, fileobj_iter, restrict_index=()): |
1189 | - """Patch given base_path and iterator of delta file objects""" |
1190 | + u"""Patch given base_path and iterator of delta file objects""" |
1191 | diff_tarfile = TarFile_FromFileobjs(fileobj_iter) |
1192 | patch_diff_tarfile(base_path, diff_tarfile, restrict_index) |
1193 | |
1194 | |
1195 | def patch_diff_tarfile(base_path, diff_tarfile, restrict_index=()): |
1196 | - """Patch given Path object using delta tarfile (as in tarfile.TarFile) |
1197 | + u"""Patch given Path object using delta tarfile (as in tarfile.TarFile) |
1198 | |
1199 | If restrict_index is set, ignore any deltas in diff_tarfile that |
1200 | don't start with restrict_index. |
1201 | @@ -77,12 +77,12 @@ |
1202 | ITR = IterTreeReducer(PathPatcher, [base_path]) |
1203 | for basis_path, diff_ropath in collated: |
1204 | if basis_path: |
1205 | - log.Info(_("Patching %s") % (util.fsdecode(basis_path.get_relative_path())), |
1206 | + log.Info(_(u"Patching %s") % (util.fsdecode(basis_path.get_relative_path())), |
1207 | log.InfoCode.patch_file_patching, |
1208 | util.escape(basis_path.get_relative_path())) |
1209 | ITR(basis_path.index, basis_path, diff_ropath) |
1210 | else: |
1211 | - log.Info(_("Patching %s") % (util.fsdecode(diff_ropath.get_relative_path())), |
1212 | + log.Info(_(u"Patching %s") % (util.fsdecode(diff_ropath.get_relative_path())), |
1213 | log.InfoCode.patch_file_patching, |
1214 | util.escape(diff_ropath.get_relative_path())) |
1215 | ITR(diff_ropath.index, basis_path, diff_ropath) |
1216 | @@ -96,7 +96,7 @@ |
1217 | |
1218 | |
1219 | def filter_path_iter(path_iter, index): |
1220 | - """Rewrite path elements of path_iter so they start with index |
1221 | + u"""Rewrite path elements of path_iter so they start with index |
1222 | |
1223 | Discard any that doesn't start with index, and remove the index |
1224 | prefix from the rest. |
1225 | @@ -111,7 +111,7 @@ |
1226 | |
1227 | |
1228 | def difftar2path_iter(diff_tarfile): |
1229 | - """Turn file-like difftarobj into iterator of ROPaths""" |
1230 | + u"""Turn file-like difftarobj into iterator of ROPaths""" |
1231 | tar_iter = iter(diff_tarfile) |
1232 | multivol_fileobj = None |
1233 | |
1234 | @@ -132,7 +132,7 @@ |
1235 | ropath = ROPath(index) |
1236 | ropath.init_from_tarinfo(tarinfo_list[0]) |
1237 | ropath.difftype = difftype |
1238 | - if difftype == "deleted": |
1239 | + if difftype == u"deleted": |
1240 | ropath.type = None |
1241 | elif ropath.isreg(): |
1242 | if multivol: |
1243 | @@ -148,61 +148,61 @@ |
1244 | |
1245 | |
1246 | def get_index_from_tarinfo(tarinfo): |
1247 | - """Return (index, difftype, multivol) pair from tarinfo object""" |
1248 | - for prefix in ["snapshot/", "diff/", "deleted/", |
1249 | - "multivol_diff/", "multivol_snapshot/"]: |
1250 | + u"""Return (index, difftype, multivol) pair from tarinfo object""" |
1251 | + for prefix in [b"snapshot/", b"diff/", b"deleted/", |
1252 | + b"multivol_diff/", b"multivol_snapshot/"]: |
1253 | tiname = util.get_tarinfo_name(tarinfo) |
1254 | if tiname.startswith(prefix): |
1255 | name = tiname[len(prefix):] # strip prefix |
1256 | - if prefix.startswith("multivol"): |
1257 | - if prefix == "multivol_diff/": |
1258 | - difftype = "diff" |
1259 | + if prefix.startswith(u"multivol"): |
1260 | + if prefix == u"multivol_diff/": |
1261 | + difftype = u"diff" |
1262 | else: |
1263 | - difftype = "snapshot" |
1264 | + difftype = u"snapshot" |
1265 | multivol = 1 |
1266 | name, num_subs = \ |
1267 | - re.subn("(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$", |
1268 | - "\\2", tiname) |
1269 | + re.subn(b"(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$", |
1270 | + b"\\2", tiname) |
1271 | if num_subs != 1: |
1272 | raise PatchDirException(u"Unrecognized diff entry %s" % |
1273 | util.fsdecode(tiname)) |
1274 | else: |
1275 | difftype = prefix[:-1] # strip trailing / |
1276 | name = tiname[len(prefix):] |
1277 | - if name.endswith("/"): |
1278 | + if name.endswith(b"/"): |
1279 | name = name[:-1] # strip trailing /'s |
1280 | multivol = 0 |
1281 | break |
1282 | else: |
1283 | raise PatchDirException(u"Unrecognized diff entry %s" % |
1284 | util.fsdecode(tiname)) |
1285 | - if name == "." or name == "": |
1286 | + if name == b"." or name == b"": |
1287 | index = () |
1288 | else: |
1289 | - index = tuple(name.split("/")) |
1290 | - if '..' in index: |
1291 | + index = tuple(name.split(b"/")) |
1292 | + if b'..' in index: |
1293 | raise PatchDirException(u"Tar entry %s contains '..'. Security " |
1294 | - "violation" % util.fsdecode(tiname)) |
1295 | + u"violation" % util.fsdecode(tiname)) |
1296 | return (index, difftype, multivol) |
1297 | |
1298 | |
1299 | class Multivol_Filelike: |
1300 | - """Emulate a file like object from multivols |
1301 | + u"""Emulate a file like object from multivols |
1302 | |
1303 | Maintains a buffer about the size of a volume. When it is read() |
1304 | to the end, pull in more volumes as desired. |
1305 | |
1306 | """ |
1307 | def __init__(self, tf, tar_iter, tarinfo_list, index): |
1308 | - """Initializer. tf is TarFile obj, tarinfo is first tarinfo""" |
1309 | + u"""Initializer. tf is TarFile obj, tarinfo is first tarinfo""" |
1310 | self.tf, self.tar_iter = tf, tar_iter |
1311 | self.tarinfo_list = tarinfo_list # must store as list for write access |
1312 | self.index = index |
1313 | - self.buffer = "" |
1314 | + self.buffer = b"" |
1315 | self.at_end = 0 |
1316 | |
1317 | def read(self, length=-1): |
1318 | - """Read length bytes from file""" |
1319 | + u"""Read length bytes from file""" |
1320 | if length < 0: |
1321 | while self.addtobuffer(): |
1322 | pass |
1323 | @@ -218,7 +218,7 @@ |
1324 | return result |
1325 | |
1326 | def addtobuffer(self): |
1327 | - """Add next chunk to buffer""" |
1328 | + u"""Add next chunk to buffer""" |
1329 | if self.at_end: |
1330 | return None |
1331 | index, difftype, multivol = get_index_from_tarinfo( # @UnusedVariable |
1332 | @@ -242,24 +242,24 @@ |
1333 | return 1 |
1334 | |
1335 | def close(self): |
1336 | - """If not at end, read remaining data""" |
1337 | + u"""If not at end, read remaining data""" |
1338 | if not self.at_end: |
1339 | while 1: |
1340 | - self.buffer = "" |
1341 | + self.buffer = b"" |
1342 | if not self.addtobuffer(): |
1343 | break |
1344 | self.at_end = 1 |
1345 | |
1346 | |
1347 | class PathPatcher(ITRBranch): |
1348 | - """Used by DirPatch, process the given basis and diff""" |
1349 | + u"""Used by DirPatch, process the given basis and diff""" |
1350 | def __init__(self, base_path): |
1351 | - """Set base_path, Path of root of tree""" |
1352 | + u"""Set base_path, Path of root of tree""" |
1353 | self.base_path = base_path |
1354 | self.dir_diff_ropath = None |
1355 | |
1356 | def start_process(self, index, basis_path, diff_ropath): |
1357 | - """Start processing when diff_ropath is a directory""" |
1358 | + u"""Start processing when diff_ropath is a directory""" |
1359 | if not (diff_ropath and diff_ropath.isdir()): |
1360 | assert index == (), util.uindex(index) # should only happen for first elem |
1361 | self.fast_process(index, basis_path, diff_ropath) |
1362 | @@ -276,44 +276,44 @@ |
1363 | self.dir_diff_ropath = diff_ropath |
1364 | |
1365 | def end_process(self): |
1366 | - """Copy directory permissions when leaving tree""" |
1367 | + u"""Copy directory permissions when leaving tree""" |
1368 | if self.dir_diff_ropath: |
1369 | self.dir_diff_ropath.copy_attribs(self.dir_basis_path) |
1370 | |
1371 | def can_fast_process(self, index, basis_path, diff_ropath): |
1372 | - """No need to recurse if diff_ropath isn't a directory""" |
1373 | + u"""No need to recurse if diff_ropath isn't a directory""" |
1374 | return not (diff_ropath and diff_ropath.isdir()) |
1375 | |
1376 | def fast_process(self, index, basis_path, diff_ropath): |
1377 | - """For use when neither is a directory""" |
1378 | + u"""For use when neither is a directory""" |
1379 | if not diff_ropath: |
1380 | return # no change |
1381 | elif not basis_path: |
1382 | - if diff_ropath.difftype == "deleted": |
1383 | + if diff_ropath.difftype == u"deleted": |
1384 | pass # already deleted |
1385 | else: |
1386 | # just copy snapshot over |
1387 | diff_ropath.copy(self.base_path.new_index(index)) |
1388 | - elif diff_ropath.difftype == "deleted": |
1389 | + elif diff_ropath.difftype == u"deleted": |
1390 | if basis_path.isdir(): |
1391 | basis_path.deltree() |
1392 | else: |
1393 | basis_path.delete() |
1394 | - elif not basis_path.isreg() or (basis_path.isreg() and diff_ropath.difftype == "snapshot"): |
1395 | + elif not basis_path.isreg() or (basis_path.isreg() and diff_ropath.difftype == u"snapshot"): |
1396 | if basis_path.isdir(): |
1397 | basis_path.deltree() |
1398 | else: |
1399 | basis_path.delete() |
1400 | diff_ropath.copy(basis_path) |
1401 | else: |
1402 | - assert diff_ropath.difftype == "diff", diff_ropath.difftype |
1403 | + assert diff_ropath.difftype == u"diff", diff_ropath.difftype |
1404 | basis_path.patch_with_attribs(diff_ropath) |
1405 | |
1406 | |
1407 | class TarFile_FromFileobjs: |
1408 | - """Like a tarfile.TarFile iterator, but read from multiple fileobjs""" |
1409 | + u"""Like a tarfile.TarFile iterator, but read from multiple fileobjs""" |
1410 | def __init__(self, fileobj_iter): |
1411 | - """Make new tarinfo iterator |
1412 | + u"""Make new tarinfo iterator |
1413 | |
1414 | fileobj_iter should be an iterator of file objects opened for |
1415 | reading. They will be closed at end of reading. |
1416 | @@ -327,11 +327,11 @@ |
1417 | return self |
1418 | |
1419 | def set_tarfile(self): |
1420 | - """Set tarfile from next file object, or raise StopIteration""" |
1421 | + u"""Set tarfile from next file object, or raise StopIteration""" |
1422 | if self.current_fp: |
1423 | assert not self.current_fp.close() |
1424 | self.current_fp = next(self.fileobj_iter) |
1425 | - self.tarfile = util.make_tarfile("r", self.current_fp) |
1426 | + self.tarfile = util.make_tarfile(u"r", self.current_fp) |
1427 | self.tar_iter = iter(self.tarfile) |
1428 | |
1429 | def next(self): |
1430 | @@ -345,12 +345,12 @@ |
1431 | return next(self.tar_iter) |
1432 | |
1433 | def extractfile(self, tarinfo): |
1434 | - """Return data associated with given tarinfo""" |
1435 | + u"""Return data associated with given tarinfo""" |
1436 | return self.tarfile.extractfile(tarinfo) |
1437 | |
1438 | |
1439 | def collate_iters(iter_list): |
1440 | - """Collate iterators by index |
1441 | + u"""Collate iterators by index |
1442 | |
1443 | Input is a list of n iterators each of which must iterate elements |
1444 | with an index attribute. The elements must come out in increasing |
1445 | @@ -371,7 +371,7 @@ |
1446 | elems = overflow[:] |
1447 | |
1448 | def setrorps(overflow, elems): |
1449 | - """Set the overflow and rorps list""" |
1450 | + u"""Set the overflow and rorps list""" |
1451 | for i in range(iter_num): |
1452 | if not overflow[i] and elems[i] is None: |
1453 | try: |
1454 | @@ -381,7 +381,7 @@ |
1455 | elems[i] = None |
1456 | |
1457 | def getleastindex(elems): |
1458 | - """Return the first index in elems, assuming elems isn't empty""" |
1459 | + u"""Return the first index in elems, assuming elems isn't empty""" |
1460 | return min(map(lambda elem: elem.index, filter(lambda x: x, elems))) |
1461 | |
1462 | def yield_tuples(iter_num, overflow, elems): |
1463 | @@ -403,7 +403,7 @@ |
1464 | |
1465 | |
1466 | class IndexedTuple: |
1467 | - """Like a tuple, but has .index (used previously by collate_iters)""" |
1468 | + u"""Like a tuple, but has .index (used previously by collate_iters)""" |
1469 | def __init__(self, index, sequence): |
1470 | self.index = index |
1471 | self.data = tuple(sequence) |
1472 | @@ -412,7 +412,7 @@ |
1473 | return len(self.data) |
1474 | |
1475 | def __getitem__(self, key): |
1476 | - """This only works for numerical keys (easier this way)""" |
1477 | + u"""This only works for numerical keys (easier this way)""" |
1478 | return self.data[key] |
1479 | |
1480 | def __lt__(self, other): |
1481 | @@ -448,11 +448,11 @@ |
1482 | return None |
1483 | |
1484 | def __str__(self): |
1485 | - return "(%s).%s" % (", ".join(map(str, self.data)), self.index) |
1486 | + return u"(%s).%s" % (u", ".join(map(str, self.data)), self.index) |
1487 | |
1488 | |
1489 | def normalize_ps(patch_sequence): |
1490 | - """Given an sequence of ROPath deltas, remove blank and unnecessary |
1491 | + u"""Given an sequence of ROPath deltas, remove blank and unnecessary |
1492 | |
1493 | The sequence is assumed to be in patch order (later patches apply |
1494 | to earlier ones). A patch is unnecessary if a later one doesn't |
1495 | @@ -467,29 +467,29 @@ |
1496 | if delta is not None: |
1497 | # skip blank entries |
1498 | result_list.insert(0, delta) |
1499 | - if delta.difftype != "diff": |
1500 | + if delta.difftype != u"diff": |
1501 | break |
1502 | i -= 1 |
1503 | return result_list |
1504 | |
1505 | |
1506 | def patch_seq2ropath(patch_seq): |
1507 | - """Apply the patches in patch_seq, return single ropath""" |
1508 | + u"""Apply the patches in patch_seq, return single ropath""" |
1509 | first = patch_seq[0] |
1510 | - assert first.difftype != "diff", "First patch in sequence " \ |
1511 | - "%s was a diff" % patch_seq |
1512 | + assert first.difftype != u"diff", u"First patch in sequence " \ |
1513 | + u"%s was a diff" % patch_seq |
1514 | if not first.isreg(): |
1515 | # No need to bother with data if not regular file |
1516 | - assert len(patch_seq) == 1, "Patch sequence isn't regular, but " \ |
1517 | - "has %d entries" % len(patch_seq) |
1518 | + assert len(patch_seq) == 1, u"Patch sequence isn't regular, but " \ |
1519 | + u"has %d entries" % len(patch_seq) |
1520 | return first.get_ropath() |
1521 | |
1522 | - current_file = first.open("rb") |
1523 | + current_file = first.open(u"rb") |
1524 | |
1525 | for delta_ropath in patch_seq[1:]: |
1526 | - assert delta_ropath.difftype == "diff", delta_ropath.difftype |
1527 | + assert delta_ropath.difftype == u"diff", delta_ropath.difftype |
1528 | if not isinstance(current_file, file): |
1529 | - """ |
1530 | + u""" |
1531 | librsync insists on a real file object, which we create manually |
1532 | by using the duplicity.tempdir to tell us where. |
1533 | |
1534 | @@ -503,14 +503,14 @@ |
1535 | tempfp.seek(0) |
1536 | current_file = tempfp |
1537 | current_file = librsync.PatchedFile(current_file, |
1538 | - delta_ropath.open("rb")) |
1539 | + delta_ropath.open(u"rb")) |
1540 | result = patch_seq[-1].get_ropath() |
1541 | result.setfileobj(current_file) |
1542 | return result |
1543 | |
1544 | |
1545 | def integrate_patch_iters(iter_list): |
1546 | - """Combine a list of iterators of ropath patches |
1547 | + u"""Combine a list of iterators of ropath patches |
1548 | |
1549 | The iter_list should be sorted in patch order, and the elements in |
1550 | each iter_list need to be orderd by index. The output will be an |
1551 | @@ -527,14 +527,14 @@ |
1552 | yield final_ropath |
1553 | except Exception as e: |
1554 | filename = normalized[-1].get_ropath().get_relative_path() |
1555 | - log.Warn(_("Error '%s' patching %s") % |
1556 | + log.Warn(_(u"Error '%s' patching %s") % |
1557 | (util.uexc(e), util.fsdecode(filename)), |
1558 | log.WarningCode.cannot_process, |
1559 | util.escape(filename)) |
1560 | |
1561 | |
1562 | def tarfiles2rop_iter(tarfile_list, restrict_index=()): |
1563 | - """Integrate tarfiles of diffs into single ROPath iter |
1564 | + u"""Integrate tarfiles of diffs into single ROPath iter |
1565 | |
1566 | Then filter out all the diffs in that index which don't start with |
1567 | the restrict_index. |
1568 | @@ -548,7 +548,7 @@ |
1569 | |
1570 | |
1571 | def Write_ROPaths(base_path, rop_iter): |
1572 | - """Write out ropaths in rop_iter starting at base_path |
1573 | + u"""Write out ropaths in rop_iter starting at base_path |
1574 | |
1575 | Returns 1 if something was actually written, 0 otherwise. |
1576 | |
1577 | @@ -564,20 +564,20 @@ |
1578 | |
1579 | |
1580 | class ROPath_IterWriter(ITRBranch): |
1581 | - """Used in Write_ROPaths above |
1582 | + u"""Used in Write_ROPaths above |
1583 | |
1584 | We need to use an ITR because we have to update the |
1585 | permissions/times of directories after we write the files in them. |
1586 | |
1587 | """ |
1588 | def __init__(self, base_path): |
1589 | - """Set base_path, Path of root of tree""" |
1590 | + u"""Set base_path, Path of root of tree""" |
1591 | self.base_path = base_path |
1592 | self.dir_diff_ropath = None |
1593 | self.dir_new_path = None |
1594 | |
1595 | def start_process(self, index, ropath): |
1596 | - """Write ropath. Only handles the directory case""" |
1597 | + u"""Write ropath. Only handles the directory case""" |
1598 | if not ropath.isdir(): |
1599 | # Base may not be a directory, but rest should |
1600 | assert ropath.index == (), ropath.index |
1601 | @@ -596,19 +596,19 @@ |
1602 | self.dir_diff_ropath = ropath |
1603 | |
1604 | def end_process(self): |
1605 | - """Update information of a directory when leaving it""" |
1606 | + u"""Update information of a directory when leaving it""" |
1607 | if self.dir_diff_ropath: |
1608 | self.dir_diff_ropath.copy_attribs(self.dir_new_path) |
1609 | |
1610 | def can_fast_process(self, index, ropath): |
1611 | - """Can fast process (no recursion) if ropath isn't a directory""" |
1612 | - log.Info(_("Writing %s of type %s") % |
1613 | + u"""Can fast process (no recursion) if ropath isn't a directory""" |
1614 | + log.Info(_(u"Writing %s of type %s") % |
1615 | (util.fsdecode(ropath.get_relative_path()), ropath.type), |
1616 | log.InfoCode.patch_file_writing, |
1617 | - "%s %s" % (util.escape(ropath.get_relative_path()), ropath.type)) |
1618 | + u"%s %s" % (util.escape(ropath.get_relative_path()), ropath.type)) |
1619 | return not ropath.isdir() |
1620 | |
1621 | def fast_process(self, index, ropath): |
1622 | - """Write non-directory ropath to destination""" |
1623 | + u"""Write non-directory ropath to destination""" |
1624 | if ropath.exists(): |
1625 | ropath.copy(self.base_path.new_index(index)) |
1626 | |
1627 | === modified file 'duplicity/path.py' |
1628 | --- duplicity/path.py 2018-07-24 11:52:33 +0000 |
1629 | +++ duplicity/path.py 2018-09-24 21:19:45 +0000 |
1630 | @@ -19,7 +19,7 @@ |
1631 | # along with duplicity; if not, write to the Free Software Foundation, |
1632 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
1633 | |
1634 | -"""Wrapper class around a file like "/usr/bin/env" |
1635 | +u"""Wrapper class around a file like "/usr/bin/env" |
1636 | |
1637 | This class makes certain file operations more convenient and |
1638 | associates stat information with filenames |
1639 | @@ -53,7 +53,7 @@ |
1640 | |
1641 | |
1642 | class StatResult: |
1643 | - """Used to emulate the output of os.stat() and related""" |
1644 | + u"""Used to emulate the output of os.stat() and related""" |
1645 | # st_mode is required by the TarInfo class, but it's unclear how |
1646 | # to generate it from file permissions. |
1647 | st_mode = 0 |
1648 | @@ -64,158 +64,158 @@ |
1649 | |
1650 | |
1651 | class ROPath: |
1652 | - """Read only Path |
1653 | + u"""Read only Path |
1654 | |
1655 | Objects of this class doesn't represent real files, so they don't |
1656 | have a name. They are required to be indexed though. |
1657 | |
1658 | """ |
1659 | def __init__(self, index, stat=None): |
1660 | - """ROPath initializer""" |
1661 | + u"""ROPath initializer""" |
1662 | self.opened, self.fileobj = None, None |
1663 | self.index = index |
1664 | self.stat, self.type = None, None |
1665 | self.mode, self.devnums = None, None |
1666 | |
1667 | def set_from_stat(self): |
1668 | - """Set the value of self.type, self.mode from self.stat""" |
1669 | + u"""Set the value of self.type, self.mode from self.stat""" |
1670 | if not self.stat: |
1671 | self.type = None |
1672 | |
1673 | st_mode = self.stat.st_mode |
1674 | if stat.S_ISREG(st_mode): |
1675 | - self.type = "reg" |
1676 | + self.type = u"reg" |
1677 | elif stat.S_ISDIR(st_mode): |
1678 | - self.type = "dir" |
1679 | + self.type = u"dir" |
1680 | elif stat.S_ISLNK(st_mode): |
1681 | - self.type = "sym" |
1682 | + self.type = u"sym" |
1683 | elif stat.S_ISFIFO(st_mode): |
1684 | - self.type = "fifo" |
1685 | + self.type = u"fifo" |
1686 | elif stat.S_ISSOCK(st_mode): |
1687 | raise PathException(util.fsdecode(self.get_relative_path()) + |
1688 | u"is a socket, unsupported by tar") |
1689 | - self.type = "sock" |
1690 | + self.type = u"sock" |
1691 | elif stat.S_ISCHR(st_mode): |
1692 | - self.type = "chr" |
1693 | + self.type = u"chr" |
1694 | elif stat.S_ISBLK(st_mode): |
1695 | - self.type = "blk" |
1696 | + self.type = u"blk" |
1697 | else: |
1698 | - raise PathException("Unknown type") |
1699 | + raise PathException(u"Unknown type") |
1700 | |
1701 | self.mode = stat.S_IMODE(st_mode) |
1702 | - if self.type in ("chr", "blk"): |
1703 | + if self.type in (u"chr", u"blk"): |
1704 | try: |
1705 | self.devnums = (os.major(self.stat.st_rdev), |
1706 | os.minor(self.stat.st_rdev)) |
1707 | except: |
1708 | - log.Warn(_("Warning: %s invalid devnums (0x%X), treating as (0, 0).") |
1709 | + log.Warn(_(u"Warning: %s invalid devnums (0x%X), treating as (0, 0).") |
1710 | % (util.fsdecode(self.get_relative_path()), self.stat.st_rdev)) |
1711 | self.devnums = (0, 0) |
1712 | |
1713 | def blank(self): |
1714 | - """Black out self - set type and stat to None""" |
1715 | + u"""Black out self - set type and stat to None""" |
1716 | self.type, self.stat = None, None |
1717 | |
1718 | def exists(self): |
1719 | - """True if corresponding file exists""" |
1720 | + u"""True if corresponding file exists""" |
1721 | return self.type |
1722 | |
1723 | def isreg(self): |
1724 | - """True if self corresponds to regular file""" |
1725 | - return self.type == "reg" |
1726 | + u"""True if self corresponds to regular file""" |
1727 | + return self.type == u"reg" |
1728 | |
1729 | def isdir(self): |
1730 | - """True if self is dir""" |
1731 | - return self.type == "dir" |
1732 | + u"""True if self is dir""" |
1733 | + return self.type == u"dir" |
1734 | |
1735 | def issym(self): |
1736 | - """True if self is sym""" |
1737 | - return self.type == "sym" |
1738 | + u"""True if self is sym""" |
1739 | + return self.type == u"sym" |
1740 | |
1741 | def isfifo(self): |
1742 | - """True if self is fifo""" |
1743 | - return self.type == "fifo" |
1744 | + u"""True if self is fifo""" |
1745 | + return self.type == u"fifo" |
1746 | |
1747 | def issock(self): |
1748 | - """True is self is socket""" |
1749 | - return self.type == "sock" |
1750 | + u"""True is self is socket""" |
1751 | + return self.type == u"sock" |
1752 | |
1753 | def isdev(self): |
1754 | - """True is self is a device file""" |
1755 | - return self.type == "chr" or self.type == "blk" |
1756 | + u"""True is self is a device file""" |
1757 | + return self.type == u"chr" or self.type == u"blk" |
1758 | |
1759 | def getdevloc(self): |
1760 | - """Return device number path resides on""" |
1761 | + u"""Return device number path resides on""" |
1762 | return self.stat.st_dev |
1763 | |
1764 | def getsize(self): |
1765 | - """Return length in bytes from stat object""" |
1766 | + u"""Return length in bytes from stat object""" |
1767 | return self.stat.st_size |
1768 | |
1769 | def getmtime(self): |
1770 | - """Return mod time of path in seconds""" |
1771 | + u"""Return mod time of path in seconds""" |
1772 | return int(self.stat.st_mtime) |
1773 | |
1774 | def get_relative_path(self): |
1775 | - """Return relative path, created from index""" |
1776 | + u"""Return relative path, created from index""" |
1777 | if self.index: |
1778 | - return "/".join(self.index) |
1779 | + return b"/".join(self.index) |
1780 | else: |
1781 | - return "." |
1782 | + return b"." |
1783 | |
1784 | def getperms(self): |
1785 | - """Return permissions mode, owner and group""" |
1786 | + u"""Return permissions mode, owner and group""" |
1787 | s1 = self.stat |
1788 | - return '%s:%s %o' % (s1.st_uid, s1.st_gid, self.mode) |
1789 | + return u'%s:%s %o' % (s1.st_uid, s1.st_gid, self.mode) |
1790 | |
1791 | def open(self, mode): |
1792 | - """Return fileobj associated with self""" |
1793 | - assert mode == "rb" and self.fileobj and not self.opened, \ |
1794 | - "%s %s %s" % (mode, self.fileobj, self.opened) |
1795 | + u"""Return fileobj associated with self""" |
1796 | + assert mode == u"rb" and self.fileobj and not self.opened, \ |
1797 | + u"%s %s %s" % (mode, self.fileobj, self.opened) |
1798 | self.opened = 1 |
1799 | return self.fileobj |
1800 | |
1801 | def get_data(self): |
1802 | - """Return contents of associated fileobj in string""" |
1803 | - fin = self.open("rb") |
1804 | + u"""Return contents of associated fileobj in string""" |
1805 | + fin = self.open(u"rb") |
1806 | buf = fin.read() |
1807 | assert not fin.close() |
1808 | return buf |
1809 | |
1810 | def setfileobj(self, fileobj): |
1811 | - """Set file object returned by open()""" |
1812 | + u"""Set file object returned by open()""" |
1813 | assert not self.fileobj |
1814 | self.fileobj = fileobj |
1815 | self.opened = None |
1816 | |
1817 | def init_from_tarinfo(self, tarinfo): |
1818 | - """Set data from tarinfo object (part of tarfile module)""" |
1819 | + u"""Set data from tarinfo object (part of tarfile module)""" |
1820 | # Set the typepp |
1821 | type = tarinfo.type |
1822 | if type == tarfile.REGTYPE or type == tarfile.AREGTYPE: |
1823 | - self.type = "reg" |
1824 | + self.type = u"reg" |
1825 | elif type == tarfile.LNKTYPE: |
1826 | - raise PathException("Hard links not supported yet") |
1827 | + raise PathException(u"Hard links not supported yet") |
1828 | elif type == tarfile.SYMTYPE: |
1829 | - self.type = "sym" |
1830 | + self.type = u"sym" |
1831 | self.symtext = tarinfo.linkname |
1832 | elif type == tarfile.CHRTYPE: |
1833 | - self.type = "chr" |
1834 | + self.type = u"chr" |
1835 | self.devnums = (tarinfo.devmajor, tarinfo.devminor) |
1836 | elif type == tarfile.BLKTYPE: |
1837 | - self.type = "blk" |
1838 | + self.type = u"blk" |
1839 | self.devnums = (tarinfo.devmajor, tarinfo.devminor) |
1840 | elif type == tarfile.DIRTYPE: |
1841 | - self.type = "dir" |
1842 | + self.type = u"dir" |
1843 | elif type == tarfile.FIFOTYPE: |
1844 | - self.type = "fifo" |
1845 | + self.type = u"fifo" |
1846 | else: |
1847 | - raise PathException("Unknown tarinfo type %s" % (type,)) |
1848 | + raise PathException(u"Unknown tarinfo type %s" % (type,)) |
1849 | |
1850 | self.mode = tarinfo.mode |
1851 | self.stat = StatResult() |
1852 | |
1853 | - """ Set user and group id |
1854 | + u""" Set user and group id |
1855 | use numeric id if name lookup fails |
1856 | OR |
1857 | --numeric-owner is set |
1858 | @@ -235,13 +235,13 @@ |
1859 | |
1860 | self.stat.st_mtime = int(tarinfo.mtime) |
1861 | if self.stat.st_mtime < 0: |
1862 | - log.Warn(_("Warning: %s has negative mtime, treating as 0.") |
1863 | + log.Warn(_(u"Warning: %s has negative mtime, treating as 0.") |
1864 | % (tarinfo.uc_name)) |
1865 | self.stat.st_mtime = 0 |
1866 | self.stat.st_size = tarinfo.size |
1867 | |
1868 | def get_ropath(self): |
1869 | - """Return ropath copy of self""" |
1870 | + u"""Return ropath copy of self""" |
1871 | new_ropath = ROPath(self.index, self.stat) |
1872 | new_ropath.type, new_ropath.mode = self.type, self.mode |
1873 | if self.issym(): |
1874 | @@ -253,7 +253,7 @@ |
1875 | return new_ropath |
1876 | |
1877 | def get_tarinfo(self): |
1878 | - """Generate a tarfile.TarInfo object based on self |
1879 | + u"""Generate a tarfile.TarInfo object based on self |
1880 | |
1881 | Doesn't set size based on stat, because we may want to replace |
1882 | data wiht other stream. Size should be set separately by |
1883 | @@ -262,11 +262,11 @@ |
1884 | """ |
1885 | ti = tarfile.TarInfo() |
1886 | if self.index: |
1887 | - ti.name = "/".join(self.index) |
1888 | + ti.name = b"/".join(self.index) |
1889 | else: |
1890 | - ti.name = "." |
1891 | + ti.name = b"." |
1892 | if self.isdir(): |
1893 | - ti.name += "/" # tar dir naming convention |
1894 | + ti.name += b"/" # tar dir naming convention |
1895 | |
1896 | ti.size = 0 |
1897 | if self.type: |
1898 | @@ -283,18 +283,18 @@ |
1899 | ti.type = tarfile.SYMTYPE |
1900 | ti.linkname = self.symtext |
1901 | elif self.isdev(): |
1902 | - if self.type == "chr": |
1903 | + if self.type == u"chr": |
1904 | ti.type = tarfile.CHRTYPE |
1905 | else: |
1906 | ti.type = tarfile.BLKTYPE |
1907 | ti.devmajor, ti.devminor = self.devnums |
1908 | else: |
1909 | - raise PathException("Unrecognized type " + str(self.type)) |
1910 | + raise PathException(u"Unrecognized type " + str(self.type)) |
1911 | |
1912 | ti.mode = self.mode |
1913 | ti.uid, ti.gid = self.stat.st_uid, self.stat.st_gid |
1914 | if self.stat.st_mtime < 0: |
1915 | - log.Warn(_("Warning: %s has negative mtime, treating as 0.") |
1916 | + log.Warn(_(u"Warning: %s has negative mtime, treating as 0.") |
1917 | % (util.fsdecode(self.get_relative_path()))) |
1918 | ti.mtime = 0 |
1919 | else: |
1920 | @@ -303,14 +303,14 @@ |
1921 | try: |
1922 | ti.uname = cached_ops.getpwuid(ti.uid)[0] |
1923 | except KeyError: |
1924 | - ti.uname = '' |
1925 | + ti.uname = u'' |
1926 | try: |
1927 | ti.gname = cached_ops.getgrgid(ti.gid)[0] |
1928 | except KeyError: |
1929 | - ti.gname = '' |
1929 | +            ti.gname = u''
1931 | |
1932 | if ti.type in (tarfile.CHRTYPE, tarfile.BLKTYPE): |
1933 | - if hasattr(os, "major") and hasattr(os, "minor"): |
1934 | + if hasattr(os, u"major") and hasattr(os, u"minor"): |
1935 | ti.devmajor, ti.devminor = self.devnums |
1936 | else: |
1937 | # Currently we depend on an uninitiliazed tarinfo file to |
1938 | @@ -320,7 +320,7 @@ |
1939 | return ti |
1940 | |
1941 | def __eq__(self, other): |
1942 | - """Used to compare two ROPaths. Doesn't look at fileobjs""" |
1943 | + u"""Used to compare two ROPaths. Doesn't look at fileobjs""" |
1944 | if not self.type and not other.type: |
1945 | return 1 # neither exists |
1946 | if not self.stat and other.stat or not other.stat and self.stat: |
1947 | @@ -348,7 +348,7 @@ |
1948 | return not self.__eq__(other) |
1949 | |
1950 | def compare_verbose(self, other, include_data=0): |
1951 | - """Compare ROPaths like __eq__, but log reason if different |
1952 | + u"""Compare ROPaths like __eq__, but log reason if different |
1953 | |
1954 | This is placed in a separate function from __eq__ because |
1955 | __eq__ should be very time sensitive, and logging statements |
1956 | @@ -358,7 +358,7 @@ |
1957 | |
1958 | """ |
1959 | def log_diff(log_string): |
1960 | - log_str = _("Difference found:") + u" " + log_string |
1961 | + log_str = _(u"Difference found:") + u" " + log_string |
1962 | log.Notice(log_str % (util.fsdecode(self.get_relative_path()))) |
1963 | |
1964 | if include_data is False: |
1965 | @@ -367,24 +367,24 @@ |
1966 | if not self.type and not other.type: |
1967 | return 1 |
1968 | if not self.stat and other.stat: |
1969 | - log_diff(_("New file %s")) |
1970 | + log_diff(_(u"New file %s")) |
1971 | return 0 |
1972 | if not other.stat and self.stat: |
1973 | - log_diff(_("File %s is missing")) |
1974 | + log_diff(_(u"File %s is missing")) |
1975 | return 0 |
1976 | if self.type != other.type: |
1977 | - log_diff(_("File %%s has type %s, expected %s") % |
1978 | + log_diff(_(u"File %%s has type %s, expected %s") % |
1979 | (other.type, self.type)) |
1980 | return 0 |
1981 | |
1982 | if self.isreg() or self.isdir() or self.isfifo(): |
1983 | if not self.perms_equal(other): |
1984 | - log_diff(_("File %%s has permissions %s, expected %s") % |
1985 | + log_diff(_(u"File %%s has permissions %s, expected %s") % |
1986 | (other.getperms(), self.getperms())) |
1987 | return 0 |
1988 | if ((int(self.stat.st_mtime) != int(other.stat.st_mtime)) and |
1989 | (self.stat.st_mtime > 0 or other.stat.st_mtime > 0)): |
1990 | - log_diff(_("File %%s has mtime %s, expected %s") % |
1991 | + log_diff(_(u"File %%s has mtime %s, expected %s") % |
1992 | (dup_time.timetopretty(int(other.stat.st_mtime)), |
1993 | dup_time.timetopretty(int(self.stat.st_mtime)))) |
1994 | return 0 |
1995 | @@ -392,33 +392,33 @@ |
1996 | if self.compare_data(other): |
1997 | return 1 |
1998 | else: |
1999 | - log_diff(_("Data for file %s is different")) |
2000 | + log_diff(_(u"Data for file %s is different")) |
2001 | return 0 |
2002 | else: |
2003 | return 1 |
2004 | elif self.issym(): |
2005 | - if self.symtext == other.symtext or self.symtext + "/" == other.symtext: |
2006 | + if self.symtext == other.symtext or self.symtext + u"/" == other.symtext: |
2007 | return 1 |
2008 | else: |
2009 | - log_diff(_("Symlink %%s points to %s, expected %s") % |
2010 | + log_diff(_(u"Symlink %%s points to %s, expected %s") % |
2011 | (other.symtext, self.symtext)) |
2012 | return 0 |
2013 | elif self.isdev(): |
2014 | if not self.perms_equal(other): |
2015 | - log_diff(_("File %%s has permissions %s, expected %s") % |
2016 | + log_diff(_(u"File %%s has permissions %s, expected %s") % |
2017 | (other.getperms(), self.getperms())) |
2018 | return 0 |
2019 | if self.devnums != other.devnums: |
2020 | - log_diff(_("Device file %%s has numbers %s, expected %s") |
2021 | + log_diff(_(u"Device file %%s has numbers %s, expected %s") |
2022 | % (other.devnums, self.devnums)) |
2023 | return 0 |
2024 | return 1 |
2025 | assert 0 |
2026 | |
2027 | def compare_data(self, other): |
2028 | - """Compare data from two regular files, return true if same""" |
2029 | - f1 = self.open("rb") |
2030 | - f2 = other.open("rb") |
2031 | + u"""Compare data from two regular files, return true if same""" |
2032 | + f1 = self.open(u"rb") |
2033 | + f2 = other.open(u"rb") |
2034 | |
2035 | def close(): |
2036 | assert not f1.close() |
2037 | @@ -435,15 +435,15 @@ |
2038 | return 1 |
2039 | |
2040 | def perms_equal(self, other): |
2041 | - """True if self and other have same permissions and ownership""" |
2042 | + u"""True if self and other have same permissions and ownership""" |
2043 | s1, s2 = self.stat, other.stat |
2044 | return (self.mode == other.mode and |
2045 | s1.st_gid == s2.st_gid and s1.st_uid == s2.st_uid) |
2046 | |
2047 | def copy(self, other): |
2048 | - """Copy self to other. Also copies data. Other must be Path""" |
2049 | + u"""Copy self to other. Also copies data. Other must be Path""" |
2050 | if self.isreg(): |
2051 | - other.writefileobj(self.open("rb")) |
2052 | + other.writefileobj(self.open(u"rb")) |
2053 | elif self.isdir(): |
2054 | os.mkdir(other.name) |
2055 | elif self.issym(): |
2056 | @@ -456,15 +456,15 @@ |
2057 | elif self.issock(): |
2058 | socket.socket(socket.AF_UNIX).bind(other.name) |
2059 | elif self.isdev(): |
2060 | - if self.type == "chr": |
2061 | - devtype = "c" |
2062 | + if self.type == u"chr": |
2063 | + devtype = u"c" |
2064 | else: |
2065 | - devtype = "b" |
2066 | + devtype = u"b" |
2067 | other.makedev(devtype, *self.devnums) |
2068 | self.copy_attribs(other) |
2069 | |
2070 | def copy_attribs(self, other): |
2071 | - """Only copy attributes from self to other""" |
2072 | + u"""Only copy attributes from self to other""" |
2073 | if isinstance(other, Path): |
2074 | if self.stat is not None: |
2075 | util.maybe_ignore_errors(lambda: os.chown(other.name, self.stat.st_uid, self.stat.st_gid)) |
2076 | @@ -481,18 +481,18 @@ |
2077 | other.mode = self.mode |
2078 | |
2079 | def __unicode__(self): |
2080 | - """Return string representation""" |
2081 | + u"""Return string representation""" |
2082 | return u"(%s %s)" % (util.uindex(self.index), self.type) |
2083 | |
2084 | |
2085 | class Path(ROPath): |
2086 | - """ |
2087 | + u""" |
2088 | Path class - wrapper around ordinary local files |
2089 | |
2090 | Besides caching stat() results, this class organizes various file |
2091 | code. |
2092 | """ |
2093 | - regex_chars_to_quote = re.compile("[\\\\\\\"\\$`]") |
2094 | + regex_chars_to_quote = re.compile(u"[\\\\\\\"\\$`]") |
2095 | |
2096 | def rename_index(self, index): |
2097 | if not globals.rename or not index: |
2098 | @@ -508,7 +508,7 @@ |
2099 | return index # no rename found |
2100 | |
2101 | def __init__(self, base, index=()): |
2102 | - """Path initializer""" |
2103 | + u"""Path initializer""" |
2104 | # self.opened should be true if the file has been opened, and |
2105 | # self.fileobj can override returned fileobj |
2106 | self.opened, self.fileobj = None, None |
2107 | @@ -530,7 +530,7 @@ |
2108 | self.setdata() |
2109 | |
2110 | def setdata(self): |
2111 | - """Refresh stat cache""" |
2112 | + u"""Refresh stat cache""" |
2113 | try: |
2114 | # We may be asked to look at the target of symlinks rather than |
2115 | # the link itself. |
2116 | @@ -540,7 +540,7 @@ |
2117 | self.stat = os.lstat(self.name) |
2118 | except OSError as e: |
2119 | err_string = errno.errorcode[e[0]] |
2120 | - if err_string in ["ENOENT", "ENOTDIR", "ELOOP", "ENOTCONN"]: |
2121 | + if err_string in [u"ENOENT", u"ENOTDIR", u"ELOOP", u"ENOTCONN"]: |
2122 | self.stat, self.type = None, None # file doesn't exist |
2123 | self.mode = None |
2124 | else: |
2125 | @@ -551,23 +551,23 @@ |
2126 | self.symtext = os.readlink(self.name) |
2127 | |
2128 | def append(self, ext): |
2129 | - """Return new Path with ext added to index""" |
2130 | + u"""Return new Path with ext added to index""" |
2131 | return self.__class__(self.base, self.index + (ext,)) |
2132 | |
2133 | def new_index(self, index): |
2134 | - """Return new Path with index index""" |
2135 | + u"""Return new Path with index index""" |
2136 | return self.__class__(self.base, index) |
2137 | |
2138 | def listdir(self): |
2139 | - """Return list generated by os.listdir""" |
2140 | + u"""Return list generated by os.listdir""" |
2141 | return os.listdir(self.name) |
2142 | |
2143 | def isemptydir(self): |
2144 | - """Return true if path is a directory and is empty""" |
2145 | + u"""Return true if path is a directory and is empty""" |
2146 | return self.isdir() and not self.listdir() |
2147 | |
2148 | - def open(self, mode="rb"): |
2149 | - """ |
2150 | + def open(self, mode=u"rb"): |
2151 | + u""" |
2152 | Return fileobj associated with self |
2153 | |
2154 | Usually this is just the file data on disk, but can be |
2155 | @@ -581,25 +581,25 @@ |
2156 | return result |
2157 | |
2158 | def makedev(self, type, major, minor): |
2159 | - """Make a device file with specified type, major/minor nums""" |
2160 | - cmdlist = ['mknod', self.name, type, str(major), str(minor)] |
2161 | - if os.spawnvp(os.P_WAIT, 'mknod', cmdlist) != 0: |
2162 | - raise PathException("Error running %s" % cmdlist) |
2163 | + u"""Make a device file with specified type, major/minor nums""" |
2164 | + cmdlist = [u'mknod', self.name, type, str(major), str(minor)] |
2165 | + if os.spawnvp(os.P_WAIT, u'mknod', cmdlist) != 0: |
2166 | + raise PathException(u"Error running %s" % cmdlist) |
2167 | self.setdata() |
2168 | |
2169 | def mkdir(self): |
2170 | - """Make directory(s) at specified path""" |
2171 | - log.Info(_("Making directory %s") % self.uc_name) |
2172 | + u"""Make directory(s) at specified path""" |
2173 | + log.Info(_(u"Making directory %s") % self.uc_name) |
2174 | try: |
2175 | os.makedirs(self.name) |
2176 | except OSError: |
2177 | if (not globals.force): |
2178 | - raise PathException("Error creating directory %s" % self.uc_name, 7) |
2179 | + raise PathException(u"Error creating directory %s" % self.uc_name, 7) |
2180 | self.setdata() |
2181 | |
2182 | def delete(self): |
2183 | - """Remove this file""" |
2184 | - log.Info(_("Deleting %s") % self.uc_name) |
2185 | + u"""Remove this file""" |
2186 | + log.Info(_(u"Deleting %s") % self.uc_name) |
2187 | if self.isdir(): |
2188 | util.ignore_missing(os.rmdir, self.name) |
2189 | else: |
2190 | @@ -607,15 +607,15 @@ |
2191 | self.setdata() |
2192 | |
2193 | def touch(self): |
2194 | - """Open the file, write 0 bytes, close""" |
2195 | - log.Info(_("Touching %s") % self.uc_name) |
2196 | - fp = self.open("wb") |
2197 | + u"""Open the file, write 0 bytes, close""" |
2198 | + log.Info(_(u"Touching %s") % self.uc_name) |
2199 | + fp = self.open(u"wb") |
2200 | fp.close() |
2201 | |
2202 | def deltree(self): |
2203 | - """Remove self by recursively deleting files under it""" |
2204 | + u"""Remove self by recursively deleting files under it""" |
2205 | from duplicity import selection # todo: avoid circ. dep. issue |
2206 | - log.Info(_("Deleting tree %s") % self.uc_name) |
2207 | + log.Info(_(u"Deleting tree %s") % self.uc_name) |
2208 | itr = IterTreeReducer(PathDeleter, []) |
2209 | for path in selection.Select(self).set_iter(): |
2210 | itr(path.index, path) |
2211 | @@ -623,50 +623,50 @@ |
2212 | self.setdata() |
2213 | |
2214 | def get_parent_dir(self): |
2215 | - """Return directory that self is in""" |
2216 | + u"""Return directory that self is in""" |
2217 | if self.index: |
2218 | return Path(self.base, self.index[:-1]) |
2219 | else: |
2220 | - components = self.base.split("/") |
2221 | + components = self.base.split(u"/") |
2222 | if len(components) == 2 and not components[0]: |
2223 | - return Path("/") # already in root directory |
2224 | + return Path(u"/") # already in root directory |
2225 | else: |
2226 | - return Path("/".join(components[:-1])) |
2227 | + return Path(u"/".join(components[:-1])) |
2228 | |
2229 | def writefileobj(self, fin): |
2230 | - """Copy file object fin to self. Close both when done.""" |
2231 | - fout = self.open("wb") |
2232 | + u"""Copy file object fin to self. Close both when done.""" |
2233 | + fout = self.open(u"wb") |
2234 | while 1: |
2235 | buf = fin.read(_copy_blocksize) |
2236 | if not buf: |
2237 | break |
2238 | fout.write(buf) |
2239 | if fin.close() or fout.close(): |
2240 | - raise PathException("Error closing file object") |
2241 | + raise PathException(u"Error closing file object") |
2242 | self.setdata() |
2243 | |
2244 | def rename(self, new_path): |
2245 | - """Rename file at current path to new_path.""" |
2246 | + u"""Rename file at current path to new_path.""" |
2247 | shutil.move(self.name, new_path.name) |
2248 | self.setdata() |
2249 | new_path.setdata() |
2250 | |
2251 | def move(self, new_path): |
2252 | - """Like rename but destination may be on different file system""" |
2253 | + u"""Like rename but destination may be on different file system""" |
2254 | self.copy(new_path) |
2255 | self.delete() |
2256 | |
2257 | def chmod(self, mode): |
2258 | - """Change permissions of the path""" |
2259 | + u"""Change permissions of the path""" |
2260 | os.chmod(self.name, mode) |
2261 | self.setdata() |
2262 | |
2263 | def patch_with_attribs(self, diff_ropath): |
2264 | - """Patch self with diff and then copy attributes over""" |
2265 | + u"""Patch self with diff and then copy attributes over""" |
2266 | assert self.isreg() and diff_ropath.isreg() |
2267 | temp_path = self.get_temp_in_same_dir() |
2268 | - fbase = self.open("rb") |
2269 | - fdiff = diff_ropath.open("rb") |
2270 | + fbase = self.open(u"rb") |
2271 | + fdiff = diff_ropath.open(u"rb") |
2272 | patch_fileobj = librsync.PatchedFile(fbase, fdiff) |
2273 | temp_path.writefileobj(patch_fileobj) |
2274 | assert not fbase.close() |
2275 | @@ -675,11 +675,11 @@ |
2276 | temp_path.rename(self) |
2277 | |
2278 | def get_temp_in_same_dir(self): |
2279 | - """Return temp non existent path in same directory as self""" |
2280 | + u"""Return temp non existent path in same directory as self""" |
2281 | global _tmp_path_counter |
2282 | parent_dir = self.get_parent_dir() |
2283 | while 1: |
2284 | - temp_path = parent_dir.append("duplicity_temp." + |
2285 | + temp_path = parent_dir.append(u"duplicity_temp." + |
2286 | str(_tmp_path_counter)) |
2287 | if not temp_path.type: |
2288 | return temp_path |
2289 | @@ -688,18 +688,18 @@ |
2290 | u"Warning too many temp files created for " + self.uc_name |
2291 | |
2292 | def compare_recursive(self, other, verbose=None): |
2293 | - """Compare self to other Path, descending down directories""" |
2294 | + u"""Compare self to other Path, descending down directories""" |
2295 | from duplicity import selection # todo: avoid circ. dep. issue |
2296 | selfsel = selection.Select(self).set_iter() |
2297 | othersel = selection.Select(other).set_iter() |
2298 | return Iter.equal(selfsel, othersel, verbose) |
2299 | |
2300 | def __repr__(self): |
2301 | - """Return string representation""" |
2302 | - return "(%s %s %s)" % (self.index, self.name, self.type) |
2303 | + u"""Return string representation""" |
2304 | + return u"(%s %s %s)" % (self.index, self.name, self.type) |
2305 | |
2306 | def quote(self, s=None): |
2307 | - """ |
2308 | + u""" |
2309 | Return quoted version of s (defaults to self.name) |
2310 | |
2311 | The output is meant to be interpreted with shells, so can be |
2312 | @@ -707,15 +707,15 @@ |
2313 | """ |
2314 | if not s: |
2315 | s = self.name |
2316 | - return '"%s"' % self.regex_chars_to_quote.sub(lambda m: "\\" + m.group(0), s) |
2317 | + return u'"%s"' % self.regex_chars_to_quote.sub(lambda m: u"\\" + m.group(0), s) |
2318 | |
2319 | def unquote(self, s): |
2320 | - """Return unquoted version of string s, as quoted by above quote()""" |
2321 | - assert s[0] == s[-1] == "\"" # string must be quoted by above |
2322 | - result = "" |
2323 | + u"""Return unquoted version of string s, as quoted by above quote()""" |
2324 | + assert s[0] == s[-1] == u"\"" # string must be quoted by above |
2325 | + result = u"" |
2326 | i = 1 |
2327 | while i < len(s) - 1: |
2328 | - if s[i] == "\\": |
2329 | + if s[i] == u"\\": |
2330 | result += s[i + 1] |
2331 | i += 2 |
2332 | else: |
2333 | @@ -724,38 +724,38 @@ |
2334 | return result |
2335 | |
2336 | def get_filename(self): |
2337 | - """Return filename of last component""" |
2338 | - components = self.name.split("/") |
2339 | + u"""Return filename of last component""" |
2340 | + components = self.name.split(u"/") |
2341 | assert components and components[-1] |
2342 | return components[-1] |
2343 | |
2344 | def get_canonical(self): |
2345 | - """ |
2346 | + u""" |
2347 | Return string of canonical version of path |
2348 | |
2349 | Remove ".", and trailing slashes where possible. Note that |
2350 | it's harder to remove "..", as "foo/bar/.." is not necessarily |
2351 | "foo", so we can't use path.normpath() |
2352 | """ |
2353 | - newpath = "/".join(filter(lambda x: x and x != ".", |
2354 | - self.name.split("/"))) |
2355 | - if self.name[0] == "/": |
2356 | - return "/" + newpath |
2357 | + newpath = u"/".join(filter(lambda x: x and x != u".", |
2358 | + self.name.split(u"/"))) |
2359 | + if self.name[0] == u"/": |
2360 | + return u"/" + newpath |
2361 | elif newpath: |
2362 | return newpath |
2363 | else: |
2364 | - return "." |
2365 | + return u"." |
2366 | |
2367 | |
2368 | class DupPath(Path): |
2369 | - """ |
2370 | + u""" |
2371 | Represent duplicity data files |
2372 | |
2373 | Based on the file name, files that are compressed or encrypted |
2374 | will have different open() methods. |
2375 | """ |
2376 | def __init__(self, base, index=(), parseresults=None): |
2377 | - """ |
2378 | + u""" |
2379 | DupPath initializer |
2380 | |
2381 | The actual filename (no directory) must be the single element |
2382 | @@ -767,12 +767,12 @@ |
2383 | else: |
2384 | assert len(index) == 1 |
2385 | self.pr = file_naming.parse(index[0]) |
2386 | - assert self.pr, "must be a recognizable duplicity file" |
2387 | + assert self.pr, u"must be a recognizable duplicity file" |
2388 | |
2389 | Path.__init__(self, base, index) |
2390 | |
2391 | - def filtered_open(self, mode="rb", gpg_profile=None): |
2392 | - """ |
2393 | + def filtered_open(self, mode=u"rb", gpg_profile=None): |
2394 | + u""" |
2395 | Return fileobj with appropriate encryption/compression |
2396 | |
2397 | If encryption is specified but no gpg_profile, use |
2398 | @@ -788,16 +788,16 @@ |
2399 | elif self.pr.encrypted: |
2400 | if not gpg_profile: |
2401 | gpg_profile = globals.gpg_profile |
2402 | - if mode == "rb": |
2403 | + if mode == u"rb": |
2404 | return gpg.GPGFile(False, self, gpg_profile) |
2405 | - elif mode == "wb": |
2406 | + elif mode == u"wb": |
2407 | return gpg.GPGFile(True, self, gpg_profile) |
2408 | else: |
2409 | return self.open(mode) |
2410 | |
2411 | |
2412 | class PathDeleter(ITRBranch): |
2413 | - """Delete a directory. Called by Path.deltree""" |
2414 | + u"""Delete a directory. Called by Path.deltree""" |
2415 | def start_process(self, index, path): |
2416 | self.path = path |
2417 | |
2418 | |
2419 | === modified file 'duplicity/statistics.py' |
2420 | --- duplicity/statistics.py 2018-07-24 11:52:33 +0000 |
2421 | +++ duplicity/statistics.py 2018-09-24 21:19:45 +0000 |
2422 | @@ -19,7 +19,7 @@ |
2423 | # along with duplicity; if not, write to the Free Software Foundation, |
2424 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
2425 | |
2426 | -"""Generate and process backup statistics""" |
2427 | +u"""Generate and process backup statistics""" |
2428 | |
2429 | from future_builtins import map |
2430 | |
2431 | @@ -35,66 +35,66 @@ |
2432 | |
2433 | |
2434 | class StatsObj: |
2435 | - """Contains various statistics, provide string conversion functions""" |
2436 | + u"""Contains various statistics, provide string conversion functions""" |
2437 | # used when quoting files in get_stats_line |
2438 | - space_regex = re.compile(" ") |
2439 | + space_regex = re.compile(u" ") |
2440 | |
2441 | - stat_file_attrs = ('SourceFiles', |
2442 | - 'SourceFileSize', |
2443 | - 'NewFiles', |
2444 | - 'NewFileSize', |
2445 | - 'DeletedFiles', |
2446 | - 'ChangedFiles', |
2447 | - 'ChangedFileSize', |
2448 | - 'ChangedDeltaSize', |
2449 | - 'DeltaEntries', |
2450 | - 'RawDeltaSize') |
2451 | - stat_misc_attrs = ('Errors', |
2452 | - 'TotalDestinationSizeChange') |
2453 | - stat_time_attrs = ('StartTime', |
2454 | - 'EndTime', |
2455 | - 'ElapsedTime') |
2456 | - stat_attrs = (('Filename',) + stat_time_attrs + |
2457 | + stat_file_attrs = (u'SourceFiles', |
2458 | + u'SourceFileSize', |
2459 | + u'NewFiles', |
2460 | + u'NewFileSize', |
2461 | + u'DeletedFiles', |
2462 | + u'ChangedFiles', |
2463 | + u'ChangedFileSize', |
2464 | + u'ChangedDeltaSize', |
2465 | + u'DeltaEntries', |
2466 | + u'RawDeltaSize') |
2467 | + stat_misc_attrs = (u'Errors', |
2468 | + u'TotalDestinationSizeChange') |
2469 | + stat_time_attrs = (u'StartTime', |
2470 | + u'EndTime', |
2471 | + u'ElapsedTime') |
2472 | + stat_attrs = ((u'Filename',) + stat_time_attrs + |
2473 | stat_misc_attrs + stat_file_attrs) |
2474 | |
2475 | # Below, the second value in each pair is true iff the value |
2476 | # indicates a number of bytes |
2477 | - stat_file_pairs = (('SourceFiles', False), |
2478 | - ('SourceFileSize', True), |
2479 | - ('NewFiles', False), |
2480 | - ('NewFileSize', True), |
2481 | - ('DeletedFiles', False), |
2482 | - ('ChangedFiles', False), |
2483 | - ('ChangedFileSize', True), |
2484 | - ('ChangedDeltaSize', True), |
2485 | - ('DeltaEntries', False), |
2486 | - ('RawDeltaSize', True)) |
2487 | + stat_file_pairs = ((u'SourceFiles', False), |
2488 | + (u'SourceFileSize', True), |
2489 | + (u'NewFiles', False), |
2490 | + (u'NewFileSize', True), |
2491 | + (u'DeletedFiles', False), |
2492 | + (u'ChangedFiles', False), |
2493 | + (u'ChangedFileSize', True), |
2494 | + (u'ChangedDeltaSize', True), |
2495 | + (u'DeltaEntries', False), |
2496 | + (u'RawDeltaSize', True)) |
2497 | |
2498 | # This is used in get_byte_summary_string below |
2499 | - byte_abbrev_list = ((1024 * 1024 * 1024 * 1024, "TB"), |
2500 | - (1024 * 1024 * 1024, "GB"), |
2501 | - (1024 * 1024, "MB"), |
2502 | - (1024, "KB")) |
2503 | + byte_abbrev_list = ((1024 * 1024 * 1024 * 1024, u"TB"), |
2504 | + (1024 * 1024 * 1024, u"GB"), |
2505 | + (1024 * 1024, u"MB"), |
2506 | + (1024, u"KB")) |
2507 | |
2508 | def __init__(self): |
2509 | - """Set attributes to None""" |
2510 | + u"""Set attributes to None""" |
2511 | for attr in self.stat_attrs: |
2512 | self.__dict__[attr] = None |
2513 | |
2514 | def get_stat(self, attribute): |
2515 | - """Get a statistic""" |
2516 | + u"""Get a statistic""" |
2517 | return self.__dict__[attribute] |
2518 | |
2519 | def set_stat(self, attr, value): |
2520 | - """Set attribute to given value""" |
2521 | + u"""Set attribute to given value""" |
2522 | self.__dict__[attr] = value |
2523 | |
2524 | def increment_stat(self, attr): |
2525 | - """Add 1 to value of attribute""" |
2526 | + u"""Add 1 to value of attribute""" |
2527 | self.__dict__[attr] += 1 |
2528 | |
2529 | def get_total_dest_size_change(self): |
2530 | - """Return total destination size change |
2531 | + u"""Return total destination size change |
2532 | |
2533 | This represents the total increase in the size of the |
2534 | duplicity destination directory, or None if not available. |
2535 | @@ -103,25 +103,25 @@ |
2536 | return 0 # this needs to be re-done for duplicity |
2537 | |
2538 | def get_stats_line(self, index, use_repr=1): |
2539 | - """Return one line abbreviated version of full stats string""" |
2540 | + u"""Return one line abbreviated version of full stats string""" |
2541 | file_attrs = [str(self.get_stat(a)) for a in self.stat_file_attrs] |
2542 | if not index: |
2543 | - filename = "." |
2544 | + filename = u"." |
2545 | else: |
2546 | filename = os.path.join(*index) |
2547 | if use_repr: |
2548 | # use repr to quote newlines in relative filename, then |
2549 | # take of leading and trailing quote and quote spaces. |
2550 | - filename = self.space_regex.sub("\\x20", repr(filename)[1:-1]) |
2551 | - return " ".join([filename, ] + file_attrs) |
2552 | + filename = self.space_regex.sub(u"\\x20", repr(filename)[1:-1]) |
2553 | + return u" ".join([filename, ] + file_attrs) |
2554 | |
2555 | def set_stats_from_line(self, line): |
2556 | - """Set statistics from given line""" |
2557 | + u"""Set statistics from given line""" |
2558 | def error(): |
2559 | - raise StatsException("Bad line '%s'" % line) |
2560 | - if line[-1] == "\n": |
2561 | + raise StatsException(u"Bad line '%s'" % line) |
2562 | + if line[-1] == u"\n": |
2563 | line = line[:-1] |
2564 | - lineparts = line.split(" ") |
2565 | + lineparts = line.split(u" ") |
2566 | if len(lineparts) < len(self.stat_file_attrs): |
2567 | error() |
2568 | for attr, val_string in zip(self.stat_file_attrs, |
2569 | @@ -137,62 +137,62 @@ |
2570 | return self |
2571 | |
2572 | def get_stats_string(self): |
2573 | - """Return extended string printing out statistics""" |
2574 | - return "%s%s%s" % (self.get_timestats_string(), |
2575 | - self.get_filestats_string(), |
2576 | - self.get_miscstats_string()) |
2577 | + u"""Return extended string printing out statistics""" |
2578 | + return u"%s%s%s" % (self.get_timestats_string(), |
2579 | + self.get_filestats_string(), |
2580 | + self.get_miscstats_string()) |
2581 | |
2582 | def get_timestats_string(self): |
2583 | - """Return portion of statistics string dealing with time""" |
2584 | + u"""Return portion of statistics string dealing with time""" |
2585 | timelist = [] |
2586 | if self.StartTime is not None: |
2587 | - timelist.append("StartTime %.2f (%s)\n" % |
2588 | + timelist.append(u"StartTime %.2f (%s)\n" % |
2589 | (self.StartTime, dup_time.timetopretty(self.StartTime))) |
2590 | if self.EndTime is not None: |
2591 | - timelist.append("EndTime %.2f (%s)\n" % |
2592 | + timelist.append(u"EndTime %.2f (%s)\n" % |
2593 | (self.EndTime, dup_time.timetopretty(self.EndTime))) |
2594 | if self.ElapsedTime or (self.StartTime is not None and |
2595 | self.EndTime is not None): |
2596 | if self.ElapsedTime is None: |
2597 | self.ElapsedTime = self.EndTime - self.StartTime |
2598 | - timelist.append("ElapsedTime %.2f (%s)\n" % |
2599 | + timelist.append(u"ElapsedTime %.2f (%s)\n" % |
2600 | (self.ElapsedTime, dup_time.inttopretty(self.ElapsedTime))) |
2601 | - return "".join(timelist) |
2602 | + return u"".join(timelist) |
2603 | |
2604 | def get_filestats_string(self): |
2605 | - """Return portion of statistics string about files and bytes""" |
2606 | + u"""Return portion of statistics string about files and bytes""" |
2607 | def fileline(stat_file_pair): |
2608 | - """Return zero or one line of the string""" |
2609 | + u"""Return zero or one line of the string""" |
2610 | attr, in_bytes = stat_file_pair |
2611 | val = self.get_stat(attr) |
2612 | if val is None: |
2613 | - return "" |
2614 | + return u"" |
2615 | if in_bytes: |
2616 | - return "%s %s (%s)\n" % (attr, val, |
2617 | - self.get_byte_summary_string(val)) |
2618 | + return u"%s %s (%s)\n" % (attr, val, |
2619 | + self.get_byte_summary_string(val)) |
2620 | else: |
2621 | - return "%s %s\n" % (attr, val) |
2622 | + return u"%s %s\n" % (attr, val) |
2623 | |
2624 | - return "".join(map(fileline, self.stat_file_pairs)) |
2625 | + return u"".join(map(fileline, self.stat_file_pairs)) |
2626 | |
2627 | def get_miscstats_string(self): |
2628 | - """Return portion of extended stat string about misc attributes""" |
2629 | - misc_string = "" |
2630 | + u"""Return portion of extended stat string about misc attributes""" |
2631 | + misc_string = u"" |
2632 | tdsc = self.TotalDestinationSizeChange |
2633 | if tdsc is not None: |
2634 | - misc_string += ("TotalDestinationSizeChange %s (%s)\n" % |
2635 | + misc_string += (u"TotalDestinationSizeChange %s (%s)\n" % |
2636 | (tdsc, self.get_byte_summary_string(tdsc))) |
2637 | if self.Errors is not None: |
2638 | - misc_string += "Errors %d\n" % self.Errors |
2639 | + misc_string += u"Errors %d\n" % self.Errors |
2640 | return misc_string |
2641 | |
2642 | def get_byte_summary_string(self, byte_count): |
2643 | - """Turn byte count into human readable string like "7.23GB" """ |
2644 | + u"""Turn byte count into human readable string like "7.23GB" """ |
2645 | if byte_count < 0: |
2646 | - sign = "-" |
2647 | + sign = u"-" |
2648 | byte_count = -byte_count |
2649 | else: |
2650 | - sign = "" |
2651 | + sign = u"" |
2652 | |
2653 | for abbrev_bytes, abbrev_string in self.byte_abbrev_list: |
2654 | if byte_count >= abbrev_bytes: |
2655 | @@ -204,26 +204,26 @@ |
2656 | precision = 1 |
2657 | else: |
2658 | precision = 2 |
2659 | - return "%s%%.%df %s" % (sign, precision, abbrev_string) \ |
2660 | + return u"%s%%.%df %s" % (sign, precision, abbrev_string) \ |
2661 | % (abbrev_count,) |
2662 | byte_count = round(byte_count) |
2663 | if byte_count == 1: |
2664 | - return sign + "1 byte" |
2665 | + return sign + u"1 byte" |
2666 | else: |
2667 | - return "%s%d bytes" % (sign, byte_count) |
2668 | + return u"%s%d bytes" % (sign, byte_count) |
2669 | |
2670 | def get_stats_logstring(self, title): |
2671 | - """Like get_stats_string, but add header and footer""" |
2672 | - header = "--------------[ %s ]--------------" % title |
2673 | - footer = "-" * len(header) |
2674 | - return "%s\n%s%s\n" % (header, self.get_stats_string(), footer) |
2675 | + u"""Like get_stats_string, but add header and footer""" |
2676 | + header = u"--------------[ %s ]--------------" % title |
2677 | + footer = u"-" * len(header) |
2678 | + return u"%s\n%s%s\n" % (header, self.get_stats_string(), footer) |
2679 | |
2680 | def set_stats_from_string(self, s): |
2681 | - """Initialize attributes from string, return self for convenience""" |
2682 | + u"""Initialize attributes from string, return self for convenience""" |
2683 | def error(line): |
2684 | - raise StatsException("Bad line '%s'" % line) |
2685 | + raise StatsException(u"Bad line '%s'" % line) |
2686 | |
2687 | - for line in s.split("\n"): |
2688 | + for line in s.split(u"\n"): |
2689 | if not line: |
2690 | continue |
2691 | line_parts = line.split() |
2692 | @@ -247,20 +247,20 @@ |
2693 | return self |
2694 | |
2695 | def write_stats_to_path(self, path): |
2696 | - """Write statistics string to given path""" |
2697 | - fin = path.open("w") |
2698 | + u"""Write statistics string to given path""" |
2699 | + fin = path.open(u"w") |
2700 | fin.write(self.get_stats_string()) |
2701 | assert not fin.close() |
2702 | |
2703 | def read_stats_from_path(self, path): |
2704 | - """Set statistics from path, return self for convenience""" |
2705 | - fp = path.open("r") |
2706 | + u"""Set statistics from path, return self for convenience""" |
2707 | + fp = path.open(u"r") |
2708 | self.set_stats_from_string(fp.read()) |
2709 | assert not fp.close() |
2710 | return self |
2711 | |
2712 | def stats_equal(self, s): |
2713 | - """Return true if s has same statistics as self""" |
2714 | + u"""Return true if s has same statistics as self""" |
2715 | assert isinstance(s, StatsObj) |
2716 | for attr in self.stat_file_attrs: |
2717 | if self.get_stat(attr) != s.get_stat(attr): |
2718 | @@ -268,7 +268,7 @@ |
2719 | return 1 |
2720 | |
2721 | def set_to_average(self, statobj_list): |
2722 | - """Set self's attributes to average of those in statobj_list""" |
2723 | + u"""Set self's attributes to average of those in statobj_list""" |
2724 | for attr in self.stat_attrs: |
2725 | self.set_stat(attr, 0) |
2726 | for statobj in statobj_list: |
2727 | @@ -290,7 +290,7 @@ |
2728 | return self |
2729 | |
2730 | def get_statsobj_copy(self): |
2731 | - """Return new StatsObj object with same stats as self""" |
2732 | + u"""Return new StatsObj object with same stats as self""" |
2733 | s = StatsObj() |
2734 | for attr in self.stat_attrs: |
2735 | s.set_stat(attr, self.get_stat(attr)) |
2736 | @@ -298,9 +298,9 @@ |
2737 | |
2738 | |
2739 | class StatsDeltaProcess(StatsObj): |
2740 | - """Keep track of statistics during DirDelta process""" |
2741 | + u"""Keep track of statistics during DirDelta process""" |
2742 | def __init__(self): |
2743 | - """StatsDeltaProcess initializer - zero file attributes""" |
2744 | + u"""StatsDeltaProcess initializer - zero file attributes""" |
2745 | StatsObj.__init__(self) |
2746 | for attr in StatsObj.stat_file_attrs: |
2747 | self.__dict__[attr] = 0 |
2748 | @@ -309,39 +309,39 @@ |
2749 | self.files_changed = [] |
2750 | |
2751 | def add_new_file(self, path): |
2752 | - """Add stats of new file path to statistics""" |
2753 | + u"""Add stats of new file path to statistics""" |
2754 | filesize = path.getsize() |
2755 | self.SourceFiles += 1 |
2756 | # SourceFileSize is added-to incrementally as read |
2757 | self.NewFiles += 1 |
2758 | self.NewFileSize += filesize |
2759 | self.DeltaEntries += 1 |
2760 | - self.add_delta_entries_file(path, 'new') |
2761 | + self.add_delta_entries_file(path, b'new') |
2762 | |
2763 | def add_changed_file(self, path): |
2764 | - """Add stats of file that has changed since last backup""" |
2765 | + u"""Add stats of file that has changed since last backup""" |
2766 | filesize = path.getsize() |
2767 | self.SourceFiles += 1 |
2768 | # SourceFileSize is added-to incrementally as read |
2769 | self.ChangedFiles += 1 |
2770 | self.ChangedFileSize += filesize |
2771 | self.DeltaEntries += 1 |
2772 | - self.add_delta_entries_file(path, 'changed') |
2773 | + self.add_delta_entries_file(path, b'changed') |
2774 | |
2775 | def add_deleted_file(self, path): |
2776 | - """Add stats of file no longer in source directory""" |
2777 | + u"""Add stats of file no longer in source directory""" |
2778 | self.DeletedFiles += 1 # can't add size since not available |
2779 | self.DeltaEntries += 1 |
2780 | - self.add_delta_entries_file(path, 'deleted') |
2781 | + self.add_delta_entries_file(path, b'deleted') |
2782 | |
2783 | def add_unchanged_file(self, path): |
2784 | - """Add stats of file that hasn't changed since last backup""" |
2785 | + u"""Add stats of file that hasn't changed since last backup""" |
2786 | filesize = path.getsize() |
2787 | self.SourceFiles += 1 |
2788 | self.SourceFileSize += filesize |
2789 | |
2790 | def close(self): |
2791 | - """End collection of data, set EndTime""" |
2792 | + u"""End collection of data, set EndTime""" |
2793 | self.EndTime = time.time() |
2794 | |
2795 | def add_delta_entries_file(self, path, action_type): |
2796 | |
2797 | === modified file 'duplicity/util.py' |
2798 | --- duplicity/util.py 2018-07-24 11:52:33 +0000 |
2799 | +++ duplicity/util.py 2018-09-24 21:19:45 +0000 |
2800 | @@ -19,7 +19,7 @@ |
2801 | # along with duplicity; if not, write to the Free Software Foundation, |
2802 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
2803 | |
2804 | -""" |
2805 | +u""" |
2806 | Miscellaneous utilities. |
2807 | """ |
2808 | |
2809 | @@ -51,23 +51,23 @@ |
2810 | # ToDo: Revisit this once we drop Python 2 support/the backport is complete |
2811 | |
2812 | def fsencode(unicode_filename): |
2813 | - """Convert a unicode filename to a filename encoded in the system encoding""" |
2814 | + u"""Convert a unicode filename to a filename encoded in the system encoding""" |
2815 | # For paths, just use path.name rather than converting with this |
2816 | # If we are not doing any cleverness with non-unicode filename bytes, |
2817 | # encoding to system encoding is good enough |
2818 | - return unicode_filename.encode(sys.getfilesystemencoding(), "replace") |
2819 | + return unicode_filename.encode(sys.getfilesystemencoding(), u"replace") |
2820 | |
2821 | def fsdecode(bytes_filename): |
2822 | - """Convert a filename encoded in the system encoding to unicode""" |
2823 | + u"""Convert a filename encoded in the system encoding to unicode""" |
2824 | # For paths, just use path.uc_name rather than converting with this |
2825 | # If we are not doing any cleverness with non-unicode filename bytes, |
2826 | # decoding using system encoding is good enough. Use "ignore" as |
2827 | # Linux paths can contain non-Unicode characters |
2828 | - return bytes_filename.decode(globals.fsencoding, "replace") |
2829 | + return bytes_filename.decode(globals.fsencoding, u"replace") |
2830 | |
2831 | |
2832 | def exception_traceback(limit=50): |
2833 | - """ |
2834 | + u""" |
2835 | @return A string representation in typical Python format of the |
2836 | currently active/raised exception. |
2837 | """ |
2838 | @@ -76,20 +76,20 @@ |
2839 | lines = traceback.format_tb(tb, limit) |
2840 | lines.extend(traceback.format_exception_only(type, value)) |
2841 | |
2842 | - msg = "Traceback (innermost last):\n" |
2843 | - msg = msg + "%-20s %s" % (string.join(lines[:-1], ""), lines[-1]) |
2844 | + msg = u"Traceback (innermost last):\n" |
2845 | + msg = msg + u"%-20s %s" % (string.join(lines[:-1], u""), lines[-1]) |
2846 | |
2847 | - return msg.decode('unicode-escape', 'replace') |
2848 | + return msg.decode(u'unicode-escape', u'replace') |
2849 | |
2850 | |
2851 | def escape(string): |
2852 | - "Convert a (bytes) filename to a format suitable for logging (quoted utf8)" |
2853 | - string = fsdecode(string).encode('unicode-escape', 'replace') |
2854 | - return u"'%s'" % string.decode('utf8', 'replace') |
2855 | + u"Convert a (bytes) filename to a format suitable for logging (quoted utf8)" |
2856 | + string = fsdecode(string).encode(u'unicode-escape', u'replace') |
2857 | + return u"'%s'" % string.decode(u'utf8', u'replace') |
2858 | |
2859 | |
2860 | def uindex(index): |
2861 | - "Convert an index (a tuple of path parts) to unicode for printing" |
2862 | + u"Convert an index (a tuple of path parts) to unicode for printing" |
2863 | if index: |
2864 | return os.path.join(*list(map(fsdecode, index))) |
2865 | else: |
2866 | @@ -101,11 +101,11 @@ |
2867 | # non-ascii will cause a UnicodeDecodeError when implicitly decoding to |
2868 | # unicode. So we decode manually, using the filesystem encoding. |
2869 | # 99.99% of the time, this will be a fine encoding to use. |
2870 | - return fsdecode(unicode(e).encode('utf-8')) |
2871 | + return fsdecode(unicode(e).encode(u'utf-8')) |
2872 | |
2873 | |
2874 | def maybe_ignore_errors(fn): |
2875 | - """ |
2876 | + u""" |
2877 | Execute fn. If the global configuration setting ignore_errors is |
2878 | set to True, catch errors and log them but do continue (and return |
2879 | None). |
2880 | @@ -117,7 +117,7 @@ |
2881 | return fn() |
2882 | except Exception as e: |
2883 | if globals.ignore_errors: |
2884 | - log.Warn(_("IGNORED_ERROR: Warning: ignoring error as requested: %s: %s") |
2885 | + log.Warn(_(u"IGNORED_ERROR: Warning: ignoring error as requested: %s: %s") |
2886 | % (e.__class__.__name__, uexc(e))) |
2887 | return None |
2888 | else: |
2889 | @@ -145,7 +145,7 @@ |
2890 | # yet. So we want to ignore ReadError exceptions, which are used to signal |
2891 | # this. |
2892 | try: |
2893 | - tf = tarfile.TarFile("arbitrary", mode, fp) |
2894 | + tf = tarfile.TarFile(u"arbitrary", mode, fp) |
2895 | # Now we cause TarFile to not cache TarInfo objects. It would end up |
2896 | # consuming a lot of memory over the lifetime of our long-lasting |
2897 | # signature files otherwise. |
2898 | @@ -159,14 +159,14 @@ |
2899 | # Python versions before 2.6 ensure that directories end with /, but 2.6 |
2900 | # and later ensure they they *don't* have /. ::shrug:: Internally, we |
2901 | # continue to use pre-2.6 method. |
2902 | - if ti.isdir() and not ti.name.endswith("/"): |
2903 | - return ti.name + "/" |
2904 | + if ti.isdir() and not ti.name.endswith(b"/"): |
2905 | + return ti.name + b"/" |
2906 | else: |
2907 | return ti.name |
2908 | |
2909 | |
2910 | def ignore_missing(fn, filename): |
2911 | - """ |
2912 | + u""" |
2913 | Execute fn on filename. Ignore ENOENT errors, otherwise raise exception. |
2914 | |
2915 | @param fn: callable |
2916 | @@ -184,7 +184,7 @@ |
2917 | @atexit.register |
2918 | def release_lockfile(): |
2919 | if globals.lockfile: |
2920 | - log.Debug(_("Releasing lockfile %s") % globals.lockpath) |
2921 | + log.Debug(_(u"Releasing lockfile %s") % globals.lockpath) |
2922 | try: |
2923 | globals.lockfile.release() |
2924 | except Exception: |
2925 | @@ -192,7 +192,7 @@ |
2926 | |
2927 | |
2928 | def copyfileobj(infp, outfp, byte_count=-1): |
2929 | - """Copy byte_count bytes from infp to outfp, or all if byte_count < 0 |
2930 | + u"""Copy byte_count bytes from infp to outfp, or all if byte_count < 0 |
2931 | |
2932 | Returns the number of bytes actually written (may be less than |
2933 | byte_count if find eof. Does not close either fileobj. |
2934 | @@ -221,7 +221,7 @@ |
2935 | |
2936 | |
2937 | def which(program): |
2938 | - """ |
2939 | + u""" |
2940 | Return absolute path for program name. |
2941 | Returns None if program not found. |
2942 | """ |
2943 | @@ -234,8 +234,8 @@ |
2944 | if is_exe(program): |
2945 | return program |
2946 | else: |
2947 | - for path in os.getenv("PATH").split(os.pathsep): |
2948 | - path = path.strip('"') |
2949 | + for path in os.getenv(u"PATH").split(os.pathsep): |
2950 | + path = path.strip(u'"') |
2951 | exe_file = os.path.abspath(os.path.join(path, program)) |
2952 | if is_exe(exe_file): |
2953 | return exe_file |
2954 | |
2955 | === modified file 'po/duplicity.pot' |
2956 | --- po/duplicity.pot 2018-09-17 21:03:06 +0000 |
2957 | +++ po/duplicity.pot 2018-09-24 21:19:45 +0000 |
2958 | @@ -8,7 +8,7 @@ |
2959 | msgstr "" |
2960 | "Project-Id-Version: PACKAGE VERSION\n" |
2961 | "Report-Msgid-Bugs-To: Kenneth Loafman <kenneth@loafman.com>\n" |
2962 | -"POT-Creation-Date: 2018-09-17 15:38-0500\n" |
2963 | +"POT-Creation-Date: 2018-09-24 11:46-0500\n" |
2964 | "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" |
2965 | "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" |
2966 | "Language-Team: LANGUAGE <LL@li.org>\n" |
2967 | |
2968 | === modified file 'testing/test_code.py' |
2969 | --- testing/test_code.py 2018-09-17 21:03:06 +0000 |
2970 | +++ testing/test_code.py 2018-09-24 21:19:45 +0000 |
2971 | @@ -113,12 +113,6 @@ |
2972 | os.path.join(_top_dir, u'setup.py'), |
2973 | os.path.join(_top_dir, u'duplicity', u'__init__.py'), |
2974 | os.path.join(_top_dir, u'duplicity', u'compilec.py'), |
2975 | - os.path.join(_top_dir, u'duplicity', u'diffdir.py'), |
2976 | - os.path.join(_top_dir, u'duplicity', u'manifest.py'), |
2977 | - os.path.join(_top_dir, u'duplicity', u'patchdir.py'), |
2978 | - os.path.join(_top_dir, u'duplicity', u'path.py'), |
2979 | - os.path.join(_top_dir, u'duplicity', u'statistics.py'), |
2980 | - os.path.join(_top_dir, u'duplicity', u'util.py'), |
2981 | os.path.join(_top_dir, u'testing', u'overrides', u'gettext.py'), |
2982 | os.path.join(_top_dir, u'testing', u'test_unadorned.py'), |
2983 | os.path.join(_top_dir, u'testing', u'unit', u'test_statistics.py'), |
Taking into account the comment related to util.fsencode(), I think that tarinfo names need to be bytes, since they can be used to generate paths, and manifest strings also need to be bytes, since they can be read back in to generate a manifest. At the same time, I'm not sure whether a __str__ function should be returning bytes, although I'm not seeing a better solution.