Merge lp:~mgorse/duplicity/0.8-series into lp:~duplicity-team/duplicity/0.8-series

Proposed by Mgorse
Status: Merged
Merged at revision: 1335
Proposed branch: lp:~mgorse/duplicity/0.8-series
Merge into: lp:~duplicity-team/duplicity/0.8-series
Diff against target: 2983 lines (+560/-566)
8 files modified
duplicity/diffdir.py (+100/-100)
duplicity/manifest.py (+109/-109)
duplicity/patchdir.py (+75/-75)
duplicity/path.py (+154/-154)
duplicity/statistics.py (+96/-96)
duplicity/util.py (+25/-25)
po/duplicity.pot (+1/-1)
testing/test_code.py (+0/-6)
To merge this branch: bzr merge lp:~mgorse/duplicity/0.8-series
Reviewer Review Type Date Requested Status
duplicity-team Pending
Review via email: mp+355568@code.launchpad.net

Description of the change

Annotate more strings in duplicity/*.py

To post a comment you must log in.
Revision history for this message
Mgorse (mgorse) wrote :

Taking into account the comment related to util.fsencode(), I think that tarinfo names need to be bytes, since they can be used to generate paths, and manifest strings also need to be bytes, since they can be read back in to generate a manifest. At the same time, I'm not sure if a __str__ function should be returning bytes, although I'm not seeing a good solution.

lp:~mgorse/duplicity/0.8-series updated
1335. By Kenneth Loafman

* Merged in lp:~mgorse/duplicity/0.8-series
  - Adorn more strings in duplicity/*.py

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'duplicity/diffdir.py'
--- duplicity/diffdir.py 2018-07-27 02:18:12 +0000
+++ duplicity/diffdir.py 2018-09-24 21:19:45 +0000
@@ -19,7 +19,7 @@
19# along with duplicity; if not, write to the Free Software Foundation,19# along with duplicity; if not, write to the Free Software Foundation,
20# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA20# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
2121
22"""22u"""
23Functions for producing signatures and deltas of directories23Functions for producing signatures and deltas of directories
2424
25Note that the main processes of this module have two parts. In the25Note that the main processes of this module have two parts. In the
@@ -49,14 +49,14 @@
4949
5050
51def DirSig(path_iter):51def DirSig(path_iter):
52 """52 u"""
53 Alias for SigTarBlockIter below53 Alias for SigTarBlockIter below
54 """54 """
55 return SigTarBlockIter(path_iter)55 return SigTarBlockIter(path_iter)
5656
5757
58def DirFull(path_iter):58def DirFull(path_iter):
59 """59 u"""
60 Return a tarblock full backup of items in path_iter60 Return a tarblock full backup of items in path_iter
6161
62 A full backup is just a diff starting from nothing (it may be less62 A full backup is just a diff starting from nothing (it may be less
@@ -64,18 +64,18 @@
64 will be easy to split up the tar and make the volumes the same64 will be easy to split up the tar and make the volumes the same
65 sizes).65 sizes).
66 """66 """
67 return DirDelta(path_iter, cStringIO.StringIO(""))67 return DirDelta(path_iter, cStringIO.StringIO(u""))
6868
6969
70def DirFull_WriteSig(path_iter, sig_outfp):70def DirFull_WriteSig(path_iter, sig_outfp):
71 """71 u"""
72 Return full backup like above, but also write signature to sig_outfp72 Return full backup like above, but also write signature to sig_outfp
73 """73 """
74 return DirDelta_WriteSig(path_iter, cStringIO.StringIO(""), sig_outfp)74 return DirDelta_WriteSig(path_iter, cStringIO.StringIO(u""), sig_outfp)
7575
7676
77def DirDelta(path_iter, dirsig_fileobj_list):77def DirDelta(path_iter, dirsig_fileobj_list):
78 """78 u"""
79 Produce tarblock diff given dirsig_fileobj_list and pathiter79 Produce tarblock diff given dirsig_fileobj_list and pathiter
8080
81 dirsig_fileobj_list should either be a tar fileobj or a list of81 dirsig_fileobj_list should either be a tar fileobj or a list of
@@ -96,7 +96,7 @@
9696
9797
98def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None):98def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None):
99 """99 u"""
100 Called by get_delta_iter, report error in getting delta100 Called by get_delta_iter, report error in getting delta
101 """101 """
102 if new_path:102 if new_path:
@@ -104,13 +104,13 @@
104 elif sig_path:104 elif sig_path:
105 index_string = sig_path.get_relative_path()105 index_string = sig_path.get_relative_path()
106 else:106 else:
107 assert 0, "Both new and sig are None for some reason"107 assert 0, u"Both new and sig are None for some reason"
108 log.Warn(_("Error %s getting delta for %s") % (str(exc), util.fsdecode(index_string)))108 log.Warn(_(u"Error %s getting delta for %s") % (str(exc), util.fsdecode(index_string)))
109 return None109 return None
110110
111111
112def get_delta_path(new_path, sig_path, sigTarFile=None):112def get_delta_path(new_path, sig_path, sigTarFile=None):
113 """113 u"""
114 Return new delta_path which, when read, writes sig to sig_fileobj,114 Return new delta_path which, when read, writes sig to sig_fileobj,
115 if sigTarFile is not None115 if sigTarFile is not None
116 """116 """
@@ -119,35 +119,35 @@
119 ti = new_path.get_tarinfo()119 ti = new_path.get_tarinfo()
120 index = new_path.index120 index = new_path.index
121 delta_path = new_path.get_ropath()121 delta_path = new_path.get_ropath()
122 log.Debug(_("Getting delta of %s and %s") % (new_path, sig_path))122 log.Debug(_(u"Getting delta of %s and %s") % (new_path, sig_path))
123123
124 def callback(sig_string):124 def callback(sig_string):
125 """125 u"""
126 Callback activated when FileWithSignature read to end126 Callback activated when FileWithSignature read to end
127 """127 """
128 ti.size = len(sig_string)128 ti.size = len(sig_string)
129 ti.name = "signature/" + "/".join(index)129 ti.name = b"signature/" + b"/".join(index)
130 sigTarFile.addfile(ti, cStringIO.StringIO(sig_string))130 sigTarFile.addfile(ti, cStringIO.StringIO(sig_string))
131131
132 if new_path.isreg() and sig_path and sig_path.isreg() and sig_path.difftype == "signature":132 if new_path.isreg() and sig_path and sig_path.isreg() and sig_path.difftype == u"signature":
133 delta_path.difftype = "diff"133 delta_path.difftype = u"diff"
134 old_sigfp = sig_path.open("rb")134 old_sigfp = sig_path.open(u"rb")
135 newfp = FileWithReadCounter(new_path.open("rb"))135 newfp = FileWithReadCounter(new_path.open(u"rb"))
136 if sigTarFile:136 if sigTarFile:
137 newfp = FileWithSignature(newfp, callback,137 newfp = FileWithSignature(newfp, callback,
138 new_path.getsize())138 new_path.getsize())
139 delta_path.setfileobj(librsync.DeltaFile(old_sigfp, newfp))139 delta_path.setfileobj(librsync.DeltaFile(old_sigfp, newfp))
140 else:140 else:
141 delta_path.difftype = "snapshot"141 delta_path.difftype = u"snapshot"
142 if sigTarFile:142 if sigTarFile:
143 ti.name = "snapshot/" + "/".join(index)143 ti.name = b"snapshot/" + b"/".join(index)
144 if not new_path.isreg():144 if not new_path.isreg():
145 if sigTarFile:145 if sigTarFile:
146 sigTarFile.addfile(ti)146 sigTarFile.addfile(ti)
147 if stats:147 if stats:
148 stats.SourceFileSize += delta_path.getsize()148 stats.SourceFileSize += delta_path.getsize()
149 else:149 else:
150 newfp = FileWithReadCounter(new_path.open("rb"))150 newfp = FileWithReadCounter(new_path.open(u"rb"))
151 if sigTarFile:151 if sigTarFile:
152 newfp = FileWithSignature(newfp, callback,152 newfp = FileWithSignature(newfp, callback,
153 new_path.getsize())153 new_path.getsize())
@@ -158,27 +158,27 @@
158158
159159
160def log_delta_path(delta_path, new_path=None, stats=None):160def log_delta_path(delta_path, new_path=None, stats=None):
161 """161 u"""
162 Look at delta path and log delta. Add stats if new_path is set162 Look at delta path and log delta. Add stats if new_path is set
163 """163 """
164 if delta_path.difftype == "snapshot":164 if delta_path.difftype == u"snapshot":
165 if new_path and stats:165 if new_path and stats:
166 stats.add_new_file(new_path)166 stats.add_new_file(new_path)
167 log.Info(_("A %s") %167 log.Info(_(u"A %s") %
168 (util.fsdecode(delta_path.get_relative_path())),168 (util.fsdecode(delta_path.get_relative_path())),
169 log.InfoCode.diff_file_new,169 log.InfoCode.diff_file_new,
170 util.escape(delta_path.get_relative_path()))170 util.escape(delta_path.get_relative_path()))
171 else:171 else:
172 if new_path and stats:172 if new_path and stats:
173 stats.add_changed_file(new_path)173 stats.add_changed_file(new_path)
174 log.Info(_("M %s") %174 log.Info(_(u"M %s") %
175 (util.fsdecode(delta_path.get_relative_path())),175 (util.fsdecode(delta_path.get_relative_path())),
176 log.InfoCode.diff_file_changed,176 log.InfoCode.diff_file_changed,
177 util.escape(delta_path.get_relative_path()))177 util.escape(delta_path.get_relative_path()))
178178
179179
180def get_delta_iter(new_iter, sig_iter, sig_fileobj=None):180def get_delta_iter(new_iter, sig_iter, sig_fileobj=None):
181 """181 u"""
182 Generate delta iter from new Path iter and sig Path iter.182 Generate delta iter from new Path iter and sig Path iter.
183183
184 For each delta path of regular file type, path.difftype with be184 For each delta path of regular file type, path.difftype with be
@@ -189,25 +189,25 @@
189 """189 """
190 collated = collate2iters(new_iter, sig_iter)190 collated = collate2iters(new_iter, sig_iter)
191 if sig_fileobj:191 if sig_fileobj:
192 sigTarFile = util.make_tarfile("w", sig_fileobj)192 sigTarFile = util.make_tarfile(u"w", sig_fileobj)
193 else:193 else:
194 sigTarFile = None194 sigTarFile = None
195 for new_path, sig_path in collated:195 for new_path, sig_path in collated:
196 log.Debug(_("Comparing %s and %s") % (new_path and util.uindex(new_path.index),196 log.Debug(_(u"Comparing %s and %s") % (new_path and util.uindex(new_path.index),
197 sig_path and util.uindex(sig_path.index)))197 sig_path and util.uindex(sig_path.index)))
198 if not new_path or not new_path.type:198 if not new_path or not new_path.type:
199 # File doesn't exist (but ignore attempts to delete base dir;199 # File doesn't exist (but ignore attempts to delete base dir;
200 # old versions of duplicity could have written out the sigtar in200 # old versions of duplicity could have written out the sigtar in
201 # such a way as to fool us; LP: #929067)201 # such a way as to fool us; LP: #929067)
202 if sig_path and sig_path.exists() and sig_path.index != ():202 if sig_path and sig_path.exists() and sig_path.index != ():
203 # but signature says it did203 # but signature says it did
204 log.Info(_("D %s") %204 log.Info(_(u"D %s") %
205 (util.fsdecode(sig_path.get_relative_path())),205 (util.fsdecode(sig_path.get_relative_path())),
206 log.InfoCode.diff_file_deleted,206 log.InfoCode.diff_file_deleted,
207 util.escape(sig_path.get_relative_path()))207 util.escape(sig_path.get_relative_path()))
208 if sigTarFile:208 if sigTarFile:
209 ti = ROPath(sig_path.index).get_tarinfo()209 ti = ROPath(sig_path.index).get_tarinfo()
210 ti.name = "deleted/" + "/".join(sig_path.index)210 ti.name = u"deleted/" + u"/".join(sig_path.index)
211 sigTarFile.addfile(ti)211 sigTarFile.addfile(ti)
212 stats.add_deleted_file(sig_path)212 stats.add_deleted_file(sig_path)
213 yield ROPath(sig_path.index)213 yield ROPath(sig_path.index)
@@ -231,28 +231,28 @@
231231
232232
233def sigtar2path_iter(sigtarobj):233def sigtar2path_iter(sigtarobj):
234 """234 u"""
235 Convert signature tar file object open for reading into path iter235 Convert signature tar file object open for reading into path iter
236 """236 """
237 tf = util.make_tarfile("r", sigtarobj)237 tf = util.make_tarfile(u"r", sigtarobj)
238 tf.debug = 1238 tf.debug = 1
239 for tarinfo in tf:239 for tarinfo in tf:
240 tiname = util.get_tarinfo_name(tarinfo)240 tiname = util.get_tarinfo_name(tarinfo)
241 for prefix in ["signature/", "snapshot/", "deleted/"]:241 for prefix in [b"signature/", b"snapshot/", b"deleted/"]:
242 if tiname.startswith(prefix):242 if tiname.startswith(prefix):
243 # strip prefix and '/' from name and set it to difftype243 # strip prefix and '/' from name and set it to difftype
244 name, difftype = tiname[len(prefix):], prefix[:-1]244 name, difftype = tiname[len(prefix):], prefix[:-1]
245 break245 break
246 else:246 else:
247 raise DiffDirException("Bad tarinfo name %s" % (tiname,))247 raise DiffDirException(u"Bad tarinfo name %s" % (tiname,))
248248
249 index = tuple(name.split("/"))249 index = tuple(name.split(u"/"))
250 if not index[-1]:250 if not index[-1]:
251 index = index[:-1] # deal with trailing /, ""251 index = index[:-1] # deal with trailing /, ""
252252
253 ropath = ROPath(index)253 ropath = ROPath(index)
254 ropath.difftype = difftype254 ropath.difftype = difftype
255 if difftype == "signature" or difftype == "snapshot":255 if difftype == u"signature" or difftype == u"snapshot":
256 ropath.init_from_tarinfo(tarinfo)256 ropath.init_from_tarinfo(tarinfo)
257 if ropath.isreg():257 if ropath.isreg():
258 ropath.setfileobj(tf.extractfile(tarinfo))258 ropath.setfileobj(tf.extractfile(tarinfo))
@@ -261,7 +261,7 @@
261261
262262
263def collate2iters(riter1, riter2):263def collate2iters(riter1, riter2):
264 """264 u"""
265 Collate two iterators.265 Collate two iterators.
266266
267 The elements yielded by each iterator must be have an index267 The elements yielded by each iterator must be have an index
@@ -305,7 +305,7 @@
305305
306306
307def combine_path_iters(path_iter_list):307def combine_path_iters(path_iter_list):
308 """308 u"""
309 Produce new iterator by combining the iterators in path_iter_list309 Produce new iterator by combining the iterators in path_iter_list
310310
311 This new iter will iterate every path that is in path_iter_list in311 This new iter will iterate every path that is in path_iter_list in
@@ -320,7 +320,7 @@
320 path_iter_list.reverse()320 path_iter_list.reverse()
321321
322 def get_triple(iter_index):322 def get_triple(iter_index):
323 """323 u"""
324 Represent the next element as a triple, to help sorting324 Represent the next element as a triple, to help sorting
325 """325 """
326 try:326 try:
@@ -330,7 +330,7 @@
330 return (path.index, iter_index, path)330 return (path.index, iter_index, path)
331331
332 def refresh_triple_list(triple_list):332 def refresh_triple_list(triple_list):
333 """333 u"""
334 Update all elements with path_index same as first element334 Update all elements with path_index same as first element
335 """335 """
336 path_index = triple_list[0][0]336 path_index = triple_list[0][0]
@@ -355,7 +355,7 @@
355355
356356
357def DirDelta_WriteSig(path_iter, sig_infp_list, newsig_outfp):357def DirDelta_WriteSig(path_iter, sig_infp_list, newsig_outfp):
358 """358 u"""
359 Like DirDelta but also write signature into sig_fileobj359 Like DirDelta but also write signature into sig_fileobj
360360
361 Like DirDelta, sig_infp_list can be a tar fileobj or a sorted list361 Like DirDelta, sig_infp_list can be a tar fileobj or a sorted list
@@ -376,26 +376,26 @@
376376
377377
378def get_combined_path_iter(sig_infp_list):378def get_combined_path_iter(sig_infp_list):
379 """379 u"""
380 Return path iter combining signatures in list of open sig files380 Return path iter combining signatures in list of open sig files
381 """381 """
382 return combine_path_iters([sigtar2path_iter(x) for x in sig_infp_list])382 return combine_path_iters([sigtar2path_iter(x) for x in sig_infp_list])
383383
384384
385class FileWithReadCounter:385class FileWithReadCounter:
386 """386 u"""
387 File-like object which also computes amount read as it is read387 File-like object which also computes amount read as it is read
388 """388 """
389 def __init__(self, infile):389 def __init__(self, infile):
390 """FileWithReadCounter initializer"""390 u"""FileWithReadCounter initializer"""
391 self.infile = infile391 self.infile = infile
392392
393 def read(self, length=-1):393 def read(self, length=-1):
394 try:394 try:
395 buf = self.infile.read(length)395 buf = self.infile.read(length)
396 except IOError as ex:396 except IOError as ex:
397 buf = ""397 buf = u""
398 log.Warn(_("Error %s getting delta for %s") % (str(ex), self.infile.uc_name))398 log.Warn(_(u"Error %s getting delta for %s") % (str(ex), self.infile.uc_name))
399 if stats:399 if stats:
400 stats.SourceFileSize += len(buf)400 stats.SourceFileSize += len(buf)
401 return buf401 return buf
@@ -405,13 +405,13 @@
405405
406406
407class FileWithSignature:407class FileWithSignature:
408 """408 u"""
409 File-like object which also computes signature as it is read409 File-like object which also computes signature as it is read
410 """410 """
411 blocksize = 32 * 1024411 blocksize = 32 * 1024
412412
413 def __init__(self, infile, callback, filelen, *extra_args):413 def __init__(self, infile, callback, filelen, *extra_args):
414 """414 u"""
415 FileTee initializer415 FileTee initializer
416416
417 The object will act like infile, but whenever it is read it417 The object will act like infile, but whenever it is read it
@@ -442,11 +442,11 @@
442442
443443
444class TarBlock:444class TarBlock:
445 """445 u"""
446 Contain information to add next file to tar446 Contain information to add next file to tar
447 """447 """
448 def __init__(self, index, data):448 def __init__(self, index, data):
449 """449 u"""
450 TarBlock initializer - just store data450 TarBlock initializer - just store data
451 """451 """
452 self.index = index452 self.index = index
@@ -454,7 +454,7 @@
454454
455455
456class TarBlockIter:456class TarBlockIter:
457 """457 u"""
458 A bit like an iterator, yield tar blocks given input iterator458 A bit like an iterator, yield tar blocks given input iterator
459459
460 Unlike an iterator, however, control over the maximum size of a460 Unlike an iterator, however, control over the maximum size of a
@@ -462,7 +462,7 @@
462 get_footer() is available.462 get_footer() is available.
463 """463 """
464 def __init__(self, input_iter):464 def __init__(self, input_iter):
465 """465 u"""
466 TarBlockIter initializer466 TarBlockIter initializer
467 """467 """
468 self.input_iter = input_iter468 self.input_iter = input_iter
@@ -476,28 +476,28 @@
476 self.remember_block = None # holds block of next block476 self.remember_block = None # holds block of next block
477 self.queued_data = None # data to return in next next() call477 self.queued_data = None # data to return in next next() call
478478
479 def tarinfo2tarblock(self, index, tarinfo, file_data=""):479 def tarinfo2tarblock(self, index, tarinfo, file_data=b""):
480 """480 u"""
481 Make tarblock out of tarinfo and file data481 Make tarblock out of tarinfo and file data
482 """482 """
483 tarinfo.size = len(file_data)483 tarinfo.size = len(file_data)
484 headers = tarinfo.tobuf(errors='replace')484 headers = tarinfo.tobuf(errors=u'replace')
485 blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE) # @UnusedVariable485 blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE) # @UnusedVariable
486 if remainder > 0:486 if remainder > 0:
487 filler_data = "\0" * (tarfile.BLOCKSIZE - remainder)487 filler_data = b"\0" * (tarfile.BLOCKSIZE - remainder)
488 else:488 else:
489 filler_data = ""489 filler_data = b""
490 return TarBlock(index, "%s%s%s" % (headers, file_data, filler_data))490 return TarBlock(index, b"%s%s%s" % (headers, file_data, filler_data))
491491
492 def process(self, val):492 def process(self, val):
493 """493 u"""
494 Turn next value of input_iter into a TarBlock494 Turn next value of input_iter into a TarBlock
495 """495 """
496 assert not self.process_waiting496 assert not self.process_waiting
497 XXX # Override in subclass @UndefinedVariable497 XXX # Override in subclass @UndefinedVariable
498498
499 def process_continued(self):499 def process_continued(self):
500 """500 u"""
501 Get more tarblocks501 Get more tarblocks
502502
503 If processing val above would produce more than one TarBlock,503 If processing val above would produce more than one TarBlock,
@@ -507,7 +507,7 @@
507 XXX # Override in subclass @UndefinedVariable507 XXX # Override in subclass @UndefinedVariable
508508
509 def next(self):509 def next(self):
510 """510 u"""
511 Return next block and update offset511 Return next block and update offset
512 """512 """
513 if self.queued_data is not None:513 if self.queued_data is not None:
@@ -539,19 +539,19 @@
539 return 64 * 1024539 return 64 * 1024
540540
541 def get_previous_index(self):541 def get_previous_index(self):
542 """542 u"""
543 Return index of last tarblock, or None if no previous index543 Return index of last tarblock, or None if no previous index
544 """544 """
545 return self.previous_index, self.previous_block545 return self.previous_index, self.previous_block
546546
547 def queue_index_data(self, data):547 def queue_index_data(self, data):
548 """548 u"""
549 Next time next() is called, we will return data instead of processing549 Next time next() is called, we will return data instead of processing
550 """550 """
551 self.queued_data = data551 self.queued_data = data
552552
553 def remember_next_index(self):553 def remember_next_index(self):
554 """554 u"""
555 When called, remember the index of the next block iterated555 When called, remember the index of the next block iterated
556 """556 """
557 self.remember_next = True557 self.remember_next = True
@@ -559,29 +559,29 @@
559 self.remember_block = None559 self.remember_block = None
560560
561 def recall_index(self):561 def recall_index(self):
562 """562 u"""
563 Retrieve index remembered with remember_next_index563 Retrieve index remembered with remember_next_index
564 """564 """
565 return self.remember_value, self.remember_block565 return self.remember_value, self.remember_block
566566
567 def get_footer(self):567 def get_footer(self):
568 """568 u"""
569 Return closing string for tarfile, reset offset569 Return closing string for tarfile, reset offset
570 """570 """
571 blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) # @UnusedVariable571 blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) # @UnusedVariable
572 self.offset = 0572 self.offset = 0
573 return '\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0573 return u'\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0
574574
575 def __iter__(self):575 def __iter__(self):
576 return self576 return self
577577
578578
579class DummyBlockIter(TarBlockIter):579class DummyBlockIter(TarBlockIter):
580 """580 u"""
581 TarBlockIter that does no file reading581 TarBlockIter that does no file reading
582 """582 """
583 def process(self, delta_ropath):583 def process(self, delta_ropath):
584 """584 u"""
585 Get a fake tarblock from delta_ropath585 Get a fake tarblock from delta_ropath
586 """586 """
587 ti = delta_ropath.get_tarinfo()587 ti = delta_ropath.get_tarinfo()
@@ -601,28 +601,28 @@
601601
602602
603class SigTarBlockIter(TarBlockIter):603class SigTarBlockIter(TarBlockIter):
604 """604 u"""
605 TarBlockIter that yields blocks of a signature tar from path_iter605 TarBlockIter that yields blocks of a signature tar from path_iter
606 """606 """
607 def process(self, path):607 def process(self, path):
608 """608 u"""
609 Return associated signature TarBlock from path609 Return associated signature TarBlock from path
610 """610 """
611 ti = path.get_tarinfo()611 ti = path.get_tarinfo()
612 if path.isreg():612 if path.isreg():
613 sfp = librsync.SigFile(path.open("rb"),613 sfp = librsync.SigFile(path.open(u"rb"),
614 get_block_size(path.getsize()))614 get_block_size(path.getsize()))
615 sigbuf = sfp.read()615 sigbuf = sfp.read()
616 sfp.close()616 sfp.close()
617 ti.name = "signature/" + "/".join(path.index)617 ti.name = u"signature/" + u"/".join(path.index)
618 return self.tarinfo2tarblock(path.index, ti, sigbuf)618 return self.tarinfo2tarblock(path.index, ti, sigbuf)
619 else:619 else:
620 ti.name = "snapshot/" + "/".join(path.index)620 ti.name = u"snapshot/" + u"/".join(path.index)
621 return self.tarinfo2tarblock(path.index, ti)621 return self.tarinfo2tarblock(path.index, ti)
622622
623623
624class DeltaTarBlockIter(TarBlockIter):624class DeltaTarBlockIter(TarBlockIter):
625 """625 u"""
626 TarBlockIter that yields parts of a deltatar file626 TarBlockIter that yields parts of a deltatar file
627627
628 Unlike SigTarBlockIter, the argument to __init__ is a628 Unlike SigTarBlockIter, the argument to __init__ is a
@@ -630,15 +630,15 @@
630 calculated.630 calculated.
631 """631 """
632 def process(self, delta_ropath):632 def process(self, delta_ropath):
633 """633 u"""
634 Get a tarblock from delta_ropath634 Get a tarblock from delta_ropath
635 """635 """
636 def add_prefix(tarinfo, prefix):636 def add_prefix(tarinfo, prefix):
637 """Add prefix to the name of a tarinfo file"""637 u"""Add prefix to the name of a tarinfo file"""
638 if tarinfo.name == ".":638 if tarinfo.name == b".":
639 tarinfo.name = prefix + "/"639 tarinfo.name = prefix + b"/"
640 else:640 else:
641 tarinfo.name = "%s/%s" % (prefix, tarinfo.name)641 tarinfo.name = b"%s/%s" % (prefix, tarinfo.name)
642642
643 ti = delta_ropath.get_tarinfo()643 ti = delta_ropath.get_tarinfo()
644 index = delta_ropath.index644 index = delta_ropath.index
@@ -646,29 +646,29 @@
646 # Return blocks of deleted files or fileless snapshots646 # Return blocks of deleted files or fileless snapshots
647 if not delta_ropath.type or not delta_ropath.fileobj:647 if not delta_ropath.type or not delta_ropath.fileobj:
648 if not delta_ropath.type:648 if not delta_ropath.type:
649 add_prefix(ti, "deleted")649 add_prefix(ti, u"deleted")
650 else:650 else:
651 assert delta_ropath.difftype == "snapshot"651 assert delta_ropath.difftype == u"snapshot"
652 add_prefix(ti, "snapshot")652 add_prefix(ti, b"snapshot")
653 return self.tarinfo2tarblock(index, ti)653 return self.tarinfo2tarblock(index, ti)
654654
655 # Now handle single volume block case655 # Now handle single volume block case
656 fp = delta_ropath.open("rb")656 fp = delta_ropath.open(u"rb")
657 data, last_block = self.get_data_block(fp)657 data, last_block = self.get_data_block(fp)
658 if stats:658 if stats:
659 stats.RawDeltaSize += len(data)659 stats.RawDeltaSize += len(data)
660 if last_block:660 if last_block:
661 if delta_ropath.difftype == "snapshot":661 if delta_ropath.difftype == u"snapshot":
662 add_prefix(ti, "snapshot")662 add_prefix(ti, b"snapshot")
663 elif delta_ropath.difftype == "diff":663 elif delta_ropath.difftype == u"diff":
664 add_prefix(ti, "diff")664 add_prefix(ti, b"diff")
665 else:665 else:
666 assert 0, "Unknown difftype"666 assert 0, u"Unknown difftype"
667 return self.tarinfo2tarblock(index, ti, data)667 return self.tarinfo2tarblock(index, ti, data)
668668
669 # Finally, do multivol snapshot or diff case669 # Finally, do multivol snapshot or diff case
670 full_name = "multivol_%s/%s" % (delta_ropath.difftype, ti.name)670 full_name = u"multivol_%s/%s" % (delta_ropath.difftype, ti.name)
671 ti.name = full_name + "/1"671 ti.name = full_name + u"/1"
672 self.process_prefix = full_name672 self.process_prefix = full_name
673 self.process_fp = fp673 self.process_fp = fp
674 self.process_ropath = delta_ropath674 self.process_ropath = delta_ropath
@@ -677,26 +677,26 @@
677 return self.tarinfo2tarblock(index, ti, data)677 return self.tarinfo2tarblock(index, ti, data)
678678
679 def get_data_block(self, fp):679 def get_data_block(self, fp):
680 """680 u"""
681 Return pair (next data block, boolean last data block)681 Return pair (next data block, boolean last data block)
682 """682 """
683 read_size = self.get_read_size()683 read_size = self.get_read_size()
684 buf = fp.read(read_size)684 buf = fp.read(read_size)
685 if len(buf) < read_size:685 if len(buf) < read_size:
686 if fp.close():686 if fp.close():
687 raise DiffDirException("Error closing file")687 raise DiffDirException(u"Error closing file")
688 return (buf, True)688 return (buf, True)
689 else:689 else:
690 return (buf, False)690 return (buf, False)
691691
692 def process_continued(self):692 def process_continued(self):
693 """693 u"""
694 Return next volume in multivol diff or snapshot694 Return next volume in multivol diff or snapshot
695 """695 """
696 assert self.process_waiting696 assert self.process_waiting
697 ropath = self.process_ropath697 ropath = self.process_ropath
698 ti, index = ropath.get_tarinfo(), ropath.index698 ti, index = ropath.get_tarinfo(), ropath.index
699 ti.name = "%s/%d" % (self.process_prefix, self.process_next_vol_number)699 ti.name = u"%s/%d" % (self.process_prefix, self.process_next_vol_number)
700 data, last_block = self.get_data_block(self.process_fp)700 data, last_block = self.get_data_block(self.process_fp)
701 if stats:701 if stats:
702 stats.RawDeltaSize += len(data)702 stats.RawDeltaSize += len(data)
@@ -712,13 +712,13 @@
712712
713713
714def write_block_iter(block_iter, out_obj):714def write_block_iter(block_iter, out_obj):
715 """715 u"""
716 Write block_iter to filename, path, or file object716 Write block_iter to filename, path, or file object
717 """717 """
718 if isinstance(out_obj, Path):718 if isinstance(out_obj, Path):
719 fp = open(out_obj.name, "wb")719 fp = open(out_obj.name, u"wb")
720 elif isinstance(out_obj, types.StringTypes):720 elif isinstance(out_obj, types.StringTypes):
721 fp = open(out_obj, "wb")721 fp = open(out_obj, u"wb")
722 else:722 else:
723 fp = out_obj723 fp = out_obj
724 for block in block_iter:724 for block in block_iter:
@@ -730,7 +730,7 @@
730730
731731
732def get_block_size(file_len):732def get_block_size(file_len):
733 """733 u"""
734 Return a reasonable block size to use on files of length file_len734 Return a reasonable block size to use on files of length file_len
735735
736 If the block size is too big, deltas will be bigger than is736 If the block size is too big, deltas will be bigger than is
737737
=== modified file 'duplicity/manifest.py'
--- duplicity/manifest.py 2018-09-06 11:14:11 +0000
+++ duplicity/manifest.py 2018-09-24 21:19:45 +0000
@@ -19,7 +19,7 @@
19# along with duplicity; if not, write to the Free Software Foundation,19# along with duplicity; if not, write to the Free Software Foundation,
20# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA20# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
2121
22"""Create and edit manifest for session contents"""22u"""Create and edit manifest for session contents"""
2323
24from future_builtins import filter24from future_builtins import filter
2525
@@ -32,18 +32,18 @@
3232
3333
34class ManifestError(Exception):34class ManifestError(Exception):
35 """35 u"""
36 Exception raised when problem with manifest36 Exception raised when problem with manifest
37 """37 """
38 pass38 pass
3939
4040
41class Manifest:41class Manifest:
42 """42 u"""
43 List of volumes and information about each one43 List of volumes and information about each one
44 """44 """
45 def __init__(self, fh=None):45 def __init__(self, fh=None):
46 """46 u"""
47 Create blank Manifest47 Create blank Manifest
4848
49 @param fh: fileobj for manifest49 @param fh: fileobj for manifest
@@ -59,7 +59,7 @@
59 self.files_changed = []59 self.files_changed = []
6060
61 def set_dirinfo(self):61 def set_dirinfo(self):
62 """62 u"""
63 Set information about directory from globals,63 Set information about directory from globals,
64 and write to manifest file.64 and write to manifest file.
6565
@@ -70,13 +70,13 @@
70 self.local_dirname = globals.local_path.name # @UndefinedVariable70 self.local_dirname = globals.local_path.name # @UndefinedVariable
71 if self.fh:71 if self.fh:
72 if self.hostname:72 if self.hostname:
73 self.fh.write("Hostname %s\n" % self.hostname)73 self.fh.write(u"Hostname %s\n" % self.hostname)
74 if self.local_dirname:74 if self.local_dirname:
75 self.fh.write("Localdir %s\n" % Quote(self.local_dirname))75 self.fh.write(u"Localdir %s\n" % Quote(self.local_dirname))
76 return self76 return self
7777
78 def check_dirinfo(self):78 def check_dirinfo(self):
79 """79 u"""
80 Return None if dirinfo is the same, otherwise error message80 Return None if dirinfo is the same, otherwise error message
8181
82 Does not raise an error message if hostname or local_dirname82 Does not raise an error message if hostname or local_dirname
@@ -89,41 +89,41 @@
89 return89 return
9090
91 if self.hostname and self.hostname != globals.hostname:91 if self.hostname and self.hostname != globals.hostname:
92 errmsg = _("Fatal Error: Backup source host has changed.\n"92 errmsg = _(u"Fatal Error: Backup source host has changed.\n"
93 "Current hostname: %s\n"93 u"Current hostname: %s\n"
94 "Previous hostname: %s") % (globals.hostname, self.hostname)94 u"Previous hostname: %s") % (globals.hostname, self.hostname)
95 code = log.ErrorCode.hostname_mismatch95 code = log.ErrorCode.hostname_mismatch
96 code_extra = "%s %s" % (util.escape(globals.hostname), util.escape(self.hostname))96 code_extra = u"%s %s" % (util.escape(globals.hostname), util.escape(self.hostname))
9797
98 elif (self.local_dirname and self.local_dirname != globals.local_path.name): # @UndefinedVariable98 elif (self.local_dirname and self.local_dirname != globals.local_path.name): # @UndefinedVariable
99 errmsg = _("Fatal Error: Backup source directory has changed.\n"99 errmsg = _(u"Fatal Error: Backup source directory has changed.\n"
100 "Current directory: %s\n"100 u"Current directory: %s\n"
101 "Previous directory: %s") % (globals.local_path.name, self.local_dirname) # @UndefinedVariable101 u"Previous directory: %s") % (globals.local_path.name, self.local_dirname) # @UndefinedVariable
102 code = log.ErrorCode.source_dir_mismatch102 code = log.ErrorCode.source_dir_mismatch
103 code_extra = "%s %s" % (util.escape(globals.local_path.name),103 code_extra = u"%s %s" % (util.escape(globals.local_path.name),
104 util.escape(self.local_dirname)) # @UndefinedVariable104 util.escape(self.local_dirname)) # @UndefinedVariable
105 else:105 else:
106 return106 return
107107
108 log.FatalError(errmsg + "\n\n" +108 log.FatalError(errmsg + u"\n\n" +
109 _("Aborting because you may have accidentally tried to "109 _(u"Aborting because you may have accidentally tried to "
110 "backup two different data sets to the same remote "110 u"backup two different data sets to the same remote "
111 "location, or using the same archive directory. If "111 u"location, or using the same archive directory. If "
112 "this is not a mistake, use the "112 u"this is not a mistake, use the "
113 "--allow-source-mismatch switch to avoid seeing this "113 u"--allow-source-mismatch switch to avoid seeing this "
114 "message"), code, code_extra)114 u"message"), code, code_extra)
115115
116 def set_files_changed_info(self, files_changed):116 def set_files_changed_info(self, files_changed):
117 if files_changed:117 if files_changed:
118 self.files_changed = files_changed118 self.files_changed = files_changed
119119
120 if self.fh:120 if self.fh:
121 self.fh.write("Filelist %d\n" % len(self.files_changed))121 self.fh.write(u"Filelist %d\n" % len(self.files_changed))
122 for fileinfo in self.files_changed:122 for fileinfo in self.files_changed:
123 self.fh.write(" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0])))123 self.fh.write(b" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0])))
124124
125 def add_volume_info(self, vi):125 def add_volume_info(self, vi):
126 """126 u"""
127 Add volume info vi to manifest and write to manifest127 Add volume info vi to manifest and write to manifest
128128
129 @param vi: volume info to add129 @param vi: volume info to add
@@ -134,10 +134,10 @@
134 vol_num = vi.volume_number134 vol_num = vi.volume_number
135 self.volume_info_dict[vol_num] = vi135 self.volume_info_dict[vol_num] = vi
136 if self.fh:136 if self.fh:
137 self.fh.write(vi.to_string() + "\n")137 self.fh.write(vi.to_string() + b"\n")
138138
139 def del_volume_info(self, vol_num):139 def del_volume_info(self, vol_num):
140 """140 u"""
141 Remove volume vol_num from the manifest141 Remove volume vol_num from the manifest
142142
143 @param vol_num: volume number to delete143 @param vol_num: volume number to delete
@@ -148,87 +148,87 @@
148 try:148 try:
149 del self.volume_info_dict[vol_num]149 del self.volume_info_dict[vol_num]
150 except Exception:150 except Exception:
151 raise ManifestError("Volume %d not present in manifest" % (vol_num,))151 raise ManifestError(u"Volume %d not present in manifest" % (vol_num,))
152152
153 def to_string(self):153 def to_string(self):
154 """154 u"""
155 Return string version of self (just concatenate vi strings)155 Return string version of self (just concatenate vi strings)
156156
157 @rtype: string157 @rtype: string
158 @return: self in string form158 @return: self in string form
159 """159 """
160 result = ""160 result = u""
161 if self.hostname:161 if self.hostname:
162 result += "Hostname %s\n" % self.hostname162 result += b"Hostname %s\n" % self.hostname
163 if self.local_dirname:163 if self.local_dirname:
164 result += "Localdir %s\n" % Quote(self.local_dirname)164 result += b"Localdir %s\n" % Quote(self.local_dirname)
165165
166 result += "Filelist %d\n" % len(self.files_changed)166 result += b"Filelist %d\n" % len(self.files_changed)
167 for fileinfo in self.files_changed:167 for fileinfo in self.files_changed:
168 result += " %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0]))168 result += b" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0]))
169169
170 vol_num_list = self.volume_info_dict.keys()170 vol_num_list = self.volume_info_dict.keys()
171 vol_num_list.sort()171 vol_num_list.sort()
172172
173 def vol_num_to_string(vol_num):173 def vol_num_to_string(vol_num):
174 return self.volume_info_dict[vol_num].to_string()174 return self.volume_info_dict[vol_num].to_string()
175 result = "%s%s\n" % (result,175 result = b"%s%s\n" % (result,
176 "\n".join(map(vol_num_to_string, vol_num_list)))176 b"\n".join(map(vol_num_to_string, vol_num_list)))
177 return result177 return result
178178
179 __str__ = to_string179 __str__ = to_string
180180
181 def from_string(self, s):181 def from_string(self, s):
182 """182 u"""
183 Initialize self from string s, return self183 Initialize self from string s, return self
184 """184 """
185 def get_field(fieldname):185 def get_field(fieldname):
186 """186 u"""
187 Return the value of a field by parsing s, or None if no field187 Return the value of a field by parsing s, or None if no field
188 """188 """
189 m = re.search("(^|\\n)%s\\s(.*?)\n" % fieldname, s, re.I)189 m = re.search(u"(^|\\n)%s\\s(.*?)\n" % fieldname, s, re.I)
190 if not m:190 if not m:
191 return None191 return None
192 else:192 else:
193 return Unquote(m.group(2))193 return Unquote(m.group(2))
194 self.hostname = get_field("hostname")194 self.hostname = get_field(u"hostname")
195 self.local_dirname = get_field("localdir")195 self.local_dirname = get_field(u"localdir")
196196
197 highest_vol = 0197 highest_vol = 0
198 latest_vol = 0198 latest_vol = 0
199 vi_regexp = re.compile("(?:^|\\n)(volume\\s.*(?:\\n.*)*?)(?=\\nvolume\\s|$)", re.I)199 vi_regexp = re.compile(u"(?:^|\\n)(volume\\s.*(?:\\n.*)*?)(?=\\nvolume\\s|$)", re.I)
200 vi_iterator = vi_regexp.finditer(s)200 vi_iterator = vi_regexp.finditer(s)
201 for match in vi_iterator:201 for match in vi_iterator:
202 vi = VolumeInfo().from_string(match.group(1))202 vi = VolumeInfo().from_string(match.group(1))
203 self.add_volume_info(vi)203 self.add_volume_info(vi)
204 latest_vol = vi.volume_number204 latest_vol = vi.volume_number
205 highest_vol = max(highest_vol, latest_vol)205 highest_vol = max(highest_vol, latest_vol)
206 log.Debug(_("Found manifest volume %s") % latest_vol)206 log.Debug(_(u"Found manifest volume %s") % latest_vol)
207 # If we restarted after losing some remote volumes, the highest volume207 # If we restarted after losing some remote volumes, the highest volume
208 # seen may be higher than the last volume recorded. That is, the208 # seen may be higher than the last volume recorded. That is, the
209 # manifest could contain "vol1, vol2, vol3, vol2." If so, we don't209 # manifest could contain "vol1, vol2, vol3, vol2." If so, we don't
210 # want to keep vol3's info.210 # want to keep vol3's info.
211 for i in range(latest_vol + 1, highest_vol + 1):211 for i in range(latest_vol + 1, highest_vol + 1):
212 self.del_volume_info(i)212 self.del_volume_info(i)
213 log.Info(_("Found %s volumes in manifest") % latest_vol)213 log.Info(_(u"Found %s volumes in manifest") % latest_vol)
214214
215 # Get file changed list - not needed if --file-changed not present215 # Get file changed list - not needed if --file-changed not present
216 filecount = 0216 filecount = 0
217 if globals.file_changed is not None:217 if globals.file_changed is not None:
218 filelist_regexp = re.compile("(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S)218 filelist_regexp = re.compile(u"(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S)
219 match = filelist_regexp.search(s)219 match = filelist_regexp.search(s)
220 if match:220 if match:
221 filecount = int(match.group(2))221 filecount = int(match.group(2))
222 if filecount > 0:222 if filecount > 0:
223 def parse_fileinfo(line):223 def parse_fileinfo(line):
224 fileinfo = line.strip().split()224 fileinfo = line.strip().split()
225 return (fileinfo[0], ''.join(fileinfo[1:]))225 return (fileinfo[0], u''.join(fileinfo[1:]))
226226
227 self.files_changed = list(map(parse_fileinfo, match.group(3).split('\n')))227 self.files_changed = list(map(parse_fileinfo, match.group(3).split(u'\n')))
228228
229 if filecount != len(self.files_changed):229 if filecount != len(self.files_changed):
230 log.Error(_("Manifest file '%s' is corrupt: File count says %d, File list contains %d" %230 log.Error(_(u"Manifest file '%s' is corrupt: File count says %d, File list contains %d" %
231 (self.fh.base if self.fh else "", filecount, len(self.files_changed))))231 (self.fh.base if self.fh else u"", filecount, len(self.files_changed))))
232 self.corrupt_filelist = True232 self.corrupt_filelist = True
233233
234 return self234 return self
@@ -237,7 +237,7 @@
237 return self.files_changed237 return self.files_changed
238238
239 def __eq__(self, other):239 def __eq__(self, other):
240 """240 u"""
241 Two manifests are equal if they contain the same volume infos241 Two manifests are equal if they contain the same volume infos
242 """242 """
243 vi_list1 = self.volume_info_dict.keys()243 vi_list1 = self.volume_info_dict.keys()
@@ -246,39 +246,39 @@
246 vi_list2.sort()246 vi_list2.sort()
247247
248 if vi_list1 != vi_list2:248 if vi_list1 != vi_list2:
249 log.Notice(_("Manifests not equal because different volume numbers"))249 log.Notice(_(u"Manifests not equal because different volume numbers"))
250 return False250 return False
251251
252 for i in range(len(vi_list1)):252 for i in range(len(vi_list1)):
253 if not vi_list1[i] == vi_list2[i]:253 if not vi_list1[i] == vi_list2[i]:
254 log.Notice(_("Manifests not equal because volume lists differ"))254 log.Notice(_(u"Manifests not equal because volume lists differ"))
255 return False255 return False
256256
257 if (self.hostname != other.hostname or257 if (self.hostname != other.hostname or
258 self.local_dirname != other.local_dirname):258 self.local_dirname != other.local_dirname):
259 log.Notice(_("Manifests not equal because hosts or directories differ"))259 log.Notice(_(u"Manifests not equal because hosts or directories differ"))
260 return False260 return False
261261
262 return True262 return True
263263
264 def __ne__(self, other):264 def __ne__(self, other):
265 """265 u"""
266 Defines !=. Not doing this always leads to annoying bugs...266 Defines !=. Not doing this always leads to annoying bugs...
267 """267 """
268 return not self.__eq__(other)268 return not self.__eq__(other)
269269
270 def write_to_path(self, path):270 def write_to_path(self, path):
271 """271 u"""
272 Write string version of manifest to given path272 Write string version of manifest to given path
273 """273 """
274 assert not path.exists()274 assert not path.exists()
275 fout = path.open("wb")275 fout = path.open(u"wb")
276 fout.write(self.to_string())276 fout.write(self.to_string())
277 assert not fout.close()277 assert not fout.close()
278 path.setdata()278 path.setdata()
279279
280 def get_containing_volumes(self, index_prefix):280 def get_containing_volumes(self, index_prefix):
281 """281 u"""
282 Return list of volume numbers that may contain index_prefix282 Return list of volume numbers that may contain index_prefix
283 """283 """
284 return filter(lambda vol_num:284 return filter(lambda vol_num:
@@ -287,18 +287,18 @@
287287
288288
289class VolumeInfoError(Exception):289class VolumeInfoError(Exception):
290 """290 u"""
291 Raised when there is a problem initializing a VolumeInfo from string291 Raised when there is a problem initializing a VolumeInfo from string
292 """292 """
293 pass293 pass
294294
295295
296class VolumeInfo:296class VolumeInfo:
297 """297 u"""
298 Information about a single volume298 Information about a single volume
299 """299 """
300 def __init__(self):300 def __init__(self):
301 """VolumeInfo initializer"""301 u"""VolumeInfo initializer"""
302 self.volume_number = None302 self.volume_number = None
303 self.start_index = None303 self.start_index = None
304 self.start_block = None304 self.start_block = None
@@ -309,7 +309,7 @@
309 def set_info(self, vol_number,309 def set_info(self, vol_number,
310 start_index, start_block,310 start_index, start_block,
311 end_index, end_block):311 end_index, end_block):
312 """312 u"""
313 Set essential VolumeInfo information, return self313 Set essential VolumeInfo information, return self
314314
315 Call with starting and ending paths stored in the volume. If315 Call with starting and ending paths stored in the volume. If
@@ -325,13 +325,13 @@
325 return self325 return self
326326
327 def set_hash(self, hash_name, data):327 def set_hash(self, hash_name, data):
328 """328 u"""
329 Set the value of hash hash_name (e.g. "MD5") to data329 Set the value of hash hash_name (e.g. "MD5") to data
330 """330 """
331 self.hashes[hash_name] = data331 self.hashes[hash_name] = data
332332
333 def get_best_hash(self):333 def get_best_hash(self):
334 """334 u"""
335 Return pair (hash_type, hash_data)335 Return pair (hash_type, hash_data)
336336
337 SHA1 is the best hash, and MD5 is the second best hash. None337 SHA1 is the best hash, and MD5 is the second best hash. None
@@ -340,59 +340,59 @@
340 if not self.hashes:340 if not self.hashes:
341 return None341 return None
342 try:342 try:
343 return ("SHA1", self.hashes['SHA1'])343 return (u"SHA1", self.hashes[u'SHA1'])
344 except KeyError:344 except KeyError:
345 pass345 pass
346 try:346 try:
347 return ("MD5", self.hashes['MD5'])347 return (u"MD5", self.hashes[u'MD5'])
348 except KeyError:348 except KeyError:
349 pass349 pass
350 return self.hashes.items()[0]350 return self.hashes.items()[0]
351351
352 def to_string(self):352 def to_string(self):
353 """353 u"""
354 Return nicely formatted string reporting all information354 Return nicely formatted string reporting all information
355 """355 """
356 def index_to_string(index):356 def index_to_string(index):
357 """Return printable version of index without any whitespace"""357 u"""Return printable version of index without any whitespace"""
358 if index:358 if index:
359 s = "/".join(index)359 s = b"/".join(index)
360 return Quote(s)360 return Quote(s)
361 else:361 else:
362 return "."362 return b"."
363363
364 slist = ["Volume %d:" % self.volume_number]364 slist = [b"Volume %d:" % self.volume_number]
365 whitespace = " "365 whitespace = b" "
366 slist.append("%sStartingPath %s %s" %366 slist.append(b"%sStartingPath %s %s" %
367 (whitespace, index_to_string(self.start_index), (self.start_block or " ")))367 (whitespace, index_to_string(self.start_index), (self.start_block or b" ")))
368 slist.append("%sEndingPath %s %s" %368 slist.append(b"%sEndingPath %s %s" %
369 (whitespace, index_to_string(self.end_index), (self.end_block or " ")))369 (whitespace, index_to_string(self.end_index), (self.end_block or b" ")))
370 for key in self.hashes:370 for key in self.hashes:
371 slist.append("%sHash %s %s" %371 slist.append(b"%sHash %s %s" %
372 (whitespace, key.encode(), self.hashes[key]))372 (whitespace, key.encode(), self.hashes[key]))
373 return "\n".join(slist)373 return b"\n".join(slist)
374374
375 __str__ = to_string375 __str__ = to_string
376376
377 def from_string(self, s):377 def from_string(self, s):
378 """378 u"""
379 Initialize self from string s as created by to_string379 Initialize self from string s as created by to_string
380 """380 """
381 def string_to_index(s):381 def string_to_index(s):
382 """382 u"""
383 Return tuple index from string383 Return tuple index from string
384 """384 """
385 s = Unquote(s)385 s = Unquote(s)
386 if s == ".":386 if s == b".":
387 return ()387 return ()
388 return tuple(s.split("/"))388 return tuple(s.split(b"/"))
389389
390 linelist = s.strip().split("\n")390 linelist = s.strip().split(b"\n")
391391
392 # Set volume number392 # Set volume number
393 m = re.search("^Volume ([0-9]+):", linelist[0], re.I)393 m = re.search(u"^Volume ([0-9]+):", linelist[0], re.I)
394 if not m:394 if not m:
395 raise VolumeInfoError("Bad first line '%s'" % (linelist[0],))395 raise VolumeInfoError(u"Bad first line '%s'" % (linelist[0],))
396 self.volume_number = int(m.group(1))396 self.volume_number = int(m.group(1))
397397
398 # Set other fields398 # Set other fields
@@ -402,61 +402,61 @@
402 line_split = line.strip().split()402 line_split = line.strip().split()
403 field_name = line_split[0].lower()403 field_name = line_split[0].lower()
404 other_fields = line_split[1:]404 other_fields = line_split[1:]
405 if field_name == "Volume":405 if field_name == u"Volume":
406 log.Warn(_("Warning, found extra Volume identifier"))406 log.Warn(_(u"Warning, found extra Volume identifier"))
407 break407 break
408 elif field_name == "startingpath":408 elif field_name == u"startingpath":
409 self.start_index = string_to_index(other_fields[0])409 self.start_index = string_to_index(other_fields[0])
410 if len(other_fields) > 1:410 if len(other_fields) > 1:
411 self.start_block = int(other_fields[1])411 self.start_block = int(other_fields[1])
412 else:412 else:
413 self.start_block = None413 self.start_block = None
414 elif field_name == "endingpath":414 elif field_name == u"endingpath":
415 self.end_index = string_to_index(other_fields[0])415 self.end_index = string_to_index(other_fields[0])
416 if len(other_fields) > 1:416 if len(other_fields) > 1:
417 self.end_block = int(other_fields[1])417 self.end_block = int(other_fields[1])
418 else:418 else:
419 self.end_block = None419 self.end_block = None
420 elif field_name == "hash":420 elif field_name == u"hash":
421 self.set_hash(other_fields[0], other_fields[1])421 self.set_hash(other_fields[0], other_fields[1])
422422
423 if self.start_index is None or self.end_index is None:423 if self.start_index is None or self.end_index is None:
424 raise VolumeInfoError("Start or end index not set")424 raise VolumeInfoError(u"Start or end index not set")
425 return self425 return self
426426
427 def __eq__(self, other):427 def __eq__(self, other):
428 """428 u"""
429 Used in test suite429 Used in test suite
430 """430 """
431 if not isinstance(other, VolumeInfo):431 if not isinstance(other, VolumeInfo):
432 log.Notice(_("Other is not VolumeInfo"))432 log.Notice(_(u"Other is not VolumeInfo"))
433 return None433 return None
434 if self.volume_number != other.volume_number:434 if self.volume_number != other.volume_number:
435 log.Notice(_("Volume numbers don't match"))435 log.Notice(_(u"Volume numbers don't match"))
436 return None436 return None
437 if self.start_index != other.start_index:437 if self.start_index != other.start_index:
438 log.Notice(_("start_indicies don't match"))438 log.Notice(_(u"start_indicies don't match"))
439 return None439 return None
440 if self.end_index != other.end_index:440 if self.end_index != other.end_index:
441 log.Notice(_("end_index don't match"))441 log.Notice(_(u"end_index don't match"))
442 return None442 return None
443 hash_list1 = self.hashes.items()443 hash_list1 = self.hashes.items()
444 hash_list1.sort()444 hash_list1.sort()
445 hash_list2 = other.hashes.items()445 hash_list2 = other.hashes.items()
446 hash_list2.sort()446 hash_list2.sort()
447 if hash_list1 != hash_list2:447 if hash_list1 != hash_list2:
448 log.Notice(_("Hashes don't match"))448 log.Notice(_(u"Hashes don't match"))
449 return None449 return None
450 return 1450 return 1
451451
452 def __ne__(self, other):452 def __ne__(self, other):
453 """453 u"""
454 Defines !=454 Defines !=
455 """455 """
456 return not self.__eq__(other)456 return not self.__eq__(other)
457457
458 def contains(self, index_prefix, recursive=1):458 def contains(self, index_prefix, recursive=1):
459 """459 u"""
460 Return true if volume might contain index460 Return true if volume might contain index
461461
462 If recursive is true, then return true if any index starting462 If recursive is true, then return true if any index starting
@@ -471,11 +471,11 @@
471 return self.start_index <= index_prefix <= self.end_index471 return self.start_index <= index_prefix <= self.end_index
472472
473473
474nonnormal_char_re = re.compile("(\\s|[\\\\\"'])")474nonnormal_char_re = re.compile(u"(\\s|[\\\\\"'])")
475475
476476
477def Quote(s):477def Quote(s):
478 """478 u"""
479 Return quoted version of s safe to put in a manifest or volume info479 Return quoted version of s safe to put in a manifest or volume info
480 """480 """
481 if not nonnormal_char_re.search(s):481 if not nonnormal_char_re.search(s):
@@ -483,29 +483,29 @@
483 slist = []483 slist = []
484 for char in s:484 for char in s:
485 if nonnormal_char_re.search(char):485 if nonnormal_char_re.search(char):
486 slist.append("\\x%02x" % ord(char))486 slist.append(b"\\x%02x" % ord(char))
487 else:487 else:
488 slist.append(char)488 slist.append(char)
489 return '"%s"' % "".join(slist)489 return b'"%s"' % u"".join(slist)
490490
491491
492def Unquote(quoted_string):492def Unquote(quoted_string):
493 """493 u"""
494 Return original string from quoted_string produced by above494 Return original string from quoted_string produced by above
495 """495 """
496 if not quoted_string[0] == '"' or quoted_string[0] == "'":496 if not quoted_string[0] == b'"' or quoted_string[0] == b"'":
497 return quoted_string497 return quoted_string
498 assert quoted_string[0] == quoted_string[-1]498 assert quoted_string[0] == quoted_string[-1]
499 return_list = []499 return_list = []
500 i = 1 # skip initial char500 i = 1 # skip initial char
501 while i < len(quoted_string) - 1:501 while i < len(quoted_string) - 1:
502 char = quoted_string[i]502 char = quoted_string[i]
503 if char == "\\":503 if char == b"\\":
504 # quoted section504 # quoted section
505 assert quoted_string[i + 1] == "x"505 assert quoted_string[i + 1] == b"x"
506 return_list.append(chr(int(quoted_string[i + 2:i + 4], 16)))506 return_list.append(chr(int(quoted_string[i + 2:i + 4], 16)))
507 i += 4507 i += 4
508 else:508 else:
509 return_list.append(char)509 return_list.append(char)
510 i += 1510 i += 1
511 return "".join(return_list)511 return b"".join(return_list)
512512
=== modified file 'duplicity/patchdir.py'
--- duplicity/patchdir.py 2018-07-24 11:52:33 +0000
+++ duplicity/patchdir.py 2018-09-24 21:19:45 +0000
@@ -37,7 +37,7 @@
37from duplicity.path import * # @UnusedWildImport37from duplicity.path import * # @UnusedWildImport
38from duplicity.lazy import * # @UnusedWildImport38from duplicity.lazy import * # @UnusedWildImport
3939
40"""Functions for patching of directories"""40u"""Functions for patching of directories"""
4141
4242
43class PatchDirException(Exception):43class PatchDirException(Exception):
@@ -45,20 +45,20 @@
4545
4646
47def Patch(base_path, difftar_fileobj):47def Patch(base_path, difftar_fileobj):
48 """Patch given base_path and file object containing delta"""48 u"""Patch given base_path and file object containing delta"""
49 diff_tarfile = tarfile.TarFile("arbitrary", "r", difftar_fileobj)49 diff_tarfile = tarfile.TarFile(u"arbitrary", u"r", difftar_fileobj)
50 patch_diff_tarfile(base_path, diff_tarfile)50 patch_diff_tarfile(base_path, diff_tarfile)
51 assert not difftar_fileobj.close()51 assert not difftar_fileobj.close()
5252
5353
54def Patch_from_iter(base_path, fileobj_iter, restrict_index=()):54def Patch_from_iter(base_path, fileobj_iter, restrict_index=()):
55 """Patch given base_path and iterator of delta file objects"""55 u"""Patch given base_path and iterator of delta file objects"""
56 diff_tarfile = TarFile_FromFileobjs(fileobj_iter)56 diff_tarfile = TarFile_FromFileobjs(fileobj_iter)
57 patch_diff_tarfile(base_path, diff_tarfile, restrict_index)57 patch_diff_tarfile(base_path, diff_tarfile, restrict_index)
5858
5959
60def patch_diff_tarfile(base_path, diff_tarfile, restrict_index=()):60def patch_diff_tarfile(base_path, diff_tarfile, restrict_index=()):
61 """Patch given Path object using delta tarfile (as in tarfile.TarFile)61 u"""Patch given Path object using delta tarfile (as in tarfile.TarFile)
6262
63 If restrict_index is set, ignore any deltas in diff_tarfile that63 If restrict_index is set, ignore any deltas in diff_tarfile that
64 don't start with restrict_index.64 don't start with restrict_index.
@@ -77,12 +77,12 @@
77 ITR = IterTreeReducer(PathPatcher, [base_path])77 ITR = IterTreeReducer(PathPatcher, [base_path])
78 for basis_path, diff_ropath in collated:78 for basis_path, diff_ropath in collated:
79 if basis_path:79 if basis_path:
80 log.Info(_("Patching %s") % (util.fsdecode(basis_path.get_relative_path())),80 log.Info(_(u"Patching %s") % (util.fsdecode(basis_path.get_relative_path())),
81 log.InfoCode.patch_file_patching,81 log.InfoCode.patch_file_patching,
82 util.escape(basis_path.get_relative_path()))82 util.escape(basis_path.get_relative_path()))
83 ITR(basis_path.index, basis_path, diff_ropath)83 ITR(basis_path.index, basis_path, diff_ropath)
84 else:84 else:
85 log.Info(_("Patching %s") % (util.fsdecode(diff_ropath.get_relative_path())),85 log.Info(_(u"Patching %s") % (util.fsdecode(diff_ropath.get_relative_path())),
86 log.InfoCode.patch_file_patching,86 log.InfoCode.patch_file_patching,
87 util.escape(diff_ropath.get_relative_path()))87 util.escape(diff_ropath.get_relative_path()))
88 ITR(diff_ropath.index, basis_path, diff_ropath)88 ITR(diff_ropath.index, basis_path, diff_ropath)
@@ -96,7 +96,7 @@
9696
9797
98def filter_path_iter(path_iter, index):98def filter_path_iter(path_iter, index):
99 """Rewrite path elements of path_iter so they start with index99 u"""Rewrite path elements of path_iter so they start with index
100100
101 Discard any that doesn't start with index, and remove the index101 Discard any that doesn't start with index, and remove the index
102 prefix from the rest.102 prefix from the rest.
@@ -111,7 +111,7 @@
111111
112112
113def difftar2path_iter(diff_tarfile):113def difftar2path_iter(diff_tarfile):
114 """Turn file-like difftarobj into iterator of ROPaths"""114 u"""Turn file-like difftarobj into iterator of ROPaths"""
115 tar_iter = iter(diff_tarfile)115 tar_iter = iter(diff_tarfile)
116 multivol_fileobj = None116 multivol_fileobj = None
117117
@@ -132,7 +132,7 @@
132 ropath = ROPath(index)132 ropath = ROPath(index)
133 ropath.init_from_tarinfo(tarinfo_list[0])133 ropath.init_from_tarinfo(tarinfo_list[0])
134 ropath.difftype = difftype134 ropath.difftype = difftype
135 if difftype == "deleted":135 if difftype == u"deleted":
136 ropath.type = None136 ropath.type = None
137 elif ropath.isreg():137 elif ropath.isreg():
138 if multivol:138 if multivol:
@@ -148,61 +148,61 @@
148148
149149
150def get_index_from_tarinfo(tarinfo):150def get_index_from_tarinfo(tarinfo):
151 """Return (index, difftype, multivol) pair from tarinfo object"""151 u"""Return (index, difftype, multivol) pair from tarinfo object"""
152 for prefix in ["snapshot/", "diff/", "deleted/",152 for prefix in [b"snapshot/", b"diff/", b"deleted/",
153 "multivol_diff/", "multivol_snapshot/"]:153 b"multivol_diff/", b"multivol_snapshot/"]:
154 tiname = util.get_tarinfo_name(tarinfo)154 tiname = util.get_tarinfo_name(tarinfo)
155 if tiname.startswith(prefix):155 if tiname.startswith(prefix):
156 name = tiname[len(prefix):] # strip prefix156 name = tiname[len(prefix):] # strip prefix
157 if prefix.startswith("multivol"):157 if prefix.startswith(u"multivol"):
158 if prefix == "multivol_diff/":158 if prefix == u"multivol_diff/":
159 difftype = "diff"159 difftype = u"diff"
160 else:160 else:
161 difftype = "snapshot"161 difftype = u"snapshot"
162 multivol = 1162 multivol = 1
163 name, num_subs = \163 name, num_subs = \
164 re.subn("(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$",164 re.subn(b"(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$",
165 "\\2", tiname)165 b"\\2", tiname)
166 if num_subs != 1:166 if num_subs != 1:
167 raise PatchDirException(u"Unrecognized diff entry %s" %167 raise PatchDirException(u"Unrecognized diff entry %s" %
168 util.fsdecode(tiname))168 util.fsdecode(tiname))
169 else:169 else:
170 difftype = prefix[:-1] # strip trailing /170 difftype = prefix[:-1] # strip trailing /
171 name = tiname[len(prefix):]171 name = tiname[len(prefix):]
172 if name.endswith("/"):172 if name.endswith(b"/"):
173 name = name[:-1] # strip trailing /'s173 name = name[:-1] # strip trailing /'s
174 multivol = 0174 multivol = 0
175 break175 break
176 else:176 else:
177 raise PatchDirException(u"Unrecognized diff entry %s" %177 raise PatchDirException(u"Unrecognized diff entry %s" %
178 util.fsdecode(tiname))178 util.fsdecode(tiname))
179 if name == "." or name == "":179 if name == b"." or name == b"":
180 index = ()180 index = ()
181 else:181 else:
182 index = tuple(name.split("/"))182 index = tuple(name.split(b"/"))
183 if '..' in index:183 if b'..' in index:
184 raise PatchDirException(u"Tar entry %s contains '..'. Security "184 raise PatchDirException(u"Tar entry %s contains '..'. Security "
185 "violation" % util.fsdecode(tiname))185 u"violation" % util.fsdecode(tiname))
186 return (index, difftype, multivol)186 return (index, difftype, multivol)
187187
188188
189class Multivol_Filelike:189class Multivol_Filelike:
190 """Emulate a file like object from multivols190 u"""Emulate a file like object from multivols
191191
192 Maintains a buffer about the size of a volume. When it is read()192 Maintains a buffer about the size of a volume. When it is read()
193 to the end, pull in more volumes as desired.193 to the end, pull in more volumes as desired.
194194
195 """195 """
196 def __init__(self, tf, tar_iter, tarinfo_list, index):196 def __init__(self, tf, tar_iter, tarinfo_list, index):
197 """Initializer. tf is TarFile obj, tarinfo is first tarinfo"""197 u"""Initializer. tf is TarFile obj, tarinfo is first tarinfo"""
198 self.tf, self.tar_iter = tf, tar_iter198 self.tf, self.tar_iter = tf, tar_iter
199 self.tarinfo_list = tarinfo_list # must store as list for write access199 self.tarinfo_list = tarinfo_list # must store as list for write access
200 self.index = index200 self.index = index
201 self.buffer = ""201 self.buffer = b""
202 self.at_end = 0202 self.at_end = 0
203203
204 def read(self, length=-1):204 def read(self, length=-1):
205 """Read length bytes from file"""205 u"""Read length bytes from file"""
206 if length < 0:206 if length < 0:
207 while self.addtobuffer():207 while self.addtobuffer():
208 pass208 pass
@@ -218,7 +218,7 @@
218 return result218 return result
219219
220 def addtobuffer(self):220 def addtobuffer(self):
221 """Add next chunk to buffer"""221 u"""Add next chunk to buffer"""
222 if self.at_end:222 if self.at_end:
223 return None223 return None
224 index, difftype, multivol = get_index_from_tarinfo( # @UnusedVariable224 index, difftype, multivol = get_index_from_tarinfo( # @UnusedVariable
@@ -242,24 +242,24 @@
242 return 1242 return 1
243243
244 def close(self):244 def close(self):
245 """If not at end, read remaining data"""245 u"""If not at end, read remaining data"""
246 if not self.at_end:246 if not self.at_end:
247 while 1:247 while 1:
248 self.buffer = ""248 self.buffer = b""
249 if not self.addtobuffer():249 if not self.addtobuffer():
250 break250 break
251 self.at_end = 1251 self.at_end = 1
252252
253253
254class PathPatcher(ITRBranch):254class PathPatcher(ITRBranch):
255 """Used by DirPatch, process the given basis and diff"""255 u"""Used by DirPatch, process the given basis and diff"""
256 def __init__(self, base_path):256 def __init__(self, base_path):
257 """Set base_path, Path of root of tree"""257 u"""Set base_path, Path of root of tree"""
258 self.base_path = base_path258 self.base_path = base_path
259 self.dir_diff_ropath = None259 self.dir_diff_ropath = None
260260
261 def start_process(self, index, basis_path, diff_ropath):261 def start_process(self, index, basis_path, diff_ropath):
262 """Start processing when diff_ropath is a directory"""262 u"""Start processing when diff_ropath is a directory"""
263 if not (diff_ropath and diff_ropath.isdir()):263 if not (diff_ropath and diff_ropath.isdir()):
264 assert index == (), util.uindex(index) # should only happen for first elem264 assert index == (), util.uindex(index) # should only happen for first elem
265 self.fast_process(index, basis_path, diff_ropath)265 self.fast_process(index, basis_path, diff_ropath)
@@ -276,44 +276,44 @@
276 self.dir_diff_ropath = diff_ropath276 self.dir_diff_ropath = diff_ropath
277277
278 def end_process(self):278 def end_process(self):
279 """Copy directory permissions when leaving tree"""279 u"""Copy directory permissions when leaving tree"""
280 if self.dir_diff_ropath:280 if self.dir_diff_ropath:
281 self.dir_diff_ropath.copy_attribs(self.dir_basis_path)281 self.dir_diff_ropath.copy_attribs(self.dir_basis_path)
282282
283 def can_fast_process(self, index, basis_path, diff_ropath):283 def can_fast_process(self, index, basis_path, diff_ropath):
284 """No need to recurse if diff_ropath isn't a directory"""284 u"""No need to recurse if diff_ropath isn't a directory"""
285 return not (diff_ropath and diff_ropath.isdir())285 return not (diff_ropath and diff_ropath.isdir())
286286
287 def fast_process(self, index, basis_path, diff_ropath):287 def fast_process(self, index, basis_path, diff_ropath):
288 """For use when neither is a directory"""288 u"""For use when neither is a directory"""
289 if not diff_ropath:289 if not diff_ropath:
290 return # no change290 return # no change
291 elif not basis_path:291 elif not basis_path:
292 if diff_ropath.difftype == "deleted":292 if diff_ropath.difftype == u"deleted":
293 pass # already deleted293 pass # already deleted
294 else:294 else:
295 # just copy snapshot over295 # just copy snapshot over
296 diff_ropath.copy(self.base_path.new_index(index))296 diff_ropath.copy(self.base_path.new_index(index))
297 elif diff_ropath.difftype == "deleted":297 elif diff_ropath.difftype == u"deleted":
298 if basis_path.isdir():298 if basis_path.isdir():
299 basis_path.deltree()299 basis_path.deltree()
300 else:300 else:
301 basis_path.delete()301 basis_path.delete()
302 elif not basis_path.isreg() or (basis_path.isreg() and diff_ropath.difftype == "snapshot"):302 elif not basis_path.isreg() or (basis_path.isreg() and diff_ropath.difftype == u"snapshot"):
303 if basis_path.isdir():303 if basis_path.isdir():
304 basis_path.deltree()304 basis_path.deltree()
305 else:305 else:
306 basis_path.delete()306 basis_path.delete()
307 diff_ropath.copy(basis_path)307 diff_ropath.copy(basis_path)
308 else:308 else:
309 assert diff_ropath.difftype == "diff", diff_ropath.difftype309 assert diff_ropath.difftype == u"diff", diff_ropath.difftype
310 basis_path.patch_with_attribs(diff_ropath)310 basis_path.patch_with_attribs(diff_ropath)
311311
312312
313class TarFile_FromFileobjs:313class TarFile_FromFileobjs:
314 """Like a tarfile.TarFile iterator, but read from multiple fileobjs"""314 u"""Like a tarfile.TarFile iterator, but read from multiple fileobjs"""
315 def __init__(self, fileobj_iter):315 def __init__(self, fileobj_iter):
316 """Make new tarinfo iterator316 u"""Make new tarinfo iterator
317317
318 fileobj_iter should be an iterator of file objects opened for318 fileobj_iter should be an iterator of file objects opened for
319 reading. They will be closed at end of reading.319 reading. They will be closed at end of reading.
@@ -327,11 +327,11 @@
327 return self327 return self
328328
329 def set_tarfile(self):329 def set_tarfile(self):
330 """Set tarfile from next file object, or raise StopIteration"""330 u"""Set tarfile from next file object, or raise StopIteration"""
331 if self.current_fp:331 if self.current_fp:
332 assert not self.current_fp.close()332 assert not self.current_fp.close()
333 self.current_fp = next(self.fileobj_iter)333 self.current_fp = next(self.fileobj_iter)
334 self.tarfile = util.make_tarfile("r", self.current_fp)334 self.tarfile = util.make_tarfile(u"r", self.current_fp)
335 self.tar_iter = iter(self.tarfile)335 self.tar_iter = iter(self.tarfile)
336336
337 def next(self):337 def next(self):
@@ -345,12 +345,12 @@
345 return next(self.tar_iter)345 return next(self.tar_iter)
346346
347 def extractfile(self, tarinfo):347 def extractfile(self, tarinfo):
348 """Return data associated with given tarinfo"""348 u"""Return data associated with given tarinfo"""
349 return self.tarfile.extractfile(tarinfo)349 return self.tarfile.extractfile(tarinfo)
350350
351351
352def collate_iters(iter_list):352def collate_iters(iter_list):
353 """Collate iterators by index353 u"""Collate iterators by index
354354
355 Input is a list of n iterators each of which must iterate elements355 Input is a list of n iterators each of which must iterate elements
356 with an index attribute. The elements must come out in increasing356 with an index attribute. The elements must come out in increasing
@@ -371,7 +371,7 @@
371 elems = overflow[:]371 elems = overflow[:]
372372
373 def setrorps(overflow, elems):373 def setrorps(overflow, elems):
374 """Set the overflow and rorps list"""374 u"""Set the overflow and rorps list"""
375 for i in range(iter_num):375 for i in range(iter_num):
376 if not overflow[i] and elems[i] is None:376 if not overflow[i] and elems[i] is None:
377 try:377 try:
@@ -381,7 +381,7 @@
381 elems[i] = None381 elems[i] = None
382382
383 def getleastindex(elems):383 def getleastindex(elems):
384 """Return the first index in elems, assuming elems isn't empty"""384 u"""Return the first index in elems, assuming elems isn't empty"""
385 return min(map(lambda elem: elem.index, filter(lambda x: x, elems)))385 return min(map(lambda elem: elem.index, filter(lambda x: x, elems)))
386386
387 def yield_tuples(iter_num, overflow, elems):387 def yield_tuples(iter_num, overflow, elems):
@@ -403,7 +403,7 @@
403403
404404
405class IndexedTuple:405class IndexedTuple:
406 """Like a tuple, but has .index (used previously by collate_iters)"""406 u"""Like a tuple, but has .index (used previously by collate_iters)"""
407 def __init__(self, index, sequence):407 def __init__(self, index, sequence):
408 self.index = index408 self.index = index
409 self.data = tuple(sequence)409 self.data = tuple(sequence)
@@ -412,7 +412,7 @@
412 return len(self.data)412 return len(self.data)
413413
414 def __getitem__(self, key):414 def __getitem__(self, key):
415 """This only works for numerical keys (easier this way)"""415 u"""This only works for numerical keys (easier this way)"""
416 return self.data[key]416 return self.data[key]
417417
418 def __lt__(self, other):418 def __lt__(self, other):
@@ -448,11 +448,11 @@
448 return None448 return None
449449
450 def __str__(self):450 def __str__(self):
451 return "(%s).%s" % (", ".join(map(str, self.data)), self.index)451 return u"(%s).%s" % (u", ".join(map(str, self.data)), self.index)
452452
453453
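A simplified two-iterator version of the collation above: walk both index-sorted streams and pair up the elements at the least index, padding the other side with None. The merged code generalises this to n iterators and buffers one-element overflow; the names here are illustrative:

    from collections import namedtuple

    Elem = namedtuple("Elem", "index data")

    def collate_two(iter_a, iter_b):
        a, b = next(iter_a, None), next(iter_b, None)
        while a is not None or b is not None:
            least = min(e.index for e in (a, b) if e is not None)
            pair = (a if a is not None and a.index == least else None,
                    b if b is not None and b.index == least else None)
            yield pair
            if pair[0] is not None:
                a = next(iter_a, None)
            if pair[1] is not None:
                b = next(iter_b, None)

    left = iter([Elem((1,), "x"), Elem((3,), "y")])
    right = iter([Elem((2,), "p"), Elem((3,), "q")])
    assert [(getattr(x, "data", None), getattr(y, "data", None))
            for x, y in collate_two(left, right)] == [("x", None), (None, "p"), ("y", "q")]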
454def normalize_ps(patch_sequence):454def normalize_ps(patch_sequence):
455 """Given an sequence of ROPath deltas, remove blank and unnecessary455 u"""Given an sequence of ROPath deltas, remove blank and unnecessary
456456
457 The sequence is assumed to be in patch order (later patches apply457 The sequence is assumed to be in patch order (later patches apply
458 to earlier ones). A patch is unnecessary if a later one doesn't458 to earlier ones). A patch is unnecessary if a later one doesn't
@@ -467,29 +467,29 @@
467 if delta is not None:467 if delta is not None:
468 # skip blank entries468 # skip blank entries
469 result_list.insert(0, delta)469 result_list.insert(0, delta)
470 if delta.difftype != "diff":470 if delta.difftype != u"diff":
471 break471 break
472 i -= 1472 i -= 1
473 return result_list473 return result_list
474474
475475
476def patch_seq2ropath(patch_seq):476def patch_seq2ropath(patch_seq):
477 """Apply the patches in patch_seq, return single ropath"""477 u"""Apply the patches in patch_seq, return single ropath"""
478 first = patch_seq[0]478 first = patch_seq[0]
479 assert first.difftype != "diff", "First patch in sequence " \479 assert first.difftype != u"diff", u"First patch in sequence " \
480 "%s was a diff" % patch_seq480 u"%s was a diff" % patch_seq
481 if not first.isreg():481 if not first.isreg():
482 # No need to bother with data if not regular file482 # No need to bother with data if not regular file
483 assert len(patch_seq) == 1, "Patch sequence isn't regular, but " \483 assert len(patch_seq) == 1, u"Patch sequence isn't regular, but " \
484 "has %d entries" % len(patch_seq)484 u"has %d entries" % len(patch_seq)
485 return first.get_ropath()485 return first.get_ropath()
486486
487 current_file = first.open("rb")487 current_file = first.open(u"rb")
488488
489 for delta_ropath in patch_seq[1:]:489 for delta_ropath in patch_seq[1:]:
490 assert delta_ropath.difftype == "diff", delta_ropath.difftype490 assert delta_ropath.difftype == u"diff", delta_ropath.difftype
491 if not isinstance(current_file, file):491 if not isinstance(current_file, file):
492 """492 u"""
493 librsync insists on a real file object, which we create manually493 librsync insists on a real file object, which we create manually
494 by using the duplicity.tempdir to tell us where.494 by using the duplicity.tempdir to tell us where.
495495
@@ -503,14 +503,14 @@
503 tempfp.seek(0)503 tempfp.seek(0)
504 current_file = tempfp504 current_file = tempfp
505 current_file = librsync.PatchedFile(current_file,505 current_file = librsync.PatchedFile(current_file,
506 delta_ropath.open("rb"))506 delta_ropath.open(u"rb"))
507 result = patch_seq[-1].get_ropath()507 result = patch_seq[-1].get_ropath()
508 result.setfileobj(current_file)508 result.setfileobj(current_file)
509 return result509 return result
510510
511511
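The loop above keeps wrapping the current file object in librsync.PatchedFile, once per delta, so the final object replays the base plus all diffs lazily. A toy sketch of that wrapping pattern; ApplyDelta is a fake patcher that merely concatenates, since building real rsync deltas is out of scope here:

    import io

    class ApplyDelta(object):
        def __init__(self, basis_fp, delta_fp):
            self.basis_fp, self.delta_fp = basis_fp, delta_fp

        def read(self):
            # A real rsync patch is not concatenation; this only shows the chaining.
            return self.basis_fp.read() + self.delta_fp.read()

    current = io.BytesIO(b"snapshot ")
    for delta in [io.BytesIO(b"diff1 "), io.BytesIO(b"diff2")]:
        current = ApplyDelta(current, delta)

    assert current.read() == b"snapshot diff1 diff2"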
512def integrate_patch_iters(iter_list):512def integrate_patch_iters(iter_list):
513 """Combine a list of iterators of ropath patches513 u"""Combine a list of iterators of ropath patches
514514
515 The iter_list should be sorted in patch order, and the elements in515 The iter_list should be sorted in patch order, and the elements in
516 each iter_list need to be ordered by index. The output will be an516 each iter_list need to be ordered by index. The output will be an
@@ -527,14 +527,14 @@
527 yield final_ropath527 yield final_ropath
528 except Exception as e:528 except Exception as e:
529 filename = normalized[-1].get_ropath().get_relative_path()529 filename = normalized[-1].get_ropath().get_relative_path()
530 log.Warn(_("Error '%s' patching %s") %530 log.Warn(_(u"Error '%s' patching %s") %
531 (util.uexc(e), util.fsdecode(filename)),531 (util.uexc(e), util.fsdecode(filename)),
532 log.WarningCode.cannot_process,532 log.WarningCode.cannot_process,
533 util.escape(filename))533 util.escape(filename))
534534
535535
536def tarfiles2rop_iter(tarfile_list, restrict_index=()):536def tarfiles2rop_iter(tarfile_list, restrict_index=()):
537 """Integrate tarfiles of diffs into single ROPath iter537 u"""Integrate tarfiles of diffs into single ROPath iter
538538
539 Then filter out all the diffs in that index which don't start with539 Then filter out all the diffs in that index which don't start with
540 the restrict_index.540 the restrict_index.
@@ -548,7 +548,7 @@
548548
549549
550def Write_ROPaths(base_path, rop_iter):550def Write_ROPaths(base_path, rop_iter):
551 """Write out ropaths in rop_iter starting at base_path551 u"""Write out ropaths in rop_iter starting at base_path
552552
553 Returns 1 if something was actually written, 0 otherwise.553 Returns 1 if something was actually written, 0 otherwise.
554554
@@ -564,20 +564,20 @@
564564
565565
566class ROPath_IterWriter(ITRBranch):566class ROPath_IterWriter(ITRBranch):
567 """Used in Write_ROPaths above567 u"""Used in Write_ROPaths above
568568
569 We need to use an ITR because we have to update the569 We need to use an ITR because we have to update the
570 permissions/times of directories after we write the files in them.570 permissions/times of directories after we write the files in them.
571571
572 """572 """
573 def __init__(self, base_path):573 def __init__(self, base_path):
574 """Set base_path, Path of root of tree"""574 u"""Set base_path, Path of root of tree"""
575 self.base_path = base_path575 self.base_path = base_path
576 self.dir_diff_ropath = None576 self.dir_diff_ropath = None
577 self.dir_new_path = None577 self.dir_new_path = None
578578
579 def start_process(self, index, ropath):579 def start_process(self, index, ropath):
580 """Write ropath. Only handles the directory case"""580 u"""Write ropath. Only handles the directory case"""
581 if not ropath.isdir():581 if not ropath.isdir():
582 # Base may not be a directory, but rest should582 # Base may not be a directory, but rest should
583 assert ropath.index == (), ropath.index583 assert ropath.index == (), ropath.index
@@ -596,19 +596,19 @@
596 self.dir_diff_ropath = ropath596 self.dir_diff_ropath = ropath
597597
598 def end_process(self):598 def end_process(self):
599 """Update information of a directory when leaving it"""599 u"""Update information of a directory when leaving it"""
600 if self.dir_diff_ropath:600 if self.dir_diff_ropath:
601 self.dir_diff_ropath.copy_attribs(self.dir_new_path)601 self.dir_diff_ropath.copy_attribs(self.dir_new_path)
602602
603 def can_fast_process(self, index, ropath):603 def can_fast_process(self, index, ropath):
604 """Can fast process (no recursion) if ropath isn't a directory"""604 u"""Can fast process (no recursion) if ropath isn't a directory"""
605 log.Info(_("Writing %s of type %s") %605 log.Info(_(u"Writing %s of type %s") %
606 (util.fsdecode(ropath.get_relative_path()), ropath.type),606 (util.fsdecode(ropath.get_relative_path()), ropath.type),
607 log.InfoCode.patch_file_writing,607 log.InfoCode.patch_file_writing,
608 "%s %s" % (util.escape(ropath.get_relative_path()), ropath.type))608 u"%s %s" % (util.escape(ropath.get_relative_path()), ropath.type))
609 return not ropath.isdir()609 return not ropath.isdir()
610610
611 def fast_process(self, index, ropath):611 def fast_process(self, index, ropath):
612 """Write non-directory ropath to destination"""612 u"""Write non-directory ropath to destination"""
613 if ropath.exists():613 if ropath.exists():
614 ropath.copy(self.base_path.new_index(index))614 ropath.copy(self.base_path.new_index(index))
615615
=== modified file 'duplicity/path.py'
--- duplicity/path.py 2018-07-24 11:52:33 +0000
+++ duplicity/path.py 2018-09-24 21:19:45 +0000
@@ -19,7 +19,7 @@
19# along with duplicity; if not, write to the Free Software Foundation,19# along with duplicity; if not, write to the Free Software Foundation,
20# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA20# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
2121
22"""Wrapper class around a file like "/usr/bin/env"22u"""Wrapper class around a file like "/usr/bin/env"
2323
24This class makes certain file operations more convenient and24This class makes certain file operations more convenient and
25associates stat information with filenames25associates stat information with filenames
@@ -53,7 +53,7 @@
5353
5454
55class StatResult:55class StatResult:
56 """Used to emulate the output of os.stat() and related"""56 u"""Used to emulate the output of os.stat() and related"""
57 # st_mode is required by the TarInfo class, but it's unclear how57 # st_mode is required by the TarInfo class, but it's unclear how
58 # to generate it from file permissions.58 # to generate it from file permissions.
59 st_mode = 059 st_mode = 0
@@ -64,158 +64,158 @@
6464
6565
66class ROPath:66class ROPath:
67 """Read only Path67 u"""Read only Path
6868
69 Objects of this class don't represent real files, so they don't69 Objects of this class don't represent real files, so they don't
70 have a name. They are required to be indexed though.70 have a name. They are required to be indexed though.
7171
72 """72 """
73 def __init__(self, index, stat=None):73 def __init__(self, index, stat=None):
74 """ROPath initializer"""74 u"""ROPath initializer"""
75 self.opened, self.fileobj = None, None75 self.opened, self.fileobj = None, None
76 self.index = index76 self.index = index
77 self.stat, self.type = None, None77 self.stat, self.type = None, None
78 self.mode, self.devnums = None, None78 self.mode, self.devnums = None, None
7979
80 def set_from_stat(self):80 def set_from_stat(self):
81 """Set the value of self.type, self.mode from self.stat"""81 u"""Set the value of self.type, self.mode from self.stat"""
82 if not self.stat:82 if not self.stat:
83 self.type = None83 self.type = None
8484
85 st_mode = self.stat.st_mode85 st_mode = self.stat.st_mode
86 if stat.S_ISREG(st_mode):86 if stat.S_ISREG(st_mode):
87 self.type = "reg"87 self.type = u"reg"
88 elif stat.S_ISDIR(st_mode):88 elif stat.S_ISDIR(st_mode):
89 self.type = "dir"89 self.type = u"dir"
90 elif stat.S_ISLNK(st_mode):90 elif stat.S_ISLNK(st_mode):
91 self.type = "sym"91 self.type = u"sym"
92 elif stat.S_ISFIFO(st_mode):92 elif stat.S_ISFIFO(st_mode):
93 self.type = "fifo"93 self.type = u"fifo"
94 elif stat.S_ISSOCK(st_mode):94 elif stat.S_ISSOCK(st_mode):
95 raise PathException(util.fsdecode(self.get_relative_path()) +95 raise PathException(util.fsdecode(self.get_relative_path()) +
96 u"is a socket, unsupported by tar")96 u"is a socket, unsupported by tar")
97 self.type = "sock"97 self.type = u"sock"
98 elif stat.S_ISCHR(st_mode):98 elif stat.S_ISCHR(st_mode):
99 self.type = "chr"99 self.type = u"chr"
100 elif stat.S_ISBLK(st_mode):100 elif stat.S_ISBLK(st_mode):
101 self.type = "blk"101 self.type = u"blk"
102 else:102 else:
103 raise PathException("Unknown type")103 raise PathException(u"Unknown type")
104104
105 self.mode = stat.S_IMODE(st_mode)105 self.mode = stat.S_IMODE(st_mode)
106 if self.type in ("chr", "blk"):106 if self.type in (u"chr", u"blk"):
107 try:107 try:
108 self.devnums = (os.major(self.stat.st_rdev),108 self.devnums = (os.major(self.stat.st_rdev),
109 os.minor(self.stat.st_rdev))109 os.minor(self.stat.st_rdev))
110 except:110 except:
111 log.Warn(_("Warning: %s invalid devnums (0x%X), treating as (0, 0).")111 log.Warn(_(u"Warning: %s invalid devnums (0x%X), treating as (0, 0).")
112 % (util.fsdecode(self.get_relative_path()), self.stat.st_rdev))112 % (util.fsdecode(self.get_relative_path()), self.stat.st_rdev))
113 self.devnums = (0, 0)113 self.devnums = (0, 0)
114114
115 def blank(self):115 def blank(self):
116 """Black out self - set type and stat to None"""116 u"""Black out self - set type and stat to None"""
117 self.type, self.stat = None, None117 self.type, self.stat = None, None
118118
119 def exists(self):119 def exists(self):
120 """True if corresponding file exists"""120 u"""True if corresponding file exists"""
121 return self.type121 return self.type
122122
123 def isreg(self):123 def isreg(self):
124 """True if self corresponds to regular file"""124 u"""True if self corresponds to regular file"""
125 return self.type == "reg"125 return self.type == u"reg"
126126
127 def isdir(self):127 def isdir(self):
128 """True if self is dir"""128 u"""True if self is dir"""
129 return self.type == "dir"129 return self.type == u"dir"
130130
131 def issym(self):131 def issym(self):
132 """True if self is sym"""132 u"""True if self is sym"""
133 return self.type == "sym"133 return self.type == u"sym"
134134
135 def isfifo(self):135 def isfifo(self):
136 """True if self is fifo"""136 u"""True if self is fifo"""
137 return self.type == "fifo"137 return self.type == u"fifo"
138138
139 def issock(self):139 def issock(self):
140 """True is self is socket"""140 u"""True is self is socket"""
141 return self.type == "sock"141 return self.type == u"sock"
142142
143 def isdev(self):143 def isdev(self):
144 """True is self is a device file"""144 u"""True is self is a device file"""
145 return self.type == "chr" or self.type == "blk"145 return self.type == u"chr" or self.type == u"blk"
146146
147 def getdevloc(self):147 def getdevloc(self):
148 """Return device number path resides on"""148 u"""Return device number path resides on"""
149 return self.stat.st_dev149 return self.stat.st_dev
150150
151 def getsize(self):151 def getsize(self):
152 """Return length in bytes from stat object"""152 u"""Return length in bytes from stat object"""
153 return self.stat.st_size153 return self.stat.st_size
154154
155 def getmtime(self):155 def getmtime(self):
156 """Return mod time of path in seconds"""156 u"""Return mod time of path in seconds"""
157 return int(self.stat.st_mtime)157 return int(self.stat.st_mtime)
158158
159 def get_relative_path(self):159 def get_relative_path(self):
160 """Return relative path, created from index"""160 u"""Return relative path, created from index"""
161 if self.index:161 if self.index:
162 return "/".join(self.index)162 return b"/".join(self.index)
163 else:163 else:
164 return "."164 return b"."
165165
166 def getperms(self):166 def getperms(self):
167 """Return permissions mode, owner and group"""167 u"""Return permissions mode, owner and group"""
168 s1 = self.stat168 s1 = self.stat
169 return '%s:%s %o' % (s1.st_uid, s1.st_gid, self.mode)169 return u'%s:%s %o' % (s1.st_uid, s1.st_gid, self.mode)
170170
171 def open(self, mode):171 def open(self, mode):
172 """Return fileobj associated with self"""172 u"""Return fileobj associated with self"""
173 assert mode == "rb" and self.fileobj and not self.opened, \173 assert mode == u"rb" and self.fileobj and not self.opened, \
174 "%s %s %s" % (mode, self.fileobj, self.opened)174 u"%s %s %s" % (mode, self.fileobj, self.opened)
175 self.opened = 1175 self.opened = 1
176 return self.fileobj176 return self.fileobj
177177
178 def get_data(self):178 def get_data(self):
179 """Return contents of associated fileobj in string"""179 u"""Return contents of associated fileobj in string"""
180 fin = self.open("rb")180 fin = self.open(u"rb")
181 buf = fin.read()181 buf = fin.read()
182 assert not fin.close()182 assert not fin.close()
183 return buf183 return buf
184184
185 def setfileobj(self, fileobj):185 def setfileobj(self, fileobj):
186 """Set file object returned by open()"""186 u"""Set file object returned by open()"""
187 assert not self.fileobj187 assert not self.fileobj
188 self.fileobj = fileobj188 self.fileobj = fileobj
189 self.opened = None189 self.opened = None
190190
191 def init_from_tarinfo(self, tarinfo):191 def init_from_tarinfo(self, tarinfo):
192 """Set data from tarinfo object (part of tarfile module)"""192 u"""Set data from tarinfo object (part of tarfile module)"""
193 # Set the type193 # Set the type
194 type = tarinfo.type194 type = tarinfo.type
195 if type == tarfile.REGTYPE or type == tarfile.AREGTYPE:195 if type == tarfile.REGTYPE or type == tarfile.AREGTYPE:
196 self.type = "reg"196 self.type = u"reg"
197 elif type == tarfile.LNKTYPE:197 elif type == tarfile.LNKTYPE:
198 raise PathException("Hard links not supported yet")198 raise PathException(u"Hard links not supported yet")
199 elif type == tarfile.SYMTYPE:199 elif type == tarfile.SYMTYPE:
200 self.type = "sym"200 self.type = u"sym"
201 self.symtext = tarinfo.linkname201 self.symtext = tarinfo.linkname
202 elif type == tarfile.CHRTYPE:202 elif type == tarfile.CHRTYPE:
203 self.type = "chr"203 self.type = u"chr"
204 self.devnums = (tarinfo.devmajor, tarinfo.devminor)204 self.devnums = (tarinfo.devmajor, tarinfo.devminor)
205 elif type == tarfile.BLKTYPE:205 elif type == tarfile.BLKTYPE:
206 self.type = "blk"206 self.type = u"blk"
207 self.devnums = (tarinfo.devmajor, tarinfo.devminor)207 self.devnums = (tarinfo.devmajor, tarinfo.devminor)
208 elif type == tarfile.DIRTYPE:208 elif type == tarfile.DIRTYPE:
209 self.type = "dir"209 self.type = u"dir"
210 elif type == tarfile.FIFOTYPE:210 elif type == tarfile.FIFOTYPE:
211 self.type = "fifo"211 self.type = u"fifo"
212 else:212 else:
213 raise PathException("Unknown tarinfo type %s" % (type,))213 raise PathException(u"Unknown tarinfo type %s" % (type,))
214214
215 self.mode = tarinfo.mode215 self.mode = tarinfo.mode
216 self.stat = StatResult()216 self.stat = StatResult()
217217
218 """ Set user and group id218 u""" Set user and group id
219 use numeric id if name lookup fails219 use numeric id if name lookup fails
220 OR220 OR
221 --numeric-owner is set221 --numeric-owner is set
@@ -235,13 +235,13 @@
235235
236 self.stat.st_mtime = int(tarinfo.mtime)236 self.stat.st_mtime = int(tarinfo.mtime)
237 if self.stat.st_mtime < 0:237 if self.stat.st_mtime < 0:
238 log.Warn(_("Warning: %s has negative mtime, treating as 0.")238 log.Warn(_(u"Warning: %s has negative mtime, treating as 0.")
239 % (tarinfo.uc_name))239 % (tarinfo.uc_name))
240 self.stat.st_mtime = 0240 self.stat.st_mtime = 0
241 self.stat.st_size = tarinfo.size241 self.stat.st_size = tarinfo.size
242242
243 def get_ropath(self):243 def get_ropath(self):
244 """Return ropath copy of self"""244 u"""Return ropath copy of self"""
245 new_ropath = ROPath(self.index, self.stat)245 new_ropath = ROPath(self.index, self.stat)
246 new_ropath.type, new_ropath.mode = self.type, self.mode246 new_ropath.type, new_ropath.mode = self.type, self.mode
247 if self.issym():247 if self.issym():
@@ -253,7 +253,7 @@
253 return new_ropath253 return new_ropath
254254
255 def get_tarinfo(self):255 def get_tarinfo(self):
256 """Generate a tarfile.TarInfo object based on self256 u"""Generate a tarfile.TarInfo object based on self
257257
258 Doesn't set size based on stat, because we may want to replace258 Doesn't set size based on stat, because we may want to replace
259 data with another stream. Size should be set separately by259 data with another stream. Size should be set separately by
@@ -262,11 +262,11 @@
262 """262 """
263 ti = tarfile.TarInfo()263 ti = tarfile.TarInfo()
264 if self.index:264 if self.index:
265 ti.name = "/".join(self.index)265 ti.name = b"/".join(self.index)
266 else:266 else:
267 ti.name = "."267 ti.name = b"."
268 if self.isdir():268 if self.isdir():
269 ti.name += "/" # tar dir naming convention269 ti.name += b"/" # tar dir naming convention
270270
271 ti.size = 0271 ti.size = 0
272 if self.type:272 if self.type:
@@ -283,18 +283,18 @@
283 ti.type = tarfile.SYMTYPE283 ti.type = tarfile.SYMTYPE
284 ti.linkname = self.symtext284 ti.linkname = self.symtext
285 elif self.isdev():285 elif self.isdev():
286 if self.type == "chr":286 if self.type == u"chr":
287 ti.type = tarfile.CHRTYPE287 ti.type = tarfile.CHRTYPE
288 else:288 else:
289 ti.type = tarfile.BLKTYPE289 ti.type = tarfile.BLKTYPE
290 ti.devmajor, ti.devminor = self.devnums290 ti.devmajor, ti.devminor = self.devnums
291 else:291 else:
292 raise PathException("Unrecognized type " + str(self.type))292 raise PathException(u"Unrecognized type " + str(self.type))
293293
294 ti.mode = self.mode294 ti.mode = self.mode
295 ti.uid, ti.gid = self.stat.st_uid, self.stat.st_gid295 ti.uid, ti.gid = self.stat.st_uid, self.stat.st_gid
296 if self.stat.st_mtime < 0:296 if self.stat.st_mtime < 0:
297 log.Warn(_("Warning: %s has negative mtime, treating as 0.")297 log.Warn(_(u"Warning: %s has negative mtime, treating as 0.")
298 % (util.fsdecode(self.get_relative_path())))298 % (util.fsdecode(self.get_relative_path())))
299 ti.mtime = 0299 ti.mtime = 0
300 else:300 else:
@@ -303,14 +303,14 @@
303 try:303 try:
304 ti.uname = cached_ops.getpwuid(ti.uid)[0]304 ti.uname = cached_ops.getpwuid(ti.uid)[0]
305 except KeyError:305 except KeyError:
306 ti.uname = ''306 ti.uname = u''
307 try:307 try:
308 ti.gname = cached_ops.getgrgid(ti.gid)[0]308 ti.gname = cached_ops.getgrgid(ti.gid)[0]
309 except KeyError:309 except KeyError:
310 ti.gname = ''310 ti.gname = b''
311311
312 if ti.type in (tarfile.CHRTYPE, tarfile.BLKTYPE):312 if ti.type in (tarfile.CHRTYPE, tarfile.BLKTYPE):
313 if hasattr(os, "major") and hasattr(os, "minor"):313 if hasattr(os, u"major") and hasattr(os, u"minor"):
314 ti.devmajor, ti.devminor = self.devnums314 ti.devmajor, ti.devminor = self.devnums
315 else:315 else:
316 # Currently we depend on an uninitialized tarinfo file to316 # Currently we depend on an uninitialized tarinfo file to
@@ -320,7 +320,7 @@
320 return ti320 return ti
321321
322 def __eq__(self, other):322 def __eq__(self, other):
323 """Used to compare two ROPaths. Doesn't look at fileobjs"""323 u"""Used to compare two ROPaths. Doesn't look at fileobjs"""
324 if not self.type and not other.type:324 if not self.type and not other.type:
325 return 1 # neither exists325 return 1 # neither exists
326 if not self.stat and other.stat or not other.stat and self.stat:326 if not self.stat and other.stat or not other.stat and self.stat:
@@ -348,7 +348,7 @@
348 return not self.__eq__(other)348 return not self.__eq__(other)
349349
350 def compare_verbose(self, other, include_data=0):350 def compare_verbose(self, other, include_data=0):
351 """Compare ROPaths like __eq__, but log reason if different351 u"""Compare ROPaths like __eq__, but log reason if different
352352
353 This is placed in a separate function from __eq__ because353 This is placed in a separate function from __eq__ because
354 __eq__ should be very time sensitive, and logging statements354 __eq__ should be very time sensitive, and logging statements
@@ -358,7 +358,7 @@
358358
359 """359 """
360 def log_diff(log_string):360 def log_diff(log_string):
361 log_str = _("Difference found:") + u" " + log_string361 log_str = _(u"Difference found:") + u" " + log_string
362 log.Notice(log_str % (util.fsdecode(self.get_relative_path())))362 log.Notice(log_str % (util.fsdecode(self.get_relative_path())))
363363
364 if include_data is False:364 if include_data is False:
@@ -367,24 +367,24 @@
367 if not self.type and not other.type:367 if not self.type and not other.type:
368 return 1368 return 1
369 if not self.stat and other.stat:369 if not self.stat and other.stat:
370 log_diff(_("New file %s"))370 log_diff(_(u"New file %s"))
371 return 0371 return 0
372 if not other.stat and self.stat:372 if not other.stat and self.stat:
373 log_diff(_("File %s is missing"))373 log_diff(_(u"File %s is missing"))
374 return 0374 return 0
375 if self.type != other.type:375 if self.type != other.type:
376 log_diff(_("File %%s has type %s, expected %s") %376 log_diff(_(u"File %%s has type %s, expected %s") %
377 (other.type, self.type))377 (other.type, self.type))
378 return 0378 return 0
379379
380 if self.isreg() or self.isdir() or self.isfifo():380 if self.isreg() or self.isdir() or self.isfifo():
381 if not self.perms_equal(other):381 if not self.perms_equal(other):
382 log_diff(_("File %%s has permissions %s, expected %s") %382 log_diff(_(u"File %%s has permissions %s, expected %s") %
383 (other.getperms(), self.getperms()))383 (other.getperms(), self.getperms()))
384 return 0384 return 0
385 if ((int(self.stat.st_mtime) != int(other.stat.st_mtime)) and385 if ((int(self.stat.st_mtime) != int(other.stat.st_mtime)) and
386 (self.stat.st_mtime > 0 or other.stat.st_mtime > 0)):386 (self.stat.st_mtime > 0 or other.stat.st_mtime > 0)):
387 log_diff(_("File %%s has mtime %s, expected %s") %387 log_diff(_(u"File %%s has mtime %s, expected %s") %
388 (dup_time.timetopretty(int(other.stat.st_mtime)),388 (dup_time.timetopretty(int(other.stat.st_mtime)),
389 dup_time.timetopretty(int(self.stat.st_mtime))))389 dup_time.timetopretty(int(self.stat.st_mtime))))
390 return 0390 return 0
@@ -392,33 +392,33 @@
392 if self.compare_data(other):392 if self.compare_data(other):
393 return 1393 return 1
394 else:394 else:
395 log_diff(_("Data for file %s is different"))395 log_diff(_(u"Data for file %s is different"))
396 return 0396 return 0
397 else:397 else:
398 return 1398 return 1
399 elif self.issym():399 elif self.issym():
400 if self.symtext == other.symtext or self.symtext + "/" == other.symtext:400 if self.symtext == other.symtext or self.symtext + u"/" == other.symtext:
401 return 1401 return 1
402 else:402 else:
403 log_diff(_("Symlink %%s points to %s, expected %s") %403 log_diff(_(u"Symlink %%s points to %s, expected %s") %
404 (other.symtext, self.symtext))404 (other.symtext, self.symtext))
405 return 0405 return 0
406 elif self.isdev():406 elif self.isdev():
407 if not self.perms_equal(other):407 if not self.perms_equal(other):
408 log_diff(_("File %%s has permissions %s, expected %s") %408 log_diff(_(u"File %%s has permissions %s, expected %s") %
409 (other.getperms(), self.getperms()))409 (other.getperms(), self.getperms()))
410 return 0410 return 0
411 if self.devnums != other.devnums:411 if self.devnums != other.devnums:
412 log_diff(_("Device file %%s has numbers %s, expected %s")412 log_diff(_(u"Device file %%s has numbers %s, expected %s")
413 % (other.devnums, self.devnums))413 % (other.devnums, self.devnums))
414 return 0414 return 0
415 return 1415 return 1
416 assert 0416 assert 0
417417
418 def compare_data(self, other):418 def compare_data(self, other):
419 """Compare data from two regular files, return true if same"""419 u"""Compare data from two regular files, return true if same"""
420 f1 = self.open("rb")420 f1 = self.open(u"rb")
421 f2 = other.open("rb")421 f2 = other.open(u"rb")
422422
423 def close():423 def close():
424 assert not f1.close()424 assert not f1.close()
@@ -435,15 +435,15 @@
435 return 1435 return 1
436436
437 def perms_equal(self, other):437 def perms_equal(self, other):
438 """True if self and other have same permissions and ownership"""438 u"""True if self and other have same permissions and ownership"""
439 s1, s2 = self.stat, other.stat439 s1, s2 = self.stat, other.stat
440 return (self.mode == other.mode and440 return (self.mode == other.mode and
441 s1.st_gid == s2.st_gid and s1.st_uid == s2.st_uid)441 s1.st_gid == s2.st_gid and s1.st_uid == s2.st_uid)
442442
443 def copy(self, other):443 def copy(self, other):
444 """Copy self to other. Also copies data. Other must be Path"""444 u"""Copy self to other. Also copies data. Other must be Path"""
445 if self.isreg():445 if self.isreg():
446 other.writefileobj(self.open("rb"))446 other.writefileobj(self.open(u"rb"))
447 elif self.isdir():447 elif self.isdir():
448 os.mkdir(other.name)448 os.mkdir(other.name)
449 elif self.issym():449 elif self.issym():
@@ -456,15 +456,15 @@
456 elif self.issock():456 elif self.issock():
457 socket.socket(socket.AF_UNIX).bind(other.name)457 socket.socket(socket.AF_UNIX).bind(other.name)
458 elif self.isdev():458 elif self.isdev():
459 if self.type == "chr":459 if self.type == u"chr":
460 devtype = "c"460 devtype = u"c"
461 else:461 else:
462 devtype = "b"462 devtype = u"b"
463 other.makedev(devtype, *self.devnums)463 other.makedev(devtype, *self.devnums)
464 self.copy_attribs(other)464 self.copy_attribs(other)
465465
466 def copy_attribs(self, other):466 def copy_attribs(self, other):
467 """Only copy attributes from self to other"""467 u"""Only copy attributes from self to other"""
468 if isinstance(other, Path):468 if isinstance(other, Path):
469 if self.stat is not None:469 if self.stat is not None:
470 util.maybe_ignore_errors(lambda: os.chown(other.name, self.stat.st_uid, self.stat.st_gid))470 util.maybe_ignore_errors(lambda: os.chown(other.name, self.stat.st_uid, self.stat.st_gid))
@@ -481,18 +481,18 @@
481 other.mode = self.mode481 other.mode = self.mode
482482
483 def __unicode__(self):483 def __unicode__(self):
484 """Return string representation"""484 u"""Return string representation"""
485 return u"(%s %s)" % (util.uindex(self.index), self.type)485 return u"(%s %s)" % (util.uindex(self.index), self.type)
486486
487487
488class Path(ROPath):488class Path(ROPath):
489 """489 u"""
490 Path class - wrapper around ordinary local files490 Path class - wrapper around ordinary local files
491491
492 Besides caching stat() results, this class organizes various file492 Besides caching stat() results, this class organizes various file
493 code.493 code.
494 """494 """
495 regex_chars_to_quote = re.compile("[\\\\\\\"\\$`]")495 regex_chars_to_quote = re.compile(u"[\\\\\\\"\\$`]")
496496
497 def rename_index(self, index):497 def rename_index(self, index):
498 if not globals.rename or not index:498 if not globals.rename or not index:
@@ -508,7 +508,7 @@
508 return index # no rename found508 return index # no rename found
509509
510 def __init__(self, base, index=()):510 def __init__(self, base, index=()):
511 """Path initializer"""511 u"""Path initializer"""
512 # self.opened should be true if the file has been opened, and512 # self.opened should be true if the file has been opened, and
513 # self.fileobj can override returned fileobj513 # self.fileobj can override returned fileobj
514 self.opened, self.fileobj = None, None514 self.opened, self.fileobj = None, None
@@ -530,7 +530,7 @@
530 self.setdata()530 self.setdata()
531531
532 def setdata(self):532 def setdata(self):
533 """Refresh stat cache"""533 u"""Refresh stat cache"""
534 try:534 try:
535 # We may be asked to look at the target of symlinks rather than535 # We may be asked to look at the target of symlinks rather than
536 # the link itself.536 # the link itself.
@@ -540,7 +540,7 @@
540 self.stat = os.lstat(self.name)540 self.stat = os.lstat(self.name)
541 except OSError as e:541 except OSError as e:
542 err_string = errno.errorcode[e[0]]542 err_string = errno.errorcode[e[0]]
543 if err_string in ["ENOENT", "ENOTDIR", "ELOOP", "ENOTCONN"]:543 if err_string in [u"ENOENT", u"ENOTDIR", u"ELOOP", u"ENOTCONN"]:
544 self.stat, self.type = None, None # file doesn't exist544 self.stat, self.type = None, None # file doesn't exist
545 self.mode = None545 self.mode = None
546 else:546 else:
@@ -551,23 +551,23 @@
551 self.symtext = os.readlink(self.name)551 self.symtext = os.readlink(self.name)
552552
553 def append(self, ext):553 def append(self, ext):
554 """Return new Path with ext added to index"""554 u"""Return new Path with ext added to index"""
555 return self.__class__(self.base, self.index + (ext,))555 return self.__class__(self.base, self.index + (ext,))
556556
557 def new_index(self, index):557 def new_index(self, index):
558 """Return new Path with index index"""558 u"""Return new Path with index index"""
559 return self.__class__(self.base, index)559 return self.__class__(self.base, index)
560560
561 def listdir(self):561 def listdir(self):
562 """Return list generated by os.listdir"""562 u"""Return list generated by os.listdir"""
563 return os.listdir(self.name)563 return os.listdir(self.name)
564564
565 def isemptydir(self):565 def isemptydir(self):
566 """Return true if path is a directory and is empty"""566 u"""Return true if path is a directory and is empty"""
567 return self.isdir() and not self.listdir()567 return self.isdir() and not self.listdir()
568568
569 def open(self, mode="rb"):569 def open(self, mode=u"rb"):
570 """570 u"""
571 Return fileobj associated with self571 Return fileobj associated with self
572572
573 Usually this is just the file data on disk, but can be573 Usually this is just the file data on disk, but can be
@@ -581,25 +581,25 @@
581 return result581 return result
582582
583 def makedev(self, type, major, minor):583 def makedev(self, type, major, minor):
584 """Make a device file with specified type, major/minor nums"""584 u"""Make a device file with specified type, major/minor nums"""
585 cmdlist = ['mknod', self.name, type, str(major), str(minor)]585 cmdlist = [u'mknod', self.name, type, str(major), str(minor)]
586 if os.spawnvp(os.P_WAIT, 'mknod', cmdlist) != 0:586 if os.spawnvp(os.P_WAIT, u'mknod', cmdlist) != 0:
587 raise PathException("Error running %s" % cmdlist)587 raise PathException(u"Error running %s" % cmdlist)
588 self.setdata()588 self.setdata()
589589
590 def mkdir(self):590 def mkdir(self):
591 """Make directory(s) at specified path"""591 u"""Make directory(s) at specified path"""
592 log.Info(_("Making directory %s") % self.uc_name)592 log.Info(_(u"Making directory %s") % self.uc_name)
593 try:593 try:
594 os.makedirs(self.name)594 os.makedirs(self.name)
595 except OSError:595 except OSError:
596 if (not globals.force):596 if (not globals.force):
597 raise PathException("Error creating directory %s" % self.uc_name, 7)597 raise PathException(u"Error creating directory %s" % self.uc_name, 7)
598 self.setdata()598 self.setdata()
599599
600 def delete(self):600 def delete(self):
601 """Remove this file"""601 u"""Remove this file"""
602 log.Info(_("Deleting %s") % self.uc_name)602 log.Info(_(u"Deleting %s") % self.uc_name)
603 if self.isdir():603 if self.isdir():
604 util.ignore_missing(os.rmdir, self.name)604 util.ignore_missing(os.rmdir, self.name)
605 else:605 else:
@@ -607,15 +607,15 @@
607 self.setdata()607 self.setdata()
608608
609 def touch(self):609 def touch(self):
610 """Open the file, write 0 bytes, close"""610 u"""Open the file, write 0 bytes, close"""
611 log.Info(_("Touching %s") % self.uc_name)611 log.Info(_(u"Touching %s") % self.uc_name)
612 fp = self.open("wb")612 fp = self.open(u"wb")
613 fp.close()613 fp.close()
614614
615 def deltree(self):615 def deltree(self):
616 """Remove self by recursively deleting files under it"""616 u"""Remove self by recursively deleting files under it"""
617 from duplicity import selection # todo: avoid circ. dep. issue617 from duplicity import selection # todo: avoid circ. dep. issue
618 log.Info(_("Deleting tree %s") % self.uc_name)618 log.Info(_(u"Deleting tree %s") % self.uc_name)
619 itr = IterTreeReducer(PathDeleter, [])619 itr = IterTreeReducer(PathDeleter, [])
620 for path in selection.Select(self).set_iter():620 for path in selection.Select(self).set_iter():
621 itr(path.index, path)621 itr(path.index, path)
@@ -623,50 +623,50 @@
623 self.setdata()623 self.setdata()
624624
625 def get_parent_dir(self):625 def get_parent_dir(self):
626 """Return directory that self is in"""626 u"""Return directory that self is in"""
627 if self.index:627 if self.index:
628 return Path(self.base, self.index[:-1])628 return Path(self.base, self.index[:-1])
629 else:629 else:
630 components = self.base.split("/")630 components = self.base.split(u"/")
631 if len(components) == 2 and not components[0]:631 if len(components) == 2 and not components[0]:
632 return Path("/") # already in root directory632 return Path(u"/") # already in root directory
633 else:633 else:
634 return Path("/".join(components[:-1]))634 return Path(u"/".join(components[:-1]))
635635
636 def writefileobj(self, fin):636 def writefileobj(self, fin):
637 """Copy file object fin to self. Close both when done."""637 u"""Copy file object fin to self. Close both when done."""
638 fout = self.open("wb")638 fout = self.open(u"wb")
639 while 1:639 while 1:
640 buf = fin.read(_copy_blocksize)640 buf = fin.read(_copy_blocksize)
641 if not buf:641 if not buf:
642 break642 break
643 fout.write(buf)643 fout.write(buf)
644 if fin.close() or fout.close():644 if fin.close() or fout.close():
645 raise PathException("Error closing file object")645 raise PathException(u"Error closing file object")
646 self.setdata()646 self.setdata()
647647
648 def rename(self, new_path):648 def rename(self, new_path):
649 """Rename file at current path to new_path."""649 u"""Rename file at current path to new_path."""
650 shutil.move(self.name, new_path.name)650 shutil.move(self.name, new_path.name)
651 self.setdata()651 self.setdata()
652 new_path.setdata()652 new_path.setdata()
653653
654 def move(self, new_path):654 def move(self, new_path):
655 """Like rename but destination may be on different file system"""655 u"""Like rename but destination may be on different file system"""
656 self.copy(new_path)656 self.copy(new_path)
657 self.delete()657 self.delete()
658658
659 def chmod(self, mode):659 def chmod(self, mode):
660 """Change permissions of the path"""660 u"""Change permissions of the path"""
661 os.chmod(self.name, mode)661 os.chmod(self.name, mode)
662 self.setdata()662 self.setdata()
663663
664 def patch_with_attribs(self, diff_ropath):664 def patch_with_attribs(self, diff_ropath):
665 """Patch self with diff and then copy attributes over"""665 u"""Patch self with diff and then copy attributes over"""
666 assert self.isreg() and diff_ropath.isreg()666 assert self.isreg() and diff_ropath.isreg()
667 temp_path = self.get_temp_in_same_dir()667 temp_path = self.get_temp_in_same_dir()
668 fbase = self.open("rb")668 fbase = self.open(u"rb")
669 fdiff = diff_ropath.open("rb")669 fdiff = diff_ropath.open(u"rb")
670 patch_fileobj = librsync.PatchedFile(fbase, fdiff)670 patch_fileobj = librsync.PatchedFile(fbase, fdiff)
671 temp_path.writefileobj(patch_fileobj)671 temp_path.writefileobj(patch_fileobj)
672 assert not fbase.close()672 assert not fbase.close()
@@ -675,11 +675,11 @@
675 temp_path.rename(self)675 temp_path.rename(self)
676676
677 def get_temp_in_same_dir(self):677 def get_temp_in_same_dir(self):
678 """Return temp non existent path in same directory as self"""678 u"""Return temp non existent path in same directory as self"""
679 global _tmp_path_counter679 global _tmp_path_counter
680 parent_dir = self.get_parent_dir()680 parent_dir = self.get_parent_dir()
681 while 1:681 while 1:
682 temp_path = parent_dir.append("duplicity_temp." +682 temp_path = parent_dir.append(u"duplicity_temp." +
683 str(_tmp_path_counter))683 str(_tmp_path_counter))
684 if not temp_path.type:684 if not temp_path.type:
685 return temp_path685 return temp_path
@@ -688,18 +688,18 @@
688 u"Warning too many temp files created for " + self.uc_name688 u"Warning too many temp files created for " + self.uc_name
689689
690 def compare_recursive(self, other, verbose=None):690 def compare_recursive(self, other, verbose=None):
691 """Compare self to other Path, descending down directories"""691 u"""Compare self to other Path, descending down directories"""
692 from duplicity import selection # todo: avoid circ. dep. issue692 from duplicity import selection # todo: avoid circ. dep. issue
693 selfsel = selection.Select(self).set_iter()693 selfsel = selection.Select(self).set_iter()
694 othersel = selection.Select(other).set_iter()694 othersel = selection.Select(other).set_iter()
695 return Iter.equal(selfsel, othersel, verbose)695 return Iter.equal(selfsel, othersel, verbose)
696696
697 def __repr__(self):697 def __repr__(self):
698 """Return string representation"""698 u"""Return string representation"""
699 return "(%s %s %s)" % (self.index, self.name, self.type)699 return u"(%s %s %s)" % (self.index, self.name, self.type)
700700
701 def quote(self, s=None):701 def quote(self, s=None):
702 """702 u"""
703 Return quoted version of s (defaults to self.name)703 Return quoted version of s (defaults to self.name)
704704
705 The output is meant to be interpreted with shells, so can be705 The output is meant to be interpreted with shells, so can be
@@ -707,15 +707,15 @@
707 """707 """
708 if not s:708 if not s:
709 s = self.name709 s = self.name
710 return '"%s"' % self.regex_chars_to_quote.sub(lambda m: "\\" + m.group(0), s)710 return u'"%s"' % self.regex_chars_to_quote.sub(lambda m: u"\\" + m.group(0), s)
711711
712 def unquote(self, s):712 def unquote(self, s):
713 """Return unquoted version of string s, as quoted by above quote()"""713 u"""Return unquoted version of string s, as quoted by above quote()"""
714 assert s[0] == s[-1] == "\"" # string must be quoted by above714 assert s[0] == s[-1] == u"\"" # string must be quoted by above
715 result = ""715 result = u""
716 i = 1716 i = 1
717 while i < len(s) - 1:717 while i < len(s) - 1:
718 if s[i] == "\\":718 if s[i] == u"\\":
719 result += s[i + 1]719 result += s[i + 1]
720 i += 2720 i += 2
721 else:721 else:
@@ -724,38 +724,38 @@
724 return result724 return result
725725
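A standalone sketch of the shell-style quoting annotated above, using the same character class (backslash, double quote, dollar, backtick); the helpers are simplified stand-ins for Path.quote()/Path.unquote():

    import re

    chars_to_quote = re.compile(u"[\\\\\\\"\\$`]")

    def shell_quote(s):
        # Backslash-escape the special characters, then wrap in double quotes.
        return u'"%s"' % chars_to_quote.sub(lambda m: u"\\" + m.group(0), s)

    def shell_unquote(q):
        # Reverse shell_quote(): drop the wrapping quotes and the escapes.
        assert q[0] == q[-1] == u'"'
        out, i = [], 1
        while i < len(q) - 1:
            if q[i] == u"\\":
                out.append(q[i + 1])
                i += 2
            else:
                out.append(q[i])
                i += 1
        return u"".join(out)

    name = u'weird "file" $HOME.txt'
    assert shell_unquote(shell_quote(name)) == name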
726 def get_filename(self):726 def get_filename(self):
727 """Return filename of last component"""727 u"""Return filename of last component"""
728 components = self.name.split("/")728 components = self.name.split(u"/")
729 assert components and components[-1]729 assert components and components[-1]
730 return components[-1]730 return components[-1]
731731
732 def get_canonical(self):732 def get_canonical(self):
733 """733 u"""
734 Return string of canonical version of path734 Return string of canonical version of path
735735
736 Remove ".", and trailing slashes where possible. Note that736 Remove ".", and trailing slashes where possible. Note that
737 it's harder to remove "..", as "foo/bar/.." is not necessarily737 it's harder to remove "..", as "foo/bar/.." is not necessarily
738 "foo", so we can't use path.normpath()738 "foo", so we can't use path.normpath()
739 """739 """
740 newpath = "/".join(filter(lambda x: x and x != ".",740 newpath = u"/".join(filter(lambda x: x and x != u".",
741 self.name.split("/")))741 self.name.split(u"/")))
742 if self.name[0] == "/":742 if self.name[0] == u"/":
743 return "/" + newpath743 return u"/" + newpath
744 elif newpath:744 elif newpath:
745 return newpath745 return newpath
746 else:746 else:
747 return "."747 return u"."
748748
749749
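A tiny sketch of the canonicalisation rule described above: drop "." and empty components (doubled or trailing slashes), keep ".." untouched, and preserve a leading slash. Simplified helper, not Path.get_canonical() itself:

    def canonical(name):
        parts = [p for p in name.split(u"/") if p and p != u"."]
        if name.startswith(u"/"):
            return u"/" + u"/".join(parts)
        return u"/".join(parts) or u"."

    assert canonical(u"foo//bar/./") == u"foo/bar"
    assert canonical(u"/foo/./bar") == u"/foo/bar"
    assert canonical(u"./.") == u"."
    assert canonical(u"a/b/..") == u"a/b/.."   # ".." is deliberately left alone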
750class DupPath(Path):750class DupPath(Path):
751 """751 u"""
752 Represent duplicity data files752 Represent duplicity data files
753753
754 Based on the file name, files that are compressed or encrypted754 Based on the file name, files that are compressed or encrypted
755 will have different open() methods.755 will have different open() methods.
756 """756 """
757 def __init__(self, base, index=(), parseresults=None):757 def __init__(self, base, index=(), parseresults=None):
758 """758 u"""
759 DupPath initializer759 DupPath initializer
760760
761 The actual filename (no directory) must be the single element761 The actual filename (no directory) must be the single element
@@ -767,12 +767,12 @@
767 else:767 else:
768 assert len(index) == 1768 assert len(index) == 1
769 self.pr = file_naming.parse(index[0])769 self.pr = file_naming.parse(index[0])
770 assert self.pr, "must be a recognizable duplicity file"770 assert self.pr, u"must be a recognizable duplicity file"
771771
772 Path.__init__(self, base, index)772 Path.__init__(self, base, index)
773773
774 def filtered_open(self, mode="rb", gpg_profile=None):774 def filtered_open(self, mode=u"rb", gpg_profile=None):
775 """775 u"""
776 Return fileobj with appropriate encryption/compression776 Return fileobj with appropriate encryption/compression
777777
778 If encryption is specified but no gpg_profile, use778 If encryption is specified but no gpg_profile, use
@@ -788,16 +788,16 @@
788 elif self.pr.encrypted:788 elif self.pr.encrypted:
789 if not gpg_profile:789 if not gpg_profile:
790 gpg_profile = globals.gpg_profile790 gpg_profile = globals.gpg_profile
791 if mode == "rb":791 if mode == u"rb":
792 return gpg.GPGFile(False, self, gpg_profile)792 return gpg.GPGFile(False, self, gpg_profile)
793 elif mode == "wb":793 elif mode == u"wb":
794 return gpg.GPGFile(True, self, gpg_profile)794 return gpg.GPGFile(True, self, gpg_profile)
795 else:795 else:
796 return self.open(mode)796 return self.open(mode)
797797
798798
799class PathDeleter(ITRBranch):799class PathDeleter(ITRBranch):
800 """Delete a directory. Called by Path.deltree"""800 u"""Delete a directory. Called by Path.deltree"""
801 def start_process(self, index, path):801 def start_process(self, index, path):
802 self.path = path802 self.path = path
803803
804804
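
As context for the quote()/unquote() pair touched in the path.py hunk above, the following is a minimal standalone sketch of the same round-trip logic. The regex_chars_to_quote pattern here is a stand-in, since the real pattern is defined elsewhere in duplicity/path.py and does not appear in this diff, so treat this as an illustration rather than duplicity's actual API.

# Standalone sketch of the quote()/unquote() round trip shown above.
# NOTE: regex_chars_to_quote is a stand-in; the real pattern is defined
# elsewhere in duplicity/path.py and is not part of this diff.
import re

regex_chars_to_quote = re.compile(u'[\\\\"$`]')  # assumed: backslash, double quote, $, backtick


def quote(s):
    u"""Backslash-escape the matched characters and wrap the result in double quotes."""
    return u'"%s"' % regex_chars_to_quote.sub(lambda m: u"\\" + m.group(0), s)


def unquote(s):
    u"""Reverse quote(): drop the surrounding quotes and the escaping backslashes."""
    assert s[0] == s[-1] == u'"'
    result, i = u"", 1
    while i < len(s) - 1:
        if s[i] == u"\\":
            result += s[i + 1]
            i += 2
        else:
            result += s[i]
            i += 1
    return result


assert unquote(quote(u'dir with "spaces"/back\\slash')) == u'dir with "spaces"/back\\slash'
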
=== modified file 'duplicity/statistics.py'
--- duplicity/statistics.py 2018-07-24 11:52:33 +0000
+++ duplicity/statistics.py 2018-09-24 21:19:45 +0000
@@ -19,7 +19,7 @@
19# along with duplicity; if not, write to the Free Software Foundation,19# along with duplicity; if not, write to the Free Software Foundation,
20# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA20# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
2121
22"""Generate and process backup statistics"""22u"""Generate and process backup statistics"""
2323
24from future_builtins import map24from future_builtins import map
2525
@@ -35,66 +35,66 @@
3535
3636
37class StatsObj:37class StatsObj:
38 """Contains various statistics, provide string conversion functions"""38 u"""Contains various statistics, provide string conversion functions"""
39 # used when quoting files in get_stats_line39 # used when quoting files in get_stats_line
40 space_regex = re.compile(" ")40 space_regex = re.compile(u" ")
4141
42 stat_file_attrs = ('SourceFiles',42 stat_file_attrs = (u'SourceFiles',
43 'SourceFileSize',43 u'SourceFileSize',
44 'NewFiles',44 u'NewFiles',
45 'NewFileSize',45 u'NewFileSize',
46 'DeletedFiles',46 u'DeletedFiles',
47 'ChangedFiles',47 u'ChangedFiles',
48 'ChangedFileSize',48 u'ChangedFileSize',
49 'ChangedDeltaSize',49 u'ChangedDeltaSize',
50 'DeltaEntries',50 u'DeltaEntries',
51 'RawDeltaSize')51 u'RawDeltaSize')
52 stat_misc_attrs = ('Errors',52 stat_misc_attrs = (u'Errors',
53 'TotalDestinationSizeChange')53 u'TotalDestinationSizeChange')
54 stat_time_attrs = ('StartTime',54 stat_time_attrs = (u'StartTime',
55 'EndTime',55 u'EndTime',
56 'ElapsedTime')56 u'ElapsedTime')
57 stat_attrs = (('Filename',) + stat_time_attrs +57 stat_attrs = ((u'Filename',) + stat_time_attrs +
58 stat_misc_attrs + stat_file_attrs)58 stat_misc_attrs + stat_file_attrs)
5959
60 # Below, the second value in each pair is true iff the value60 # Below, the second value in each pair is true iff the value
61 # indicates a number of bytes61 # indicates a number of bytes
62 stat_file_pairs = (('SourceFiles', False),62 stat_file_pairs = ((u'SourceFiles', False),
63 ('SourceFileSize', True),63 (u'SourceFileSize', True),
64 ('NewFiles', False),64 (u'NewFiles', False),
65 ('NewFileSize', True),65 (u'NewFileSize', True),
66 ('DeletedFiles', False),66 (u'DeletedFiles', False),
67 ('ChangedFiles', False),67 (u'ChangedFiles', False),
68 ('ChangedFileSize', True),68 (u'ChangedFileSize', True),
69 ('ChangedDeltaSize', True),69 (u'ChangedDeltaSize', True),
70 ('DeltaEntries', False),70 (u'DeltaEntries', False),
71 ('RawDeltaSize', True))71 (u'RawDeltaSize', True))
7272
73 # This is used in get_byte_summary_string below73 # This is used in get_byte_summary_string below
74 byte_abbrev_list = ((1024 * 1024 * 1024 * 1024, "TB"),74 byte_abbrev_list = ((1024 * 1024 * 1024 * 1024, u"TB"),
75 (1024 * 1024 * 1024, "GB"),75 (1024 * 1024 * 1024, u"GB"),
76 (1024 * 1024, "MB"),76 (1024 * 1024, u"MB"),
77 (1024, "KB"))77 (1024, u"KB"))
7878
79 def __init__(self):79 def __init__(self):
80 """Set attributes to None"""80 u"""Set attributes to None"""
81 for attr in self.stat_attrs:81 for attr in self.stat_attrs:
82 self.__dict__[attr] = None82 self.__dict__[attr] = None
8383
84 def get_stat(self, attribute):84 def get_stat(self, attribute):
85 """Get a statistic"""85 u"""Get a statistic"""
86 return self.__dict__[attribute]86 return self.__dict__[attribute]
8787
88 def set_stat(self, attr, value):88 def set_stat(self, attr, value):
89 """Set attribute to given value"""89 u"""Set attribute to given value"""
90 self.__dict__[attr] = value90 self.__dict__[attr] = value
9191
92 def increment_stat(self, attr):92 def increment_stat(self, attr):
93 """Add 1 to value of attribute"""93 u"""Add 1 to value of attribute"""
94 self.__dict__[attr] += 194 self.__dict__[attr] += 1
9595
96 def get_total_dest_size_change(self):96 def get_total_dest_size_change(self):
97 """Return total destination size change97 u"""Return total destination size change
9898
99 This represents the total increase in the size of the99 This represents the total increase in the size of the
100 duplicity destination directory, or None if not available.100 duplicity destination directory, or None if not available.
@@ -103,25 +103,25 @@
103 return 0 # this needs to be re-done for duplicity103 return 0 # this needs to be re-done for duplicity
104104
105 def get_stats_line(self, index, use_repr=1):105 def get_stats_line(self, index, use_repr=1):
106 """Return one line abbreviated version of full stats string"""106 u"""Return one line abbreviated version of full stats string"""
107 file_attrs = [str(self.get_stat(a)) for a in self.stat_file_attrs]107 file_attrs = [str(self.get_stat(a)) for a in self.stat_file_attrs]
108 if not index:108 if not index:
109 filename = "."109 filename = u"."
110 else:110 else:
111 filename = os.path.join(*index)111 filename = os.path.join(*index)
112 if use_repr:112 if use_repr:
113 # use repr to quote newlines in relative filename, then113 # use repr to quote newlines in relative filename, then
114 # take of leading and trailing quote and quote spaces.114 # take of leading and trailing quote and quote spaces.
115 filename = self.space_regex.sub("\\x20", repr(filename)[1:-1])115 filename = self.space_regex.sub(u"\\x20", repr(filename)[1:-1])
116 return " ".join([filename, ] + file_attrs)116 return u" ".join([filename, ] + file_attrs)
117117
118 def set_stats_from_line(self, line):118 def set_stats_from_line(self, line):
119 """Set statistics from given line"""119 u"""Set statistics from given line"""
120 def error():120 def error():
121 raise StatsException("Bad line '%s'" % line)121 raise StatsException(u"Bad line '%s'" % line)
122 if line[-1] == "\n":122 if line[-1] == u"\n":
123 line = line[:-1]123 line = line[:-1]
124 lineparts = line.split(" ")124 lineparts = line.split(u" ")
125 if len(lineparts) < len(self.stat_file_attrs):125 if len(lineparts) < len(self.stat_file_attrs):
126 error()126 error()
127 for attr, val_string in zip(self.stat_file_attrs,127 for attr, val_string in zip(self.stat_file_attrs,
@@ -137,62 +137,62 @@
137 return self137 return self
138138
139 def get_stats_string(self):139 def get_stats_string(self):
140 """Return extended string printing out statistics"""140 u"""Return extended string printing out statistics"""
141 return "%s%s%s" % (self.get_timestats_string(),141 return u"%s%s%s" % (self.get_timestats_string(),
142 self.get_filestats_string(),142 self.get_filestats_string(),
143 self.get_miscstats_string())143 self.get_miscstats_string())
144144
145 def get_timestats_string(self):145 def get_timestats_string(self):
146 """Return portion of statistics string dealing with time"""146 u"""Return portion of statistics string dealing with time"""
147 timelist = []147 timelist = []
148 if self.StartTime is not None:148 if self.StartTime is not None:
149 timelist.append("StartTime %.2f (%s)\n" %149 timelist.append(u"StartTime %.2f (%s)\n" %
150 (self.StartTime, dup_time.timetopretty(self.StartTime)))150 (self.StartTime, dup_time.timetopretty(self.StartTime)))
151 if self.EndTime is not None:151 if self.EndTime is not None:
152 timelist.append("EndTime %.2f (%s)\n" %152 timelist.append(u"EndTime %.2f (%s)\n" %
153 (self.EndTime, dup_time.timetopretty(self.EndTime)))153 (self.EndTime, dup_time.timetopretty(self.EndTime)))
154 if self.ElapsedTime or (self.StartTime is not None and154 if self.ElapsedTime or (self.StartTime is not None and
155 self.EndTime is not None):155 self.EndTime is not None):
156 if self.ElapsedTime is None:156 if self.ElapsedTime is None:
157 self.ElapsedTime = self.EndTime - self.StartTime157 self.ElapsedTime = self.EndTime - self.StartTime
158 timelist.append("ElapsedTime %.2f (%s)\n" %158 timelist.append(u"ElapsedTime %.2f (%s)\n" %
159 (self.ElapsedTime, dup_time.inttopretty(self.ElapsedTime)))159 (self.ElapsedTime, dup_time.inttopretty(self.ElapsedTime)))
160 return "".join(timelist)160 return u"".join(timelist)
161161
162 def get_filestats_string(self):162 def get_filestats_string(self):
163 """Return portion of statistics string about files and bytes"""163 u"""Return portion of statistics string about files and bytes"""
164 def fileline(stat_file_pair):164 def fileline(stat_file_pair):
165 """Return zero or one line of the string"""165 u"""Return zero or one line of the string"""
166 attr, in_bytes = stat_file_pair166 attr, in_bytes = stat_file_pair
167 val = self.get_stat(attr)167 val = self.get_stat(attr)
168 if val is None:168 if val is None:
169 return ""169 return u""
170 if in_bytes:170 if in_bytes:
171 return "%s %s (%s)\n" % (attr, val,171 return u"%s %s (%s)\n" % (attr, val,
172 self.get_byte_summary_string(val))172 self.get_byte_summary_string(val))
173 else:173 else:
174 return "%s %s\n" % (attr, val)174 return u"%s %s\n" % (attr, val)
175175
176 return "".join(map(fileline, self.stat_file_pairs))176 return u"".join(map(fileline, self.stat_file_pairs))
177177
178 def get_miscstats_string(self):178 def get_miscstats_string(self):
179 """Return portion of extended stat string about misc attributes"""179 u"""Return portion of extended stat string about misc attributes"""
180 misc_string = ""180 misc_string = u""
181 tdsc = self.TotalDestinationSizeChange181 tdsc = self.TotalDestinationSizeChange
182 if tdsc is not None:182 if tdsc is not None:
183 misc_string += ("TotalDestinationSizeChange %s (%s)\n" %183 misc_string += (u"TotalDestinationSizeChange %s (%s)\n" %
184 (tdsc, self.get_byte_summary_string(tdsc)))184 (tdsc, self.get_byte_summary_string(tdsc)))
185 if self.Errors is not None:185 if self.Errors is not None:
186 misc_string += "Errors %d\n" % self.Errors186 misc_string += u"Errors %d\n" % self.Errors
187 return misc_string187 return misc_string
188188
189 def get_byte_summary_string(self, byte_count):189 def get_byte_summary_string(self, byte_count):
190 """Turn byte count into human readable string like "7.23GB" """190 u"""Turn byte count into human readable string like "7.23GB" """
191 if byte_count < 0:191 if byte_count < 0:
192 sign = "-"192 sign = u"-"
193 byte_count = -byte_count193 byte_count = -byte_count
194 else:194 else:
195 sign = ""195 sign = u""
196196
197 for abbrev_bytes, abbrev_string in self.byte_abbrev_list:197 for abbrev_bytes, abbrev_string in self.byte_abbrev_list:
198 if byte_count >= abbrev_bytes:198 if byte_count >= abbrev_bytes:
@@ -204,26 +204,26 @@
204 precision = 1204 precision = 1
205 else:205 else:
206 precision = 2206 precision = 2
207 return "%s%%.%df %s" % (sign, precision, abbrev_string) \207 return u"%s%%.%df %s" % (sign, precision, abbrev_string) \
208 % (abbrev_count,)208 % (abbrev_count,)
209 byte_count = round(byte_count)209 byte_count = round(byte_count)
210 if byte_count == 1:210 if byte_count == 1:
211 return sign + "1 byte"211 return sign + u"1 byte"
212 else:212 else:
213 return "%s%d bytes" % (sign, byte_count)213 return u"%s%d bytes" % (sign, byte_count)
214214
215 def get_stats_logstring(self, title):215 def get_stats_logstring(self, title):
216 """Like get_stats_string, but add header and footer"""216 u"""Like get_stats_string, but add header and footer"""
217 header = "--------------[ %s ]--------------" % title217 header = u"--------------[ %s ]--------------" % title
218 footer = "-" * len(header)218 footer = u"-" * len(header)
219 return "%s\n%s%s\n" % (header, self.get_stats_string(), footer)219 return u"%s\n%s%s\n" % (header, self.get_stats_string(), footer)
220220
221 def set_stats_from_string(self, s):221 def set_stats_from_string(self, s):
222 """Initialize attributes from string, return self for convenience"""222 u"""Initialize attributes from string, return self for convenience"""
223 def error(line):223 def error(line):
224 raise StatsException("Bad line '%s'" % line)224 raise StatsException(u"Bad line '%s'" % line)
225225
226 for line in s.split("\n"):226 for line in s.split(u"\n"):
227 if not line:227 if not line:
228 continue228 continue
229 line_parts = line.split()229 line_parts = line.split()
@@ -247,20 +247,20 @@
247 return self247 return self
248248
249 def write_stats_to_path(self, path):249 def write_stats_to_path(self, path):
250 """Write statistics string to given path"""250 u"""Write statistics string to given path"""
251 fin = path.open("w")251 fin = path.open(u"w")
252 fin.write(self.get_stats_string())252 fin.write(self.get_stats_string())
253 assert not fin.close()253 assert not fin.close()
254254
255 def read_stats_from_path(self, path):255 def read_stats_from_path(self, path):
256 """Set statistics from path, return self for convenience"""256 u"""Set statistics from path, return self for convenience"""
257 fp = path.open("r")257 fp = path.open(u"r")
258 self.set_stats_from_string(fp.read())258 self.set_stats_from_string(fp.read())
259 assert not fp.close()259 assert not fp.close()
260 return self260 return self
261261
262 def stats_equal(self, s):262 def stats_equal(self, s):
263 """Return true if s has same statistics as self"""263 u"""Return true if s has same statistics as self"""
264 assert isinstance(s, StatsObj)264 assert isinstance(s, StatsObj)
265 for attr in self.stat_file_attrs:265 for attr in self.stat_file_attrs:
266 if self.get_stat(attr) != s.get_stat(attr):266 if self.get_stat(attr) != s.get_stat(attr):
@@ -268,7 +268,7 @@
268 return 1268 return 1
269269
270 def set_to_average(self, statobj_list):270 def set_to_average(self, statobj_list):
271 """Set self's attributes to average of those in statobj_list"""271 u"""Set self's attributes to average of those in statobj_list"""
272 for attr in self.stat_attrs:272 for attr in self.stat_attrs:
273 self.set_stat(attr, 0)273 self.set_stat(attr, 0)
274 for statobj in statobj_list:274 for statobj in statobj_list:
@@ -290,7 +290,7 @@
290 return self290 return self
291291
292 def get_statsobj_copy(self):292 def get_statsobj_copy(self):
293 """Return new StatsObj object with same stats as self"""293 u"""Return new StatsObj object with same stats as self"""
294 s = StatsObj()294 s = StatsObj()
295 for attr in self.stat_attrs:295 for attr in self.stat_attrs:
296 s.set_stat(attr, self.get_stat(attr))296 s.set_stat(attr, self.get_stat(attr))
@@ -298,9 +298,9 @@
298298
299299
300class StatsDeltaProcess(StatsObj):300class StatsDeltaProcess(StatsObj):
301 """Keep track of statistics during DirDelta process"""301 u"""Keep track of statistics during DirDelta process"""
302 def __init__(self):302 def __init__(self):
303 """StatsDeltaProcess initializer - zero file attributes"""303 u"""StatsDeltaProcess initializer - zero file attributes"""
304 StatsObj.__init__(self)304 StatsObj.__init__(self)
305 for attr in StatsObj.stat_file_attrs:305 for attr in StatsObj.stat_file_attrs:
306 self.__dict__[attr] = 0306 self.__dict__[attr] = 0
@@ -309,39 +309,39 @@
309 self.files_changed = []309 self.files_changed = []
310310
311 def add_new_file(self, path):311 def add_new_file(self, path):
312 """Add stats of new file path to statistics"""312 u"""Add stats of new file path to statistics"""
313 filesize = path.getsize()313 filesize = path.getsize()
314 self.SourceFiles += 1314 self.SourceFiles += 1
315 # SourceFileSize is added-to incrementally as read315 # SourceFileSize is added-to incrementally as read
316 self.NewFiles += 1316 self.NewFiles += 1
317 self.NewFileSize += filesize317 self.NewFileSize += filesize
318 self.DeltaEntries += 1318 self.DeltaEntries += 1
319 self.add_delta_entries_file(path, 'new')319 self.add_delta_entries_file(path, b'new')
320320
321 def add_changed_file(self, path):321 def add_changed_file(self, path):
322 """Add stats of file that has changed since last backup"""322 u"""Add stats of file that has changed since last backup"""
323 filesize = path.getsize()323 filesize = path.getsize()
324 self.SourceFiles += 1324 self.SourceFiles += 1
325 # SourceFileSize is added-to incrementally as read325 # SourceFileSize is added-to incrementally as read
326 self.ChangedFiles += 1326 self.ChangedFiles += 1
327 self.ChangedFileSize += filesize327 self.ChangedFileSize += filesize
328 self.DeltaEntries += 1328 self.DeltaEntries += 1
329 self.add_delta_entries_file(path, 'changed')329 self.add_delta_entries_file(path, b'changed')
330330
331 def add_deleted_file(self, path):331 def add_deleted_file(self, path):
332 """Add stats of file no longer in source directory"""332 u"""Add stats of file no longer in source directory"""
333 self.DeletedFiles += 1 # can't add size since not available333 self.DeletedFiles += 1 # can't add size since not available
334 self.DeltaEntries += 1334 self.DeltaEntries += 1
335 self.add_delta_entries_file(path, 'deleted')335 self.add_delta_entries_file(path, b'deleted')
336336
337 def add_unchanged_file(self, path):337 def add_unchanged_file(self, path):
338 """Add stats of file that hasn't changed since last backup"""338 u"""Add stats of file that hasn't changed since last backup"""
339 filesize = path.getsize()339 filesize = path.getsize()
340 self.SourceFiles += 1340 self.SourceFiles += 1
341 self.SourceFileSize += filesize341 self.SourceFileSize += filesize
342342
343 def close(self):343 def close(self):
344 """End collection of data, set EndTime"""344 u"""End collection of data, set EndTime"""
345 self.EndTime = time.time()345 self.EndTime = time.time()
346346
347 def add_delta_entries_file(self, path, action_type):347 def add_delta_entries_file(self, path, action_type):
348348
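
As a quick sanity check of the byte-summary formatting adorned in the statistics.py hunk above, the sketch below reproduces the abbreviation table and the "1 byte"/"N bytes" fallback. The precision handling is simplified to a fixed two decimal places, since part of that branch sits outside the visible diff context.

# Simplified sketch of StatsObj.get_byte_summary_string(); the fixed %.2f
# precision is an assumption, the real method varies it with magnitude.
byte_abbrev_list = ((1024 * 1024 * 1024 * 1024, u"TB"),
                    (1024 * 1024 * 1024, u"GB"),
                    (1024 * 1024, u"MB"),
                    (1024, u"KB"))


def byte_summary(byte_count):
    u"""Turn a byte count into a human readable string, e.g. 7.23 GB."""
    sign = u""
    if byte_count < 0:
        sign, byte_count = u"-", -byte_count
    for abbrev_bytes, abbrev_string in byte_abbrev_list:
        if byte_count >= abbrev_bytes:
            return u"%s%.2f %s" % (sign, float(byte_count) / abbrev_bytes, abbrev_string)
    byte_count = round(byte_count)
    if byte_count == 1:
        return sign + u"1 byte"
    return u"%s%d bytes" % (sign, byte_count)


print(byte_summary(7763102106))  # -> 7.23 GB
print(byte_summary(-1))          # -> -1 byte
print(byte_summary(512))         # -> 512 bytes
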
=== modified file 'duplicity/util.py'
--- duplicity/util.py 2018-07-24 11:52:33 +0000
+++ duplicity/util.py 2018-09-24 21:19:45 +0000
@@ -19,7 +19,7 @@
19# along with duplicity; if not, write to the Free Software Foundation,19# along with duplicity; if not, write to the Free Software Foundation,
20# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA20# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
2121
22"""22u"""
23Miscellaneous utilities.23Miscellaneous utilities.
24"""24"""
2525
@@ -51,23 +51,23 @@
51 # ToDo: Revisit this once we drop Python 2 support/the backport is complete51 # ToDo: Revisit this once we drop Python 2 support/the backport is complete
5252
53 def fsencode(unicode_filename):53 def fsencode(unicode_filename):
54 """Convert a unicode filename to a filename encoded in the system encoding"""54 u"""Convert a unicode filename to a filename encoded in the system encoding"""
55 # For paths, just use path.name rather than converting with this55 # For paths, just use path.name rather than converting with this
56 # If we are not doing any cleverness with non-unicode filename bytes,56 # If we are not doing any cleverness with non-unicode filename bytes,
57 # encoding to system encoding is good enough57 # encoding to system encoding is good enough
58 return unicode_filename.encode(sys.getfilesystemencoding(), "replace")58 return unicode_filename.encode(sys.getfilesystemencoding(), u"replace")
5959
60 def fsdecode(bytes_filename):60 def fsdecode(bytes_filename):
61 """Convert a filename encoded in the system encoding to unicode"""61 u"""Convert a filename encoded in the system encoding to unicode"""
62 # For paths, just use path.uc_name rather than converting with this62 # For paths, just use path.uc_name rather than converting with this
63 # If we are not doing any cleverness with non-unicode filename bytes,63 # If we are not doing any cleverness with non-unicode filename bytes,
64 # decoding using system encoding is good enough. Use "ignore" as64 # decoding using system encoding is good enough. Use "ignore" as
65 # Linux paths can contain non-Unicode characters65 # Linux paths can contain non-Unicode characters
66 return bytes_filename.decode(globals.fsencoding, "replace")66 return bytes_filename.decode(globals.fsencoding, u"replace")
6767
6868
69def exception_traceback(limit=50):69def exception_traceback(limit=50):
70 """70 u"""
71 @return A string representation in typical Python format of the71 @return A string representation in typical Python format of the
72 currently active/raised exception.72 currently active/raised exception.
73 """73 """
@@ -76,20 +76,20 @@
76 lines = traceback.format_tb(tb, limit)76 lines = traceback.format_tb(tb, limit)
77 lines.extend(traceback.format_exception_only(type, value))77 lines.extend(traceback.format_exception_only(type, value))
7878
79 msg = "Traceback (innermost last):\n"79 msg = u"Traceback (innermost last):\n"
80 msg = msg + "%-20s %s" % (string.join(lines[:-1], ""), lines[-1])80 msg = msg + u"%-20s %s" % (string.join(lines[:-1], u""), lines[-1])
8181
82 return msg.decode('unicode-escape', 'replace')82 return msg.decode(u'unicode-escape', u'replace')
8383
8484
85def escape(string):85def escape(string):
86 "Convert a (bytes) filename to a format suitable for logging (quoted utf8)"86 u"Convert a (bytes) filename to a format suitable for logging (quoted utf8)"
87 string = fsdecode(string).encode('unicode-escape', 'replace')87 string = fsdecode(string).encode(u'unicode-escape', u'replace')
88 return u"'%s'" % string.decode('utf8', 'replace')88 return u"'%s'" % string.decode(u'utf8', u'replace')
8989
9090
91def uindex(index):91def uindex(index):
92 "Convert an index (a tuple of path parts) to unicode for printing"92 u"Convert an index (a tuple of path parts) to unicode for printing"
93 if index:93 if index:
94 return os.path.join(*list(map(fsdecode, index)))94 return os.path.join(*list(map(fsdecode, index)))
95 else:95 else:
@@ -101,11 +101,11 @@
101 # non-ascii will cause a UnicodeDecodeError when implicitly decoding to101 # non-ascii will cause a UnicodeDecodeError when implicitly decoding to
102 # unicode. So we decode manually, using the filesystem encoding.102 # unicode. So we decode manually, using the filesystem encoding.
103 # 99.99% of the time, this will be a fine encoding to use.103 # 99.99% of the time, this will be a fine encoding to use.
104 return fsdecode(unicode(e).encode('utf-8'))104 return fsdecode(unicode(e).encode(u'utf-8'))
105105
106106
107def maybe_ignore_errors(fn):107def maybe_ignore_errors(fn):
108 """108 u"""
109 Execute fn. If the global configuration setting ignore_errors is109 Execute fn. If the global configuration setting ignore_errors is
110 set to True, catch errors and log them but do continue (and return110 set to True, catch errors and log them but do continue (and return
111 None).111 None).
@@ -117,7 +117,7 @@
117 return fn()117 return fn()
118 except Exception as e:118 except Exception as e:
119 if globals.ignore_errors:119 if globals.ignore_errors:
120 log.Warn(_("IGNORED_ERROR: Warning: ignoring error as requested: %s: %s")120 log.Warn(_(u"IGNORED_ERROR: Warning: ignoring error as requested: %s: %s")
121 % (e.__class__.__name__, uexc(e)))121 % (e.__class__.__name__, uexc(e)))
122 return None122 return None
123 else:123 else:
@@ -145,7 +145,7 @@
145 # yet. So we want to ignore ReadError exceptions, which are used to signal145 # yet. So we want to ignore ReadError exceptions, which are used to signal
146 # this.146 # this.
147 try:147 try:
148 tf = tarfile.TarFile("arbitrary", mode, fp)148 tf = tarfile.TarFile(u"arbitrary", mode, fp)
149 # Now we cause TarFile to not cache TarInfo objects. It would end up149 # Now we cause TarFile to not cache TarInfo objects. It would end up
150 # consuming a lot of memory over the lifetime of our long-lasting150 # consuming a lot of memory over the lifetime of our long-lasting
151 # signature files otherwise.151 # signature files otherwise.
@@ -159,14 +159,14 @@
159 # Python versions before 2.6 ensure that directories end with /, but 2.6159 # Python versions before 2.6 ensure that directories end with /, but 2.6
160 # and later ensure they they *don't* have /. ::shrug:: Internally, we160 # and later ensure they they *don't* have /. ::shrug:: Internally, we
161 # continue to use pre-2.6 method.161 # continue to use pre-2.6 method.
162 if ti.isdir() and not ti.name.endswith("/"):162 if ti.isdir() and not ti.name.endswith(b"/"):
163 return ti.name + "/"163 return ti.name + b"/"
164 else:164 else:
165 return ti.name165 return ti.name
166166
167167
168def ignore_missing(fn, filename):168def ignore_missing(fn, filename):
169 """169 u"""
170 Execute fn on filename. Ignore ENOENT errors, otherwise raise exception.170 Execute fn on filename. Ignore ENOENT errors, otherwise raise exception.
171171
172 @param fn: callable172 @param fn: callable
@@ -184,7 +184,7 @@
184@atexit.register184@atexit.register
185def release_lockfile():185def release_lockfile():
186 if globals.lockfile:186 if globals.lockfile:
187 log.Debug(_("Releasing lockfile %s") % globals.lockpath)187 log.Debug(_(u"Releasing lockfile %s") % globals.lockpath)
188 try:188 try:
189 globals.lockfile.release()189 globals.lockfile.release()
190 except Exception:190 except Exception:
@@ -192,7 +192,7 @@
192192
193193
194def copyfileobj(infp, outfp, byte_count=-1):194def copyfileobj(infp, outfp, byte_count=-1):
195 """Copy byte_count bytes from infp to outfp, or all if byte_count < 0195 u"""Copy byte_count bytes from infp to outfp, or all if byte_count < 0
196196
197 Returns the number of bytes actually written (may be less than197 Returns the number of bytes actually written (may be less than
198 byte_count if find eof. Does not close either fileobj.198 byte_count if find eof. Does not close either fileobj.
@@ -221,7 +221,7 @@
221221
222222
223def which(program):223def which(program):
224 """224 u"""
225 Return absolute path for program name.225 Return absolute path for program name.
226 Returns None if program not found.226 Returns None if program not found.
227 """227 """
@@ -234,8 +234,8 @@
234 if is_exe(program):234 if is_exe(program):
235 return program235 return program
236 else:236 else:
237 for path in os.getenv("PATH").split(os.pathsep):237 for path in os.getenv(u"PATH").split(os.pathsep):
238 path = path.strip('"')238 path = path.strip(u'"')
239 exe_file = os.path.abspath(os.path.join(path, program))239 exe_file = os.path.abspath(os.path.join(path, program))
240 if is_exe(exe_file):240 if is_exe(exe_file):
241 return exe_file241 return exe_file
242242
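
The fsencode()/fsdecode() helpers adorned in the util.py hunk above are the boundary between bytes filenames and unicode filenames. Below is a minimal round-trip sketch, assuming Python 2 and a UTF-8 filesystem encoding; the real helpers pull the encoding from sys.getfilesystemencoding() and globals.fsencoding as shown above.

# Round-trip sketch of fsencode()/fsdecode(); the hard-coded utf-8 encoding is
# an assumption, the real helpers query the interpreter/globals for it.
fs_encoding = u"utf-8"


def fsencode(unicode_filename):
    u"""unicode -> bytes, replacing anything the encoding cannot represent."""
    return unicode_filename.encode(fs_encoding, u"replace")


def fsdecode(bytes_filename):
    u"""bytes -> unicode, replacing undecodable bytes instead of raising."""
    return bytes_filename.decode(fs_encoding, u"replace")


name = u"b\u00e4ckup/file.txt"           # contains U+00E4 (a-umlaut)
assert fsdecode(fsencode(name)) == name  # lossless for names the encoding covers
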
=== modified file 'po/duplicity.pot'
--- po/duplicity.pot 2018-09-17 21:03:06 +0000
+++ po/duplicity.pot 2018-09-24 21:19:45 +0000
@@ -8,7 +8,7 @@
8msgstr ""8msgstr ""
9"Project-Id-Version: PACKAGE VERSION\n"9"Project-Id-Version: PACKAGE VERSION\n"
10"Report-Msgid-Bugs-To: Kenneth Loafman <kenneth@loafman.com>\n"10"Report-Msgid-Bugs-To: Kenneth Loafman <kenneth@loafman.com>\n"
11"POT-Creation-Date: 2018-09-17 15:38-0500\n"11"POT-Creation-Date: 2018-09-24 11:46-0500\n"
12"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"12"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
13"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"13"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
14"Language-Team: LANGUAGE <LL@li.org>\n"14"Language-Team: LANGUAGE <LL@li.org>\n"
1515
=== modified file 'testing/test_code.py'
--- testing/test_code.py 2018-09-17 21:03:06 +0000
+++ testing/test_code.py 2018-09-24 21:19:45 +0000
@@ -113,12 +113,6 @@
113 os.path.join(_top_dir, u'setup.py'),113 os.path.join(_top_dir, u'setup.py'),
114 os.path.join(_top_dir, u'duplicity', u'__init__.py'),114 os.path.join(_top_dir, u'duplicity', u'__init__.py'),
115 os.path.join(_top_dir, u'duplicity', u'compilec.py'),115 os.path.join(_top_dir, u'duplicity', u'compilec.py'),
116 os.path.join(_top_dir, u'duplicity', u'diffdir.py'),
117 os.path.join(_top_dir, u'duplicity', u'manifest.py'),
118 os.path.join(_top_dir, u'duplicity', u'patchdir.py'),
119 os.path.join(_top_dir, u'duplicity', u'path.py'),
120 os.path.join(_top_dir, u'duplicity', u'statistics.py'),
121 os.path.join(_top_dir, u'duplicity', u'util.py'),
122 os.path.join(_top_dir, u'testing', u'overrides', u'gettext.py'),116 os.path.join(_top_dir, u'testing', u'overrides', u'gettext.py'),
123 os.path.join(_top_dir, u'testing', u'test_unadorned.py'),117 os.path.join(_top_dir, u'testing', u'test_unadorned.py'),
124 os.path.join(_top_dir, u'testing', u'unit', u'test_statistics.py'),118 os.path.join(_top_dir, u'testing', u'unit', u'test_statistics.py'),
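
The six duplicity/*.py entries removed from the list in testing/test_code.py above appear to be the skip list for the unadorned-string check, so those files are now expected to carry explicit prefixes throughout. The distinction the prefixes make is a Python 2 one, roughly as sketched here:

# Python 2 only: the prefix fixes the literal's type, which is what the
# adornments in this branch make explicit (unicode text vs. bytes).
assert isinstance("plain", str)       # unadorned: bytes (str) on Python 2, text on Python 3
assert isinstance(u"text", unicode)   # adorned: always a text string
assert isinstance(b"raw", bytes)      # adorned: always bytes
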
