Merge lp:~mgorse/duplicity/0.8-series into lp:~duplicity-team/duplicity/0.8-series
- 0.8-series
- Merge into 0.8-series
Proposed by
Mgorse
Status: | Merged | ||||
---|---|---|---|---|---|
Merged at revision: | 1335 | ||||
Proposed branch: | lp:~mgorse/duplicity/0.8-series | ||||
Merge into: | lp:~duplicity-team/duplicity/0.8-series | ||||
Diff against target: |
2983 lines (+560/-566) 8 files modified
duplicity/diffdir.py (+100/-100) duplicity/manifest.py (+109/-109) duplicity/patchdir.py (+75/-75) duplicity/path.py (+154/-154) duplicity/statistics.py (+96/-96) duplicity/util.py (+25/-25) po/duplicity.pot (+1/-1) testing/test_code.py (+0/-6) |
||||
To merge this branch: | bzr merge lp:~mgorse/duplicity/0.8-series | ||||
Related bugs: |
|
||||
Related blueprints: |
Python 3 Support
(High)
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
duplicity-team | Pending | ||
Review via email: mp+355568@code.launchpad.net |
Commit message
Description of the change
Annotate more strings in duplicity/*.py
To post a comment you must log in.
Revision history for this message
Mgorse (mgorse) wrote : | # |
lp:~mgorse/duplicity/0.8-series
updated
- 1335. By Kenneth Loafman
-
* Merged in lp:~mgorse/duplicity/0.8-series
- Adorn more strings in duplicity/*.py
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'duplicity/diffdir.py' | |||
2 | --- duplicity/diffdir.py 2018-07-27 02:18:12 +0000 | |||
3 | +++ duplicity/diffdir.py 2018-09-24 21:19:45 +0000 | |||
4 | @@ -19,7 +19,7 @@ | |||
5 | 19 | # along with duplicity; if not, write to the Free Software Foundation, | 19 | # along with duplicity; if not, write to the Free Software Foundation, |
6 | 20 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | 20 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
7 | 21 | 21 | ||
9 | 22 | """ | 22 | u""" |
10 | 23 | Functions for producing signatures and deltas of directories | 23 | Functions for producing signatures and deltas of directories |
11 | 24 | 24 | ||
12 | 25 | Note that the main processes of this module have two parts. In the | 25 | Note that the main processes of this module have two parts. In the |
13 | @@ -49,14 +49,14 @@ | |||
14 | 49 | 49 | ||
15 | 50 | 50 | ||
16 | 51 | def DirSig(path_iter): | 51 | def DirSig(path_iter): |
18 | 52 | """ | 52 | u""" |
19 | 53 | Alias for SigTarBlockIter below | 53 | Alias for SigTarBlockIter below |
20 | 54 | """ | 54 | """ |
21 | 55 | return SigTarBlockIter(path_iter) | 55 | return SigTarBlockIter(path_iter) |
22 | 56 | 56 | ||
23 | 57 | 57 | ||
24 | 58 | def DirFull(path_iter): | 58 | def DirFull(path_iter): |
26 | 59 | """ | 59 | u""" |
27 | 60 | Return a tarblock full backup of items in path_iter | 60 | Return a tarblock full backup of items in path_iter |
28 | 61 | 61 | ||
29 | 62 | A full backup is just a diff starting from nothing (it may be less | 62 | A full backup is just a diff starting from nothing (it may be less |
30 | @@ -64,18 +64,18 @@ | |||
31 | 64 | will be easy to split up the tar and make the volumes the same | 64 | will be easy to split up the tar and make the volumes the same |
32 | 65 | sizes). | 65 | sizes). |
33 | 66 | """ | 66 | """ |
35 | 67 | return DirDelta(path_iter, cStringIO.StringIO("")) | 67 | return DirDelta(path_iter, cStringIO.StringIO(u"")) |
36 | 68 | 68 | ||
37 | 69 | 69 | ||
38 | 70 | def DirFull_WriteSig(path_iter, sig_outfp): | 70 | def DirFull_WriteSig(path_iter, sig_outfp): |
40 | 71 | """ | 71 | u""" |
41 | 72 | Return full backup like above, but also write signature to sig_outfp | 72 | Return full backup like above, but also write signature to sig_outfp |
42 | 73 | """ | 73 | """ |
44 | 74 | return DirDelta_WriteSig(path_iter, cStringIO.StringIO(""), sig_outfp) | 74 | return DirDelta_WriteSig(path_iter, cStringIO.StringIO(u""), sig_outfp) |
45 | 75 | 75 | ||
46 | 76 | 76 | ||
47 | 77 | def DirDelta(path_iter, dirsig_fileobj_list): | 77 | def DirDelta(path_iter, dirsig_fileobj_list): |
49 | 78 | """ | 78 | u""" |
50 | 79 | Produce tarblock diff given dirsig_fileobj_list and pathiter | 79 | Produce tarblock diff given dirsig_fileobj_list and pathiter |
51 | 80 | 80 | ||
52 | 81 | dirsig_fileobj_list should either be a tar fileobj or a list of | 81 | dirsig_fileobj_list should either be a tar fileobj or a list of |
53 | @@ -96,7 +96,7 @@ | |||
54 | 96 | 96 | ||
55 | 97 | 97 | ||
56 | 98 | def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None): | 98 | def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None): |
58 | 99 | """ | 99 | u""" |
59 | 100 | Called by get_delta_iter, report error in getting delta | 100 | Called by get_delta_iter, report error in getting delta |
60 | 101 | """ | 101 | """ |
61 | 102 | if new_path: | 102 | if new_path: |
62 | @@ -104,13 +104,13 @@ | |||
63 | 104 | elif sig_path: | 104 | elif sig_path: |
64 | 105 | index_string = sig_path.get_relative_path() | 105 | index_string = sig_path.get_relative_path() |
65 | 106 | else: | 106 | else: |
68 | 107 | assert 0, "Both new and sig are None for some reason" | 107 | assert 0, u"Both new and sig are None for some reason" |
69 | 108 | log.Warn(_("Error %s getting delta for %s") % (str(exc), util.fsdecode(index_string))) | 108 | log.Warn(_(u"Error %s getting delta for %s") % (str(exc), util.fsdecode(index_string))) |
70 | 109 | return None | 109 | return None |
71 | 110 | 110 | ||
72 | 111 | 111 | ||
73 | 112 | def get_delta_path(new_path, sig_path, sigTarFile=None): | 112 | def get_delta_path(new_path, sig_path, sigTarFile=None): |
75 | 113 | """ | 113 | u""" |
76 | 114 | Return new delta_path which, when read, writes sig to sig_fileobj, | 114 | Return new delta_path which, when read, writes sig to sig_fileobj, |
77 | 115 | if sigTarFile is not None | 115 | if sigTarFile is not None |
78 | 116 | """ | 116 | """ |
79 | @@ -119,35 +119,35 @@ | |||
80 | 119 | ti = new_path.get_tarinfo() | 119 | ti = new_path.get_tarinfo() |
81 | 120 | index = new_path.index | 120 | index = new_path.index |
82 | 121 | delta_path = new_path.get_ropath() | 121 | delta_path = new_path.get_ropath() |
84 | 122 | log.Debug(_("Getting delta of %s and %s") % (new_path, sig_path)) | 122 | log.Debug(_(u"Getting delta of %s and %s") % (new_path, sig_path)) |
85 | 123 | 123 | ||
86 | 124 | def callback(sig_string): | 124 | def callback(sig_string): |
88 | 125 | """ | 125 | u""" |
89 | 126 | Callback activated when FileWithSignature read to end | 126 | Callback activated when FileWithSignature read to end |
90 | 127 | """ | 127 | """ |
91 | 128 | ti.size = len(sig_string) | 128 | ti.size = len(sig_string) |
93 | 129 | ti.name = "signature/" + "/".join(index) | 129 | ti.name = b"signature/" + b"/".join(index) |
94 | 130 | sigTarFile.addfile(ti, cStringIO.StringIO(sig_string)) | 130 | sigTarFile.addfile(ti, cStringIO.StringIO(sig_string)) |
95 | 131 | 131 | ||
100 | 132 | if new_path.isreg() and sig_path and sig_path.isreg() and sig_path.difftype == "signature": | 132 | if new_path.isreg() and sig_path and sig_path.isreg() and sig_path.difftype == u"signature": |
101 | 133 | delta_path.difftype = "diff" | 133 | delta_path.difftype = u"diff" |
102 | 134 | old_sigfp = sig_path.open("rb") | 134 | old_sigfp = sig_path.open(u"rb") |
103 | 135 | newfp = FileWithReadCounter(new_path.open("rb")) | 135 | newfp = FileWithReadCounter(new_path.open(u"rb")) |
104 | 136 | if sigTarFile: | 136 | if sigTarFile: |
105 | 137 | newfp = FileWithSignature(newfp, callback, | 137 | newfp = FileWithSignature(newfp, callback, |
106 | 138 | new_path.getsize()) | 138 | new_path.getsize()) |
107 | 139 | delta_path.setfileobj(librsync.DeltaFile(old_sigfp, newfp)) | 139 | delta_path.setfileobj(librsync.DeltaFile(old_sigfp, newfp)) |
108 | 140 | else: | 140 | else: |
110 | 141 | delta_path.difftype = "snapshot" | 141 | delta_path.difftype = u"snapshot" |
111 | 142 | if sigTarFile: | 142 | if sigTarFile: |
113 | 143 | ti.name = "snapshot/" + "/".join(index) | 143 | ti.name = b"snapshot/" + b"/".join(index) |
114 | 144 | if not new_path.isreg(): | 144 | if not new_path.isreg(): |
115 | 145 | if sigTarFile: | 145 | if sigTarFile: |
116 | 146 | sigTarFile.addfile(ti) | 146 | sigTarFile.addfile(ti) |
117 | 147 | if stats: | 147 | if stats: |
118 | 148 | stats.SourceFileSize += delta_path.getsize() | 148 | stats.SourceFileSize += delta_path.getsize() |
119 | 149 | else: | 149 | else: |
121 | 150 | newfp = FileWithReadCounter(new_path.open("rb")) | 150 | newfp = FileWithReadCounter(new_path.open(u"rb")) |
122 | 151 | if sigTarFile: | 151 | if sigTarFile: |
123 | 152 | newfp = FileWithSignature(newfp, callback, | 152 | newfp = FileWithSignature(newfp, callback, |
124 | 153 | new_path.getsize()) | 153 | new_path.getsize()) |
125 | @@ -158,27 +158,27 @@ | |||
126 | 158 | 158 | ||
127 | 159 | 159 | ||
128 | 160 | def log_delta_path(delta_path, new_path=None, stats=None): | 160 | def log_delta_path(delta_path, new_path=None, stats=None): |
130 | 161 | """ | 161 | u""" |
131 | 162 | Look at delta path and log delta. Add stats if new_path is set | 162 | Look at delta path and log delta. Add stats if new_path is set |
132 | 163 | """ | 163 | """ |
134 | 164 | if delta_path.difftype == "snapshot": | 164 | if delta_path.difftype == u"snapshot": |
135 | 165 | if new_path and stats: | 165 | if new_path and stats: |
136 | 166 | stats.add_new_file(new_path) | 166 | stats.add_new_file(new_path) |
138 | 167 | log.Info(_("A %s") % | 167 | log.Info(_(u"A %s") % |
139 | 168 | (util.fsdecode(delta_path.get_relative_path())), | 168 | (util.fsdecode(delta_path.get_relative_path())), |
140 | 169 | log.InfoCode.diff_file_new, | 169 | log.InfoCode.diff_file_new, |
141 | 170 | util.escape(delta_path.get_relative_path())) | 170 | util.escape(delta_path.get_relative_path())) |
142 | 171 | else: | 171 | else: |
143 | 172 | if new_path and stats: | 172 | if new_path and stats: |
144 | 173 | stats.add_changed_file(new_path) | 173 | stats.add_changed_file(new_path) |
146 | 174 | log.Info(_("M %s") % | 174 | log.Info(_(u"M %s") % |
147 | 175 | (util.fsdecode(delta_path.get_relative_path())), | 175 | (util.fsdecode(delta_path.get_relative_path())), |
148 | 176 | log.InfoCode.diff_file_changed, | 176 | log.InfoCode.diff_file_changed, |
149 | 177 | util.escape(delta_path.get_relative_path())) | 177 | util.escape(delta_path.get_relative_path())) |
150 | 178 | 178 | ||
151 | 179 | 179 | ||
152 | 180 | def get_delta_iter(new_iter, sig_iter, sig_fileobj=None): | 180 | def get_delta_iter(new_iter, sig_iter, sig_fileobj=None): |
154 | 181 | """ | 181 | u""" |
155 | 182 | Generate delta iter from new Path iter and sig Path iter. | 182 | Generate delta iter from new Path iter and sig Path iter. |
156 | 183 | 183 | ||
157 | 184 | For each delta path of regular file type, path.difftype will be | 184 |
158 | @@ -189,25 +189,25 @@ | |||
159 | 189 | """ | 189 | """ |
160 | 190 | collated = collate2iters(new_iter, sig_iter) | 190 | collated = collate2iters(new_iter, sig_iter) |
161 | 191 | if sig_fileobj: | 191 | if sig_fileobj: |
163 | 192 | sigTarFile = util.make_tarfile("w", sig_fileobj) | 192 | sigTarFile = util.make_tarfile(u"w", sig_fileobj) |
164 | 193 | else: | 193 | else: |
165 | 194 | sigTarFile = None | 194 | sigTarFile = None |
166 | 195 | for new_path, sig_path in collated: | 195 | for new_path, sig_path in collated: |
169 | 196 | log.Debug(_("Comparing %s and %s") % (new_path and util.uindex(new_path.index), | 196 | log.Debug(_(u"Comparing %s and %s") % (new_path and util.uindex(new_path.index), |
170 | 197 | sig_path and util.uindex(sig_path.index))) | 197 | sig_path and util.uindex(sig_path.index))) |
171 | 198 | if not new_path or not new_path.type: | 198 | if not new_path or not new_path.type: |
172 | 199 | # File doesn't exist (but ignore attempts to delete base dir; | 199 | # File doesn't exist (but ignore attempts to delete base dir; |
173 | 200 | # old versions of duplicity could have written out the sigtar in | 200 | # old versions of duplicity could have written out the sigtar in |
174 | 201 | # such a way as to fool us; LP: #929067) | 201 | # such a way as to fool us; LP: #929067) |
175 | 202 | if sig_path and sig_path.exists() and sig_path.index != (): | 202 | if sig_path and sig_path.exists() and sig_path.index != (): |
176 | 203 | # but signature says it did | 203 | # but signature says it did |
178 | 204 | log.Info(_("D %s") % | 204 | log.Info(_(u"D %s") % |
179 | 205 | (util.fsdecode(sig_path.get_relative_path())), | 205 | (util.fsdecode(sig_path.get_relative_path())), |
180 | 206 | log.InfoCode.diff_file_deleted, | 206 | log.InfoCode.diff_file_deleted, |
181 | 207 | util.escape(sig_path.get_relative_path())) | 207 | util.escape(sig_path.get_relative_path())) |
182 | 208 | if sigTarFile: | 208 | if sigTarFile: |
183 | 209 | ti = ROPath(sig_path.index).get_tarinfo() | 209 | ti = ROPath(sig_path.index).get_tarinfo() |
185 | 210 | ti.name = "deleted/" + "/".join(sig_path.index) | 210 | ti.name = u"deleted/" + u"/".join(sig_path.index) |
186 | 211 | sigTarFile.addfile(ti) | 211 | sigTarFile.addfile(ti) |
187 | 212 | stats.add_deleted_file(sig_path) | 212 | stats.add_deleted_file(sig_path) |
188 | 213 | yield ROPath(sig_path.index) | 213 | yield ROPath(sig_path.index) |
189 | @@ -231,28 +231,28 @@ | |||
190 | 231 | 231 | ||
191 | 232 | 232 | ||
192 | 233 | def sigtar2path_iter(sigtarobj): | 233 | def sigtar2path_iter(sigtarobj): |
194 | 234 | """ | 234 | u""" |
195 | 235 | Convert signature tar file object open for reading into path iter | 235 | Convert signature tar file object open for reading into path iter |
196 | 236 | """ | 236 | """ |
198 | 237 | tf = util.make_tarfile("r", sigtarobj) | 237 | tf = util.make_tarfile(u"r", sigtarobj) |
199 | 238 | tf.debug = 1 | 238 | tf.debug = 1 |
200 | 239 | for tarinfo in tf: | 239 | for tarinfo in tf: |
201 | 240 | tiname = util.get_tarinfo_name(tarinfo) | 240 | tiname = util.get_tarinfo_name(tarinfo) |
203 | 241 | for prefix in ["signature/", "snapshot/", "deleted/"]: | 241 | for prefix in [b"signature/", b"snapshot/", b"deleted/"]: |
204 | 242 | if tiname.startswith(prefix): | 242 | if tiname.startswith(prefix): |
205 | 243 | # strip prefix and '/' from name and set it to difftype | 243 | # strip prefix and '/' from name and set it to difftype |
206 | 244 | name, difftype = tiname[len(prefix):], prefix[:-1] | 244 | name, difftype = tiname[len(prefix):], prefix[:-1] |
207 | 245 | break | 245 | break |
208 | 246 | else: | 246 | else: |
210 | 247 | raise DiffDirException("Bad tarinfo name %s" % (tiname,)) | 247 | raise DiffDirException(u"Bad tarinfo name %s" % (tiname,)) |
211 | 248 | 248 | ||
213 | 249 | index = tuple(name.split("/")) | 249 | index = tuple(name.split(u"/")) |
214 | 250 | if not index[-1]: | 250 | if not index[-1]: |
215 | 251 | index = index[:-1] # deal with trailing /, "" | 251 | index = index[:-1] # deal with trailing /, "" |
216 | 252 | 252 | ||
217 | 253 | ropath = ROPath(index) | 253 | ropath = ROPath(index) |
218 | 254 | ropath.difftype = difftype | 254 | ropath.difftype = difftype |
220 | 255 | if difftype == "signature" or difftype == "snapshot": | 255 | if difftype == u"signature" or difftype == u"snapshot": |
221 | 256 | ropath.init_from_tarinfo(tarinfo) | 256 | ropath.init_from_tarinfo(tarinfo) |
222 | 257 | if ropath.isreg(): | 257 | if ropath.isreg(): |
223 | 258 | ropath.setfileobj(tf.extractfile(tarinfo)) | 258 | ropath.setfileobj(tf.extractfile(tarinfo)) |
224 | @@ -261,7 +261,7 @@ | |||
225 | 261 | 261 | ||
226 | 262 | 262 | ||
227 | 263 | def collate2iters(riter1, riter2): | 263 | def collate2iters(riter1, riter2): |
229 | 264 | """ | 264 | u""" |
230 | 265 | Collate two iterators. | 265 | Collate two iterators. |
231 | 266 | 266 | ||
232 | 267 | The elements yielded by each iterator must have an index | 267 |
233 | @@ -305,7 +305,7 @@ | |||
234 | 305 | 305 | ||
235 | 306 | 306 | ||
236 | 307 | def combine_path_iters(path_iter_list): | 307 | def combine_path_iters(path_iter_list): |
238 | 308 | """ | 308 | u""" |
239 | 309 | Produce new iterator by combining the iterators in path_iter_list | 309 | Produce new iterator by combining the iterators in path_iter_list |
240 | 310 | 310 | ||
241 | 311 | This new iter will iterate every path that is in path_iter_list in | 311 | This new iter will iterate every path that is in path_iter_list in |
242 | @@ -320,7 +320,7 @@ | |||
243 | 320 | path_iter_list.reverse() | 320 | path_iter_list.reverse() |
244 | 321 | 321 | ||
245 | 322 | def get_triple(iter_index): | 322 | def get_triple(iter_index): |
247 | 323 | """ | 323 | u""" |
248 | 324 | Represent the next element as a triple, to help sorting | 324 | Represent the next element as a triple, to help sorting |
249 | 325 | """ | 325 | """ |
250 | 326 | try: | 326 | try: |
251 | @@ -330,7 +330,7 @@ | |||
252 | 330 | return (path.index, iter_index, path) | 330 | return (path.index, iter_index, path) |
253 | 331 | 331 | ||
254 | 332 | def refresh_triple_list(triple_list): | 332 | def refresh_triple_list(triple_list): |
256 | 333 | """ | 333 | u""" |
257 | 334 | Update all elements with path_index same as first element | 334 | Update all elements with path_index same as first element |
258 | 335 | """ | 335 | """ |
259 | 336 | path_index = triple_list[0][0] | 336 | path_index = triple_list[0][0] |
260 | @@ -355,7 +355,7 @@ | |||
261 | 355 | 355 | ||
262 | 356 | 356 | ||
263 | 357 | def DirDelta_WriteSig(path_iter, sig_infp_list, newsig_outfp): | 357 | def DirDelta_WriteSig(path_iter, sig_infp_list, newsig_outfp): |
265 | 358 | """ | 358 | u""" |
266 | 359 | Like DirDelta but also write signature into sig_fileobj | 359 | Like DirDelta but also write signature into sig_fileobj |
267 | 360 | 360 | ||
268 | 361 | Like DirDelta, sig_infp_list can be a tar fileobj or a sorted list | 361 | Like DirDelta, sig_infp_list can be a tar fileobj or a sorted list |
269 | @@ -376,26 +376,26 @@ | |||
270 | 376 | 376 | ||
271 | 377 | 377 | ||
272 | 378 | def get_combined_path_iter(sig_infp_list): | 378 | def get_combined_path_iter(sig_infp_list): |
274 | 379 | """ | 379 | u""" |
275 | 380 | Return path iter combining signatures in list of open sig files | 380 | Return path iter combining signatures in list of open sig files |
276 | 381 | """ | 381 | """ |
277 | 382 | return combine_path_iters([sigtar2path_iter(x) for x in sig_infp_list]) | 382 | return combine_path_iters([sigtar2path_iter(x) for x in sig_infp_list]) |
278 | 383 | 383 | ||
279 | 384 | 384 | ||
280 | 385 | class FileWithReadCounter: | 385 | class FileWithReadCounter: |
282 | 386 | """ | 386 | u""" |
283 | 387 | File-like object which also computes amount read as it is read | 387 | File-like object which also computes amount read as it is read |
284 | 388 | """ | 388 | """ |
285 | 389 | def __init__(self, infile): | 389 | def __init__(self, infile): |
287 | 390 | """FileWithReadCounter initializer""" | 390 | u"""FileWithReadCounter initializer""" |
288 | 391 | self.infile = infile | 391 | self.infile = infile |
289 | 392 | 392 | ||
290 | 393 | def read(self, length=-1): | 393 | def read(self, length=-1): |
291 | 394 | try: | 394 | try: |
292 | 395 | buf = self.infile.read(length) | 395 | buf = self.infile.read(length) |
293 | 396 | except IOError as ex: | 396 | except IOError as ex: |
296 | 397 | buf = "" | 397 | buf = u"" |
297 | 398 | log.Warn(_("Error %s getting delta for %s") % (str(ex), self.infile.uc_name)) | 398 | log.Warn(_(u"Error %s getting delta for %s") % (str(ex), self.infile.uc_name)) |
298 | 399 | if stats: | 399 | if stats: |
299 | 400 | stats.SourceFileSize += len(buf) | 400 | stats.SourceFileSize += len(buf) |
300 | 401 | return buf | 401 | return buf |
301 | @@ -405,13 +405,13 @@ | |||
302 | 405 | 405 | ||
303 | 406 | 406 | ||
304 | 407 | class FileWithSignature: | 407 | class FileWithSignature: |
306 | 408 | """ | 408 | u""" |
307 | 409 | File-like object which also computes signature as it is read | 409 | File-like object which also computes signature as it is read |
308 | 410 | """ | 410 | """ |
309 | 411 | blocksize = 32 * 1024 | 411 | blocksize = 32 * 1024 |
310 | 412 | 412 | ||
311 | 413 | def __init__(self, infile, callback, filelen, *extra_args): | 413 | def __init__(self, infile, callback, filelen, *extra_args): |
313 | 414 | """ | 414 | u""" |
314 | 415 | FileTee initializer | 415 | FileTee initializer |
315 | 416 | 416 | ||
316 | 417 | The object will act like infile, but whenever it is read it | 417 | The object will act like infile, but whenever it is read it |
317 | @@ -442,11 +442,11 @@ | |||
318 | 442 | 442 | ||
319 | 443 | 443 | ||
320 | 444 | class TarBlock: | 444 | class TarBlock: |
322 | 445 | """ | 445 | u""" |
323 | 446 | Contain information to add next file to tar | 446 | Contain information to add next file to tar |
324 | 447 | """ | 447 | """ |
325 | 448 | def __init__(self, index, data): | 448 | def __init__(self, index, data): |
327 | 449 | """ | 449 | u""" |
328 | 450 | TarBlock initializer - just store data | 450 | TarBlock initializer - just store data |
329 | 451 | """ | 451 | """ |
330 | 452 | self.index = index | 452 | self.index = index |
331 | @@ -454,7 +454,7 @@ | |||
332 | 454 | 454 | ||
333 | 455 | 455 | ||
334 | 456 | class TarBlockIter: | 456 | class TarBlockIter: |
336 | 457 | """ | 457 | u""" |
337 | 458 | A bit like an iterator, yield tar blocks given input iterator | 458 | A bit like an iterator, yield tar blocks given input iterator |
338 | 459 | 459 | ||
339 | 460 | Unlike an iterator, however, control over the maximum size of a | 460 | Unlike an iterator, however, control over the maximum size of a |
340 | @@ -462,7 +462,7 @@ | |||
341 | 462 | get_footer() is available. | 462 | get_footer() is available. |
342 | 463 | """ | 463 | """ |
343 | 464 | def __init__(self, input_iter): | 464 | def __init__(self, input_iter): |
345 | 465 | """ | 465 | u""" |
346 | 466 | TarBlockIter initializer | 466 | TarBlockIter initializer |
347 | 467 | """ | 467 | """ |
348 | 468 | self.input_iter = input_iter | 468 | self.input_iter = input_iter |
349 | @@ -476,28 +476,28 @@ | |||
350 | 476 | self.remember_block = None # holds block of next block | 476 | self.remember_block = None # holds block of next block |
351 | 477 | self.queued_data = None # data to return in next next() call | 477 | self.queued_data = None # data to return in next next() call |
352 | 478 | 478 | ||
355 | 479 | def tarinfo2tarblock(self, index, tarinfo, file_data=""): | 479 | def tarinfo2tarblock(self, index, tarinfo, file_data=b""): |
356 | 480 | """ | 480 | u""" |
357 | 481 | Make tarblock out of tarinfo and file data | 481 | Make tarblock out of tarinfo and file data |
358 | 482 | """ | 482 | """ |
359 | 483 | tarinfo.size = len(file_data) | 483 | tarinfo.size = len(file_data) |
361 | 484 | headers = tarinfo.tobuf(errors='replace') | 484 | headers = tarinfo.tobuf(errors=u'replace') |
362 | 485 | blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE) # @UnusedVariable | 485 | blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE) # @UnusedVariable |
363 | 486 | if remainder > 0: | 486 | if remainder > 0: |
365 | 487 | filler_data = "\0" * (tarfile.BLOCKSIZE - remainder) | 487 | filler_data = b"\0" * (tarfile.BLOCKSIZE - remainder) |
366 | 488 | else: | 488 | else: |
369 | 489 | filler_data = "" | 489 | filler_data = b"" |
370 | 490 | return TarBlock(index, "%s%s%s" % (headers, file_data, filler_data)) | 490 | return TarBlock(index, b"%s%s%s" % (headers, file_data, filler_data)) |
371 | 491 | 491 | ||
372 | 492 | def process(self, val): | 492 | def process(self, val): |
374 | 493 | """ | 493 | u""" |
375 | 494 | Turn next value of input_iter into a TarBlock | 494 | Turn next value of input_iter into a TarBlock |
376 | 495 | """ | 495 | """ |
377 | 496 | assert not self.process_waiting | 496 | assert not self.process_waiting |
378 | 497 | XXX # Override in subclass @UndefinedVariable | 497 | XXX # Override in subclass @UndefinedVariable |
379 | 498 | 498 | ||
380 | 499 | def process_continued(self): | 499 | def process_continued(self): |
382 | 500 | """ | 500 | u""" |
383 | 501 | Get more tarblocks | 501 | Get more tarblocks |
384 | 502 | 502 | ||
385 | 503 | If processing val above would produce more than one TarBlock, | 503 | If processing val above would produce more than one TarBlock, |
386 | @@ -507,7 +507,7 @@ | |||
387 | 507 | XXX # Override in subclass @UndefinedVariable | 507 | XXX # Override in subclass @UndefinedVariable |
388 | 508 | 508 | ||
389 | 509 | def next(self): | 509 | def next(self): |
391 | 510 | """ | 510 | u""" |
392 | 511 | Return next block and update offset | 511 | Return next block and update offset |
393 | 512 | """ | 512 | """ |
394 | 513 | if self.queued_data is not None: | 513 | if self.queued_data is not None: |
395 | @@ -539,19 +539,19 @@ | |||
396 | 539 | return 64 * 1024 | 539 | return 64 * 1024 |
397 | 540 | 540 | ||
398 | 541 | def get_previous_index(self): | 541 | def get_previous_index(self): |
400 | 542 | """ | 542 | u""" |
401 | 543 | Return index of last tarblock, or None if no previous index | 543 | Return index of last tarblock, or None if no previous index |
402 | 544 | """ | 544 | """ |
403 | 545 | return self.previous_index, self.previous_block | 545 | return self.previous_index, self.previous_block |
404 | 546 | 546 | ||
405 | 547 | def queue_index_data(self, data): | 547 | def queue_index_data(self, data): |
407 | 548 | """ | 548 | u""" |
408 | 549 | Next time next() is called, we will return data instead of processing | 549 | Next time next() is called, we will return data instead of processing |
409 | 550 | """ | 550 | """ |
410 | 551 | self.queued_data = data | 551 | self.queued_data = data |
411 | 552 | 552 | ||
412 | 553 | def remember_next_index(self): | 553 | def remember_next_index(self): |
414 | 554 | """ | 554 | u""" |
415 | 555 | When called, remember the index of the next block iterated | 555 | When called, remember the index of the next block iterated |
416 | 556 | """ | 556 | """ |
417 | 557 | self.remember_next = True | 557 | self.remember_next = True |
418 | @@ -559,29 +559,29 @@ | |||
419 | 559 | self.remember_block = None | 559 | self.remember_block = None |
420 | 560 | 560 | ||
421 | 561 | def recall_index(self): | 561 | def recall_index(self): |
423 | 562 | """ | 562 | u""" |
424 | 563 | Retrieve index remembered with remember_next_index | 563 | Retrieve index remembered with remember_next_index |
425 | 564 | """ | 564 | """ |
426 | 565 | return self.remember_value, self.remember_block | 565 | return self.remember_value, self.remember_block |
427 | 566 | 566 | ||
428 | 567 | def get_footer(self): | 567 | def get_footer(self): |
430 | 568 | """ | 568 | u""" |
431 | 569 | Return closing string for tarfile, reset offset | 569 | Return closing string for tarfile, reset offset |
432 | 570 | """ | 570 | """ |
433 | 571 | blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) # @UnusedVariable | 571 | blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) # @UnusedVariable |
434 | 572 | self.offset = 0 | 572 | self.offset = 0 |
436 | 573 | return '\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0 | 573 | return u'\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0 |
437 | 574 | 574 | ||
438 | 575 | def __iter__(self): | 575 | def __iter__(self): |
439 | 576 | return self | 576 | return self |
440 | 577 | 577 | ||
441 | 578 | 578 | ||
442 | 579 | class DummyBlockIter(TarBlockIter): | 579 | class DummyBlockIter(TarBlockIter): |
444 | 580 | """ | 580 | u""" |
445 | 581 | TarBlockIter that does no file reading | 581 | TarBlockIter that does no file reading |
446 | 582 | """ | 582 | """ |
447 | 583 | def process(self, delta_ropath): | 583 | def process(self, delta_ropath): |
449 | 584 | """ | 584 | u""" |
450 | 585 | Get a fake tarblock from delta_ropath | 585 | Get a fake tarblock from delta_ropath |
451 | 586 | """ | 586 | """ |
452 | 587 | ti = delta_ropath.get_tarinfo() | 587 | ti = delta_ropath.get_tarinfo() |
453 | @@ -601,28 +601,28 @@ | |||
454 | 601 | 601 | ||
455 | 602 | 602 | ||
456 | 603 | class SigTarBlockIter(TarBlockIter): | 603 | class SigTarBlockIter(TarBlockIter): |
458 | 604 | """ | 604 | u""" |
459 | 605 | TarBlockIter that yields blocks of a signature tar from path_iter | 605 | TarBlockIter that yields blocks of a signature tar from path_iter |
460 | 606 | """ | 606 | """ |
461 | 607 | def process(self, path): | 607 | def process(self, path): |
463 | 608 | """ | 608 | u""" |
464 | 609 | Return associated signature TarBlock from path | 609 | Return associated signature TarBlock from path |
465 | 610 | """ | 610 | """ |
466 | 611 | ti = path.get_tarinfo() | 611 | ti = path.get_tarinfo() |
467 | 612 | if path.isreg(): | 612 | if path.isreg(): |
469 | 613 | sfp = librsync.SigFile(path.open("rb"), | 613 | sfp = librsync.SigFile(path.open(u"rb"), |
470 | 614 | get_block_size(path.getsize())) | 614 | get_block_size(path.getsize())) |
471 | 615 | sigbuf = sfp.read() | 615 | sigbuf = sfp.read() |
472 | 616 | sfp.close() | 616 | sfp.close() |
474 | 617 | ti.name = "signature/" + "/".join(path.index) | 617 | ti.name = u"signature/" + u"/".join(path.index) |
475 | 618 | return self.tarinfo2tarblock(path.index, ti, sigbuf) | 618 | return self.tarinfo2tarblock(path.index, ti, sigbuf) |
476 | 619 | else: | 619 | else: |
478 | 620 | ti.name = "snapshot/" + "/".join(path.index) | 620 | ti.name = u"snapshot/" + u"/".join(path.index) |
479 | 621 | return self.tarinfo2tarblock(path.index, ti) | 621 | return self.tarinfo2tarblock(path.index, ti) |
480 | 622 | 622 | ||
481 | 623 | 623 | ||
482 | 624 | class DeltaTarBlockIter(TarBlockIter): | 624 | class DeltaTarBlockIter(TarBlockIter): |
484 | 625 | """ | 625 | u""" |
485 | 626 | TarBlockIter that yields parts of a deltatar file | 626 | TarBlockIter that yields parts of a deltatar file |
486 | 627 | 627 | ||
487 | 628 | Unlike SigTarBlockIter, the argument to __init__ is a | 628 | Unlike SigTarBlockIter, the argument to __init__ is a |
488 | @@ -630,15 +630,15 @@ | |||
489 | 630 | calculated. | 630 | calculated. |
490 | 631 | """ | 631 | """ |
491 | 632 | def process(self, delta_ropath): | 632 | def process(self, delta_ropath): |
493 | 633 | """ | 633 | u""" |
494 | 634 | Get a tarblock from delta_ropath | 634 | Get a tarblock from delta_ropath |
495 | 635 | """ | 635 | """ |
496 | 636 | def add_prefix(tarinfo, prefix): | 636 | def add_prefix(tarinfo, prefix): |
500 | 637 | """Add prefix to the name of a tarinfo file""" | 637 | u"""Add prefix to the name of a tarinfo file""" |
501 | 638 | if tarinfo.name == ".": | 638 | if tarinfo.name == b".": |
502 | 639 | tarinfo.name = prefix + "/" | 639 | tarinfo.name = prefix + b"/" |
503 | 640 | else: | 640 | else: |
505 | 641 | tarinfo.name = "%s/%s" % (prefix, tarinfo.name) | 641 | tarinfo.name = b"%s/%s" % (prefix, tarinfo.name) |
506 | 642 | 642 | ||
507 | 643 | ti = delta_ropath.get_tarinfo() | 643 | ti = delta_ropath.get_tarinfo() |
508 | 644 | index = delta_ropath.index | 644 | index = delta_ropath.index |
509 | @@ -646,29 +646,29 @@ | |||
510 | 646 | # Return blocks of deleted files or fileless snapshots | 646 | # Return blocks of deleted files or fileless snapshots |
511 | 647 | if not delta_ropath.type or not delta_ropath.fileobj: | 647 | if not delta_ropath.type or not delta_ropath.fileobj: |
512 | 648 | if not delta_ropath.type: | 648 | if not delta_ropath.type: |
514 | 649 | add_prefix(ti, "deleted") | 649 | add_prefix(ti, u"deleted") |
515 | 650 | else: | 650 | else: |
518 | 651 | assert delta_ropath.difftype == "snapshot" | 651 | assert delta_ropath.difftype == u"snapshot" |
519 | 652 | add_prefix(ti, "snapshot") | 652 | add_prefix(ti, b"snapshot") |
520 | 653 | return self.tarinfo2tarblock(index, ti) | 653 | return self.tarinfo2tarblock(index, ti) |
521 | 654 | 654 | ||
522 | 655 | # Now handle single volume block case | 655 | # Now handle single volume block case |
524 | 656 | fp = delta_ropath.open("rb") | 656 | fp = delta_ropath.open(u"rb") |
525 | 657 | data, last_block = self.get_data_block(fp) | 657 | data, last_block = self.get_data_block(fp) |
526 | 658 | if stats: | 658 | if stats: |
527 | 659 | stats.RawDeltaSize += len(data) | 659 | stats.RawDeltaSize += len(data) |
528 | 660 | if last_block: | 660 | if last_block: |
533 | 661 | if delta_ropath.difftype == "snapshot": | 661 | if delta_ropath.difftype == u"snapshot": |
534 | 662 | add_prefix(ti, "snapshot") | 662 | add_prefix(ti, b"snapshot") |
535 | 663 | elif delta_ropath.difftype == "diff": | 663 | elif delta_ropath.difftype == u"diff": |
536 | 664 | add_prefix(ti, "diff") | 664 | add_prefix(ti, b"diff") |
537 | 665 | else: | 665 | else: |
539 | 666 | assert 0, "Unknown difftype" | 666 | assert 0, u"Unknown difftype" |
540 | 667 | return self.tarinfo2tarblock(index, ti, data) | 667 | return self.tarinfo2tarblock(index, ti, data) |
541 | 668 | 668 | ||
542 | 669 | # Finally, do multivol snapshot or diff case | 669 | # Finally, do multivol snapshot or diff case |
545 | 670 | full_name = "multivol_%s/%s" % (delta_ropath.difftype, ti.name) | 670 | full_name = u"multivol_%s/%s" % (delta_ropath.difftype, ti.name) |
546 | 671 | ti.name = full_name + "/1" | 671 | ti.name = full_name + u"/1" |
547 | 672 | self.process_prefix = full_name | 672 | self.process_prefix = full_name |
548 | 673 | self.process_fp = fp | 673 | self.process_fp = fp |
549 | 674 | self.process_ropath = delta_ropath | 674 | self.process_ropath = delta_ropath |
550 | @@ -677,26 +677,26 @@ | |||
551 | 677 | return self.tarinfo2tarblock(index, ti, data) | 677 | return self.tarinfo2tarblock(index, ti, data) |
552 | 678 | 678 | ||
553 | 679 | def get_data_block(self, fp): | 679 | def get_data_block(self, fp): |
555 | 680 | """ | 680 | u""" |
556 | 681 | Return pair (next data block, boolean last data block) | 681 | Return pair (next data block, boolean last data block) |
557 | 682 | """ | 682 | """ |
558 | 683 | read_size = self.get_read_size() | 683 | read_size = self.get_read_size() |
559 | 684 | buf = fp.read(read_size) | 684 | buf = fp.read(read_size) |
560 | 685 | if len(buf) < read_size: | 685 | if len(buf) < read_size: |
561 | 686 | if fp.close(): | 686 | if fp.close(): |
563 | 687 | raise DiffDirException("Error closing file") | 687 | raise DiffDirException(u"Error closing file") |
564 | 688 | return (buf, True) | 688 | return (buf, True) |
565 | 689 | else: | 689 | else: |
566 | 690 | return (buf, False) | 690 | return (buf, False) |
567 | 691 | 691 | ||
568 | 692 | def process_continued(self): | 692 | def process_continued(self): |
570 | 693 | """ | 693 | u""" |
571 | 694 | Return next volume in multivol diff or snapshot | 694 | Return next volume in multivol diff or snapshot |
572 | 695 | """ | 695 | """ |
573 | 696 | assert self.process_waiting | 696 | assert self.process_waiting |
574 | 697 | ropath = self.process_ropath | 697 | ropath = self.process_ropath |
575 | 698 | ti, index = ropath.get_tarinfo(), ropath.index | 698 | ti, index = ropath.get_tarinfo(), ropath.index |
577 | 699 | ti.name = "%s/%d" % (self.process_prefix, self.process_next_vol_number) | 699 | ti.name = u"%s/%d" % (self.process_prefix, self.process_next_vol_number) |
578 | 700 | data, last_block = self.get_data_block(self.process_fp) | 700 | data, last_block = self.get_data_block(self.process_fp) |
579 | 701 | if stats: | 701 | if stats: |
580 | 702 | stats.RawDeltaSize += len(data) | 702 | stats.RawDeltaSize += len(data) |
581 | @@ -712,13 +712,13 @@ | |||
582 | 712 | 712 | ||
583 | 713 | 713 | ||
584 | 714 | def write_block_iter(block_iter, out_obj): | 714 | def write_block_iter(block_iter, out_obj): |
586 | 715 | """ | 715 | u""" |
587 | 716 | Write block_iter to filename, path, or file object | 716 | Write block_iter to filename, path, or file object |
588 | 717 | """ | 717 | """ |
589 | 718 | if isinstance(out_obj, Path): | 718 | if isinstance(out_obj, Path): |
591 | 719 | fp = open(out_obj.name, "wb") | 719 | fp = open(out_obj.name, u"wb") |
592 | 720 | elif isinstance(out_obj, types.StringTypes): | 720 | elif isinstance(out_obj, types.StringTypes): |
594 | 721 | fp = open(out_obj, "wb") | 721 | fp = open(out_obj, u"wb") |
595 | 722 | else: | 722 | else: |
596 | 723 | fp = out_obj | 723 | fp = out_obj |
597 | 724 | for block in block_iter: | 724 | for block in block_iter: |
598 | @@ -730,7 +730,7 @@ | |||
599 | 730 | 730 | ||
600 | 731 | 731 | ||
601 | 732 | def get_block_size(file_len): | 732 | def get_block_size(file_len): |
603 | 733 | """ | 733 | u""" |
604 | 734 | Return a reasonable block size to use on files of length file_len | 734 | Return a reasonable block size to use on files of length file_len |
605 | 735 | 735 | ||
606 | 736 | If the block size is too big, deltas will be bigger than is | 736 | If the block size is too big, deltas will be bigger than is |
607 | 737 | 737 | ||
608 | === modified file 'duplicity/manifest.py' | |||
609 | --- duplicity/manifest.py 2018-09-06 11:14:11 +0000 | |||
610 | +++ duplicity/manifest.py 2018-09-24 21:19:45 +0000 | |||
611 | @@ -19,7 +19,7 @@ | |||
612 | 19 | # along with duplicity; if not, write to the Free Software Foundation, | 19 | # along with duplicity; if not, write to the Free Software Foundation, |
613 | 20 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | 20 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
614 | 21 | 21 | ||
616 | 22 | """Create and edit manifest for session contents""" | 22 | u"""Create and edit manifest for session contents""" |
617 | 23 | 23 | ||
618 | 24 | from future_builtins import filter | 24 | from future_builtins import filter |
619 | 25 | 25 | ||
620 | @@ -32,18 +32,18 @@ | |||
621 | 32 | 32 | ||
622 | 33 | 33 | ||
623 | 34 | class ManifestError(Exception): | 34 | class ManifestError(Exception): |
625 | 35 | """ | 35 | u""" |
626 | 36 | Exception raised when problem with manifest | 36 | Exception raised when problem with manifest |
627 | 37 | """ | 37 | """ |
628 | 38 | pass | 38 | pass |
629 | 39 | 39 | ||
630 | 40 | 40 | ||
631 | 41 | class Manifest: | 41 | class Manifest: |
633 | 42 | """ | 42 | u""" |
634 | 43 | List of volumes and information about each one | 43 | List of volumes and information about each one |
635 | 44 | """ | 44 | """ |
636 | 45 | def __init__(self, fh=None): | 45 | def __init__(self, fh=None): |
638 | 46 | """ | 46 | u""" |
639 | 47 | Create blank Manifest | 47 | Create blank Manifest |
640 | 48 | 48 | ||
641 | 49 | @param fh: fileobj for manifest | 49 | @param fh: fileobj for manifest |
642 | @@ -59,7 +59,7 @@ | |||
643 | 59 | self.files_changed = [] | 59 | self.files_changed = [] |
644 | 60 | 60 | ||
645 | 61 | def set_dirinfo(self): | 61 | def set_dirinfo(self): |
647 | 62 | """ | 62 | u""" |
648 | 63 | Set information about directory from globals, | 63 | Set information about directory from globals, |
649 | 64 | and write to manifest file. | 64 | and write to manifest file. |
650 | 65 | 65 | ||
651 | @@ -70,13 +70,13 @@ | |||
652 | 70 | self.local_dirname = globals.local_path.name # @UndefinedVariable | 70 | self.local_dirname = globals.local_path.name # @UndefinedVariable |
653 | 71 | if self.fh: | 71 | if self.fh: |
654 | 72 | if self.hostname: | 72 | if self.hostname: |
656 | 73 | self.fh.write("Hostname %s\n" % self.hostname) | 73 | self.fh.write(u"Hostname %s\n" % self.hostname) |
657 | 74 | if self.local_dirname: | 74 | if self.local_dirname: |
659 | 75 | self.fh.write("Localdir %s\n" % Quote(self.local_dirname)) | 75 | self.fh.write(u"Localdir %s\n" % Quote(self.local_dirname)) |
660 | 76 | return self | 76 | return self |
661 | 77 | 77 | ||
662 | 78 | def check_dirinfo(self): | 78 | def check_dirinfo(self): |
664 | 79 | """ | 79 | u""" |
665 | 80 | Return None if dirinfo is the same, otherwise error message | 80 | Return None if dirinfo is the same, otherwise error message |
666 | 81 | 81 | ||
667 | 82 | Does not raise an error message if hostname or local_dirname | 82 | Does not raise an error message if hostname or local_dirname |
668 | @@ -89,41 +89,41 @@ | |||
669 | 89 | return | 89 | return |
670 | 90 | 90 | ||
671 | 91 | if self.hostname and self.hostname != globals.hostname: | 91 | if self.hostname and self.hostname != globals.hostname: |
675 | 92 | errmsg = _("Fatal Error: Backup source host has changed.\n" | 92 | errmsg = _(u"Fatal Error: Backup source host has changed.\n" |
676 | 93 | "Current hostname: %s\n" | 93 | u"Current hostname: %s\n" |
677 | 94 | "Previous hostname: %s") % (globals.hostname, self.hostname) | 94 | u"Previous hostname: %s") % (globals.hostname, self.hostname) |
678 | 95 | code = log.ErrorCode.hostname_mismatch | 95 | code = log.ErrorCode.hostname_mismatch |
680 | 96 | code_extra = "%s %s" % (util.escape(globals.hostname), util.escape(self.hostname)) | 96 | code_extra = u"%s %s" % (util.escape(globals.hostname), util.escape(self.hostname)) |
681 | 97 | 97 | ||
682 | 98 | elif (self.local_dirname and self.local_dirname != globals.local_path.name): # @UndefinedVariable | 98 | elif (self.local_dirname and self.local_dirname != globals.local_path.name): # @UndefinedVariable |
686 | 99 | errmsg = _("Fatal Error: Backup source directory has changed.\n" | 99 | errmsg = _(u"Fatal Error: Backup source directory has changed.\n" |
687 | 100 | "Current directory: %s\n" | 100 | u"Current directory: %s\n" |
688 | 101 | "Previous directory: %s") % (globals.local_path.name, self.local_dirname) # @UndefinedVariable | 101 | u"Previous directory: %s") % (globals.local_path.name, self.local_dirname) # @UndefinedVariable |
689 | 102 | code = log.ErrorCode.source_dir_mismatch | 102 | code = log.ErrorCode.source_dir_mismatch |
692 | 103 | code_extra = "%s %s" % (util.escape(globals.local_path.name), | 103 | code_extra = u"%s %s" % (util.escape(globals.local_path.name), |
693 | 104 | util.escape(self.local_dirname)) # @UndefinedVariable | 104 | util.escape(self.local_dirname)) # @UndefinedVariable |
694 | 105 | else: | 105 | else: |
695 | 106 | return | 106 | return |
696 | 107 | 107 | ||
704 | 108 | log.FatalError(errmsg + "\n\n" + | 108 | log.FatalError(errmsg + u"\n\n" + |
705 | 109 | _("Aborting because you may have accidentally tried to " | 109 | _(u"Aborting because you may have accidentally tried to " |
706 | 110 | "backup two different data sets to the same remote " | 110 | u"backup two different data sets to the same remote " |
707 | 111 | "location, or using the same archive directory. If " | 111 | u"location, or using the same archive directory. If " |
708 | 112 | "this is not a mistake, use the " | 112 | u"this is not a mistake, use the " |
709 | 113 | "--allow-source-mismatch switch to avoid seeing this " | 113 | u"--allow-source-mismatch switch to avoid seeing this " |
710 | 114 | "message"), code, code_extra) | 114 | u"message"), code, code_extra) |
711 | 115 | 115 | ||
712 | 116 | def set_files_changed_info(self, files_changed): | 116 | def set_files_changed_info(self, files_changed): |
713 | 117 | if files_changed: | 117 | if files_changed: |
714 | 118 | self.files_changed = files_changed | 118 | self.files_changed = files_changed |
715 | 119 | 119 | ||
716 | 120 | if self.fh: | 120 | if self.fh: |
718 | 121 | self.fh.write("Filelist %d\n" % len(self.files_changed)) | 121 | self.fh.write(u"Filelist %d\n" % len(self.files_changed)) |
719 | 122 | for fileinfo in self.files_changed: | 122 | for fileinfo in self.files_changed: |
721 | 123 | self.fh.write(" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0]))) | 123 | self.fh.write(b" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0]))) |
722 | 124 | 124 | ||
723 | 125 | def add_volume_info(self, vi): | 125 | def add_volume_info(self, vi): |
725 | 126 | """ | 126 | u""" |
726 | 127 | Add volume info vi to manifest and write to manifest | 127 | Add volume info vi to manifest and write to manifest |
727 | 128 | 128 | ||
728 | 129 | @param vi: volume info to add | 129 | @param vi: volume info to add |
729 | @@ -134,10 +134,10 @@ | |||
730 | 134 | vol_num = vi.volume_number | 134 | vol_num = vi.volume_number |
731 | 135 | self.volume_info_dict[vol_num] = vi | 135 | self.volume_info_dict[vol_num] = vi |
732 | 136 | if self.fh: | 136 | if self.fh: |
734 | 137 | self.fh.write(vi.to_string() + "\n") | 137 | self.fh.write(vi.to_string() + b"\n") |
735 | 138 | 138 | ||
736 | 139 | def del_volume_info(self, vol_num): | 139 | def del_volume_info(self, vol_num): |
738 | 140 | """ | 140 | u""" |
739 | 141 | Remove volume vol_num from the manifest | 141 | Remove volume vol_num from the manifest |
740 | 142 | 142 | ||
741 | 143 | @param vol_num: volume number to delete | 143 | @param vol_num: volume number to delete |
742 | @@ -148,87 +148,87 @@ | |||
743 | 148 | try: | 148 | try: |
744 | 149 | del self.volume_info_dict[vol_num] | 149 | del self.volume_info_dict[vol_num] |
745 | 150 | except Exception: | 150 | except Exception: |
747 | 151 | raise ManifestError("Volume %d not present in manifest" % (vol_num,)) | 151 | raise ManifestError(u"Volume %d not present in manifest" % (vol_num,)) |
748 | 152 | 152 | ||
749 | 153 | def to_string(self): | 153 | def to_string(self): |
751 | 154 | """ | 154 | u""" |
752 | 155 | Return string version of self (just concatenate vi strings) | 155 | Return string version of self (just concatenate vi strings) |
753 | 156 | 156 | ||
754 | 157 | @rtype: string | 157 | @rtype: string |
755 | 158 | @return: self in string form | 158 | @return: self in string form |
756 | 159 | """ | 159 | """ |
758 | 160 | result = "" | 160 | result = u"" |
759 | 161 | if self.hostname: | 161 | if self.hostname: |
761 | 162 | result += "Hostname %s\n" % self.hostname | 162 | result += b"Hostname %s\n" % self.hostname |
762 | 163 | if self.local_dirname: | 163 | if self.local_dirname: |
764 | 164 | result += "Localdir %s\n" % Quote(self.local_dirname) | 164 | result += b"Localdir %s\n" % Quote(self.local_dirname) |
765 | 165 | 165 | ||
767 | 166 | result += "Filelist %d\n" % len(self.files_changed) | 166 | result += b"Filelist %d\n" % len(self.files_changed) |
768 | 167 | for fileinfo in self.files_changed: | 167 | for fileinfo in self.files_changed: |
770 | 168 | result += " %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0])) | 168 | result += b" %-7s %s\n" % (fileinfo[1], Quote(fileinfo[0])) |
771 | 169 | 169 | ||
772 | 170 | vol_num_list = self.volume_info_dict.keys() | 170 | vol_num_list = self.volume_info_dict.keys() |
773 | 171 | vol_num_list.sort() | 171 | vol_num_list.sort() |
774 | 172 | 172 | ||
775 | 173 | def vol_num_to_string(vol_num): | 173 | def vol_num_to_string(vol_num): |
776 | 174 | return self.volume_info_dict[vol_num].to_string() | 174 | return self.volume_info_dict[vol_num].to_string() |
779 | 175 | result = "%s%s\n" % (result, | 175 | result = b"%s%s\n" % (result, |
780 | 176 | "\n".join(map(vol_num_to_string, vol_num_list))) | 176 | b"\n".join(map(vol_num_to_string, vol_num_list))) |
781 | 177 | return result | 177 | return result |
782 | 178 | 178 | ||
783 | 179 | __str__ = to_string | 179 | __str__ = to_string |
784 | 180 | 180 | ||
785 | 181 | def from_string(self, s): | 181 | def from_string(self, s): |
787 | 182 | """ | 182 | u""" |
788 | 183 | Initialize self from string s, return self | 183 | Initialize self from string s, return self |
789 | 184 | """ | 184 | """ |
790 | 185 | def get_field(fieldname): | 185 | def get_field(fieldname): |
792 | 186 | """ | 186 | u""" |
793 | 187 | Return the value of a field by parsing s, or None if no field | 187 | Return the value of a field by parsing s, or None if no field |
794 | 188 | """ | 188 | """ |
796 | 189 | m = re.search("(^|\\n)%s\\s(.*?)\n" % fieldname, s, re.I) | 189 | m = re.search(u"(^|\\n)%s\\s(.*?)\n" % fieldname, s, re.I) |
797 | 190 | if not m: | 190 | if not m: |
798 | 191 | return None | 191 | return None |
799 | 192 | else: | 192 | else: |
800 | 193 | return Unquote(m.group(2)) | 193 | return Unquote(m.group(2)) |
803 | 194 | self.hostname = get_field("hostname") | 194 | self.hostname = get_field(u"hostname") |
804 | 195 | self.local_dirname = get_field("localdir") | 195 | self.local_dirname = get_field(u"localdir") |
805 | 196 | 196 | ||
806 | 197 | highest_vol = 0 | 197 | highest_vol = 0 |
807 | 198 | latest_vol = 0 | 198 | latest_vol = 0 |
809 | 199 | vi_regexp = re.compile("(?:^|\\n)(volume\\s.*(?:\\n.*)*?)(?=\\nvolume\\s|$)", re.I) | 199 | vi_regexp = re.compile(u"(?:^|\\n)(volume\\s.*(?:\\n.*)*?)(?=\\nvolume\\s|$)", re.I) |
810 | 200 | vi_iterator = vi_regexp.finditer(s) | 200 | vi_iterator = vi_regexp.finditer(s) |
811 | 201 | for match in vi_iterator: | 201 | for match in vi_iterator: |
812 | 202 | vi = VolumeInfo().from_string(match.group(1)) | 202 | vi = VolumeInfo().from_string(match.group(1)) |
813 | 203 | self.add_volume_info(vi) | 203 | self.add_volume_info(vi) |
814 | 204 | latest_vol = vi.volume_number | 204 | latest_vol = vi.volume_number |
815 | 205 | highest_vol = max(highest_vol, latest_vol) | 205 | highest_vol = max(highest_vol, latest_vol) |
817 | 206 | log.Debug(_("Found manifest volume %s") % latest_vol) | 206 | log.Debug(_(u"Found manifest volume %s") % latest_vol) |
818 | 207 | # If we restarted after losing some remote volumes, the highest volume | 207 | # If we restarted after losing some remote volumes, the highest volume |
819 | 208 | # seen may be higher than the last volume recorded. That is, the | 208 | # seen may be higher than the last volume recorded. That is, the |
820 | 209 | # manifest could contain "vol1, vol2, vol3, vol2." If so, we don't | 209 | # manifest could contain "vol1, vol2, vol3, vol2." If so, we don't |
821 | 210 | # want to keep vol3's info. | 210 | # want to keep vol3's info. |
822 | 211 | for i in range(latest_vol + 1, highest_vol + 1): | 211 | for i in range(latest_vol + 1, highest_vol + 1): |
823 | 212 | self.del_volume_info(i) | 212 | self.del_volume_info(i) |
825 | 213 | log.Info(_("Found %s volumes in manifest") % latest_vol) | 213 | log.Info(_(u"Found %s volumes in manifest") % latest_vol) |
826 | 214 | 214 | ||
827 | 215 | # Get file changed list - not needed if --file-changed not present | 215 | # Get file changed list - not needed if --file-changed not present |
828 | 216 | filecount = 0 | 216 | filecount = 0 |
829 | 217 | if globals.file_changed is not None: | 217 | if globals.file_changed is not None: |
831 | 218 | filelist_regexp = re.compile("(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S) | 218 | filelist_regexp = re.compile(u"(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S) |
832 | 219 | match = filelist_regexp.search(s) | 219 | match = filelist_regexp.search(s) |
833 | 220 | if match: | 220 | if match: |
834 | 221 | filecount = int(match.group(2)) | 221 | filecount = int(match.group(2)) |
835 | 222 | if filecount > 0: | 222 | if filecount > 0: |
836 | 223 | def parse_fileinfo(line): | 223 | def parse_fileinfo(line): |
837 | 224 | fileinfo = line.strip().split() | 224 | fileinfo = line.strip().split() |
839 | 225 | return (fileinfo[0], ''.join(fileinfo[1:])) | 225 | return (fileinfo[0], u''.join(fileinfo[1:])) |
840 | 226 | 226 | ||
842 | 227 | self.files_changed = list(map(parse_fileinfo, match.group(3).split('\n'))) | 227 | self.files_changed = list(map(parse_fileinfo, match.group(3).split(u'\n'))) |
843 | 228 | 228 | ||
844 | 229 | if filecount != len(self.files_changed): | 229 | if filecount != len(self.files_changed): |
847 | 230 | log.Error(_("Manifest file '%s' is corrupt: File count says %d, File list contains %d" % | 230 | log.Error(_(u"Manifest file '%s' is corrupt: File count says %d, File list contains %d" % |
848 | 231 | (self.fh.base if self.fh else "", filecount, len(self.files_changed)))) | 231 | (self.fh.base if self.fh else u"", filecount, len(self.files_changed)))) |
849 | 232 | self.corrupt_filelist = True | 232 | self.corrupt_filelist = True |
850 | 233 | 233 | ||
851 | 234 | return self | 234 | return self |
852 | @@ -237,7 +237,7 @@ | |||
853 | 237 | return self.files_changed | 237 | return self.files_changed |
854 | 238 | 238 | ||
855 | 239 | def __eq__(self, other): | 239 | def __eq__(self, other): |
857 | 240 | """ | 240 | u""" |
858 | 241 | Two manifests are equal if they contain the same volume infos | 241 | Two manifests are equal if they contain the same volume infos |
859 | 242 | """ | 242 | """ |
860 | 243 | vi_list1 = self.volume_info_dict.keys() | 243 | vi_list1 = self.volume_info_dict.keys() |
861 | @@ -246,39 +246,39 @@ | |||
862 | 246 | vi_list2.sort() | 246 | vi_list2.sort() |
863 | 247 | 247 | ||
864 | 248 | if vi_list1 != vi_list2: | 248 | if vi_list1 != vi_list2: |
866 | 249 | log.Notice(_("Manifests not equal because different volume numbers")) | 249 | log.Notice(_(u"Manifests not equal because different volume numbers")) |
867 | 250 | return False | 250 | return False |
868 | 251 | 251 | ||
869 | 252 | for i in range(len(vi_list1)): | 252 | for i in range(len(vi_list1)): |
870 | 253 | if not vi_list1[i] == vi_list2[i]: | 253 | if not vi_list1[i] == vi_list2[i]: |
872 | 254 | log.Notice(_("Manifests not equal because volume lists differ")) | 254 | log.Notice(_(u"Manifests not equal because volume lists differ")) |
873 | 255 | return False | 255 | return False |
874 | 256 | 256 | ||
875 | 257 | if (self.hostname != other.hostname or | 257 | if (self.hostname != other.hostname or |
876 | 258 | self.local_dirname != other.local_dirname): | 258 | self.local_dirname != other.local_dirname): |
878 | 259 | log.Notice(_("Manifests not equal because hosts or directories differ")) | 259 | log.Notice(_(u"Manifests not equal because hosts or directories differ")) |
879 | 260 | return False | 260 | return False |
880 | 261 | 261 | ||
881 | 262 | return True | 262 | return True |
882 | 263 | 263 | ||
883 | 264 | def __ne__(self, other): | 264 | def __ne__(self, other): |
885 | 265 | """ | 265 | u""" |
886 | 266 | Defines !=. Not doing this always leads to annoying bugs... | 266 | Defines !=. Not doing this always leads to annoying bugs... |
887 | 267 | """ | 267 | """ |
888 | 268 | return not self.__eq__(other) | 268 | return not self.__eq__(other) |
889 | 269 | 269 | ||
890 | 270 | def write_to_path(self, path): | 270 | def write_to_path(self, path): |
892 | 271 | """ | 271 | u""" |
893 | 272 | Write string version of manifest to given path | 272 | Write string version of manifest to given path |
894 | 273 | """ | 273 | """ |
895 | 274 | assert not path.exists() | 274 | assert not path.exists() |
897 | 275 | fout = path.open("wb") | 275 | fout = path.open(u"wb") |
898 | 276 | fout.write(self.to_string()) | 276 | fout.write(self.to_string()) |
899 | 277 | assert not fout.close() | 277 | assert not fout.close() |
900 | 278 | path.setdata() | 278 | path.setdata() |
901 | 279 | 279 | ||
902 | 280 | def get_containing_volumes(self, index_prefix): | 280 | def get_containing_volumes(self, index_prefix): |
904 | 281 | """ | 281 | u""" |
905 | 282 | Return list of volume numbers that may contain index_prefix | 282 | Return list of volume numbers that may contain index_prefix |
906 | 283 | """ | 283 | """ |
907 | 284 | return filter(lambda vol_num: | 284 | return filter(lambda vol_num: |
908 | @@ -287,18 +287,18 @@ | |||
909 | 287 | 287 | ||
910 | 288 | 288 | ||
911 | 289 | class VolumeInfoError(Exception): | 289 | class VolumeInfoError(Exception): |
913 | 290 | """ | 290 | u""" |
914 | 291 | Raised when there is a problem initializing a VolumeInfo from string | 291 | Raised when there is a problem initializing a VolumeInfo from string |
915 | 292 | """ | 292 | """ |
916 | 293 | pass | 293 | pass |
917 | 294 | 294 | ||
918 | 295 | 295 | ||
919 | 296 | class VolumeInfo: | 296 | class VolumeInfo: |
921 | 297 | """ | 297 | u""" |
922 | 298 | Information about a single volume | 298 | Information about a single volume |
923 | 299 | """ | 299 | """ |
924 | 300 | def __init__(self): | 300 | def __init__(self): |
926 | 301 | """VolumeInfo initializer""" | 301 | u"""VolumeInfo initializer""" |
927 | 302 | self.volume_number = None | 302 | self.volume_number = None |
928 | 303 | self.start_index = None | 303 | self.start_index = None |
929 | 304 | self.start_block = None | 304 | self.start_block = None |
930 | @@ -309,7 +309,7 @@ | |||
931 | 309 | def set_info(self, vol_number, | 309 | def set_info(self, vol_number, |
932 | 310 | start_index, start_block, | 310 | start_index, start_block, |
933 | 311 | end_index, end_block): | 311 | end_index, end_block): |
935 | 312 | """ | 312 | u""" |
936 | 313 | Set essential VolumeInfo information, return self | 313 | Set essential VolumeInfo information, return self |
937 | 314 | 314 | ||
938 | 315 | Call with starting and ending paths stored in the volume. If | 315 | Call with starting and ending paths stored in the volume. If |
939 | @@ -325,13 +325,13 @@ | |||
940 | 325 | return self | 325 | return self |
941 | 326 | 326 | ||
942 | 327 | def set_hash(self, hash_name, data): | 327 | def set_hash(self, hash_name, data): |
944 | 328 | """ | 328 | u""" |
945 | 329 | Set the value of hash hash_name (e.g. "MD5") to data | 329 | Set the value of hash hash_name (e.g. "MD5") to data |
946 | 330 | """ | 330 | """ |
947 | 331 | self.hashes[hash_name] = data | 331 | self.hashes[hash_name] = data |
948 | 332 | 332 | ||
949 | 333 | def get_best_hash(self): | 333 | def get_best_hash(self): |
951 | 334 | """ | 334 | u""" |
952 | 335 | Return pair (hash_type, hash_data) | 335 | Return pair (hash_type, hash_data) |
953 | 336 | 336 | ||
954 | 337 | SHA1 is the best hash, and MD5 is the second best hash. None | 337 | SHA1 is the best hash, and MD5 is the second best hash. None |
955 | @@ -340,59 +340,59 @@ | |||
956 | 340 | if not self.hashes: | 340 | if not self.hashes: |
957 | 341 | return None | 341 | return None |
958 | 342 | try: | 342 | try: |
960 | 343 | return ("SHA1", self.hashes['SHA1']) | 343 | return (u"SHA1", self.hashes[u'SHA1']) |
961 | 344 | except KeyError: | 344 | except KeyError: |
962 | 345 | pass | 345 | pass |
963 | 346 | try: | 346 | try: |
965 | 347 | return ("MD5", self.hashes['MD5']) | 347 | return (u"MD5", self.hashes[u'MD5']) |
966 | 348 | except KeyError: | 348 | except KeyError: |
967 | 349 | pass | 349 | pass |
968 | 350 | return self.hashes.items()[0] | 350 | return self.hashes.items()[0] |
969 | 351 | 351 | ||
970 | 352 | def to_string(self): | 352 | def to_string(self): |
972 | 353 | """ | 353 | u""" |
973 | 354 | Return nicely formatted string reporting all information | 354 | Return nicely formatted string reporting all information |
974 | 355 | """ | 355 | """ |
975 | 356 | def index_to_string(index): | 356 | def index_to_string(index): |
977 | 357 | """Return printable version of index without any whitespace""" | 357 | u"""Return printable version of index without any whitespace""" |
978 | 358 | if index: | 358 | if index: |
980 | 359 | s = "/".join(index) | 359 | s = b"/".join(index) |
981 | 360 | return Quote(s) | 360 | return Quote(s) |
982 | 361 | else: | 361 | else: |
984 | 362 | return "." | 362 | return b"." |
985 | 363 | 363 | ||
992 | 364 | slist = ["Volume %d:" % self.volume_number] | 364 | slist = [b"Volume %d:" % self.volume_number] |
993 | 365 | whitespace = " " | 365 | whitespace = b" " |
994 | 366 | slist.append("%sStartingPath %s %s" % | 366 | slist.append(b"%sStartingPath %s %s" % |
995 | 367 | (whitespace, index_to_string(self.start_index), (self.start_block or " "))) | 367 | (whitespace, index_to_string(self.start_index), (self.start_block or b" "))) |
996 | 368 | slist.append("%sEndingPath %s %s" % | 368 | slist.append(b"%sEndingPath %s %s" % |
997 | 369 | (whitespace, index_to_string(self.end_index), (self.end_block or " "))) | 369 | (whitespace, index_to_string(self.end_index), (self.end_block or b" "))) |
998 | 370 | for key in self.hashes: | 370 | for key in self.hashes: |
1000 | 371 | slist.append("%sHash %s %s" % | 371 | slist.append(b"%sHash %s %s" % |
1001 | 372 | (whitespace, key.encode(), self.hashes[key])) | 372 | (whitespace, key.encode(), self.hashes[key])) |
1003 | 373 | return "\n".join(slist) | 373 | return b"\n".join(slist) |
1004 | 374 | 374 | ||
1005 | 375 | __str__ = to_string | 375 | __str__ = to_string |
1006 | 376 | 376 | ||
1007 | 377 | def from_string(self, s): | 377 | def from_string(self, s): |
1009 | 378 | """ | 378 | u""" |
1010 | 379 | Initialize self from string s as created by to_string | 379 | Initialize self from string s as created by to_string |
1011 | 380 | """ | 380 | """ |
1012 | 381 | def string_to_index(s): | 381 | def string_to_index(s): |
1014 | 382 | """ | 382 | u""" |
1015 | 383 | Return tuple index from string | 383 | Return tuple index from string |
1016 | 384 | """ | 384 | """ |
1017 | 385 | s = Unquote(s) | 385 | s = Unquote(s) |
1019 | 386 | if s == ".": | 386 | if s == b".": |
1020 | 387 | return () | 387 | return () |
1022 | 388 | return tuple(s.split("/")) | 388 | return tuple(s.split(b"/")) |
1023 | 389 | 389 | ||
1025 | 390 | linelist = s.strip().split("\n") | 390 | linelist = s.strip().split(b"\n") |
1026 | 391 | 391 | ||
1027 | 392 | # Set volume number | 392 | # Set volume number |
1029 | 393 | m = re.search("^Volume ([0-9]+):", linelist[0], re.I) | 393 | m = re.search(u"^Volume ([0-9]+):", linelist[0], re.I) |
1030 | 394 | if not m: | 394 | if not m: |
1032 | 395 | raise VolumeInfoError("Bad first line '%s'" % (linelist[0],)) | 395 | raise VolumeInfoError(u"Bad first line '%s'" % (linelist[0],)) |
1033 | 396 | self.volume_number = int(m.group(1)) | 396 | self.volume_number = int(m.group(1)) |
1034 | 397 | 397 | ||
1035 | 398 | # Set other fields | 398 | # Set other fields |
1036 | @@ -402,61 +402,61 @@ | |||
1037 | 402 | line_split = line.strip().split() | 402 | line_split = line.strip().split() |
1038 | 403 | field_name = line_split[0].lower() | 403 | field_name = line_split[0].lower() |
1039 | 404 | other_fields = line_split[1:] | 404 | other_fields = line_split[1:] |
1042 | 405 | if field_name == "Volume": | 405 | if field_name == u"Volume": |
1043 | 406 | log.Warn(_("Warning, found extra Volume identifier")) | 406 | log.Warn(_(u"Warning, found extra Volume identifier")) |
1044 | 407 | break | 407 | break |
1046 | 408 | elif field_name == "startingpath": | 408 | elif field_name == u"startingpath": |
1047 | 409 | self.start_index = string_to_index(other_fields[0]) | 409 | self.start_index = string_to_index(other_fields[0]) |
1048 | 410 | if len(other_fields) > 1: | 410 | if len(other_fields) > 1: |
1049 | 411 | self.start_block = int(other_fields[1]) | 411 | self.start_block = int(other_fields[1]) |
1050 | 412 | else: | 412 | else: |
1051 | 413 | self.start_block = None | 413 | self.start_block = None |
1053 | 414 | elif field_name == "endingpath": | 414 | elif field_name == u"endingpath": |
1054 | 415 | self.end_index = string_to_index(other_fields[0]) | 415 | self.end_index = string_to_index(other_fields[0]) |
1055 | 416 | if len(other_fields) > 1: | 416 | if len(other_fields) > 1: |
1056 | 417 | self.end_block = int(other_fields[1]) | 417 | self.end_block = int(other_fields[1]) |
1057 | 418 | else: | 418 | else: |
1058 | 419 | self.end_block = None | 419 | self.end_block = None |
1060 | 420 | elif field_name == "hash": | 420 | elif field_name == u"hash": |
1061 | 421 | self.set_hash(other_fields[0], other_fields[1]) | 421 | self.set_hash(other_fields[0], other_fields[1]) |
1062 | 422 | 422 | ||
1063 | 423 | if self.start_index is None or self.end_index is None: | 423 | if self.start_index is None or self.end_index is None: |
1065 | 424 | raise VolumeInfoError("Start or end index not set") | 424 | raise VolumeInfoError(u"Start or end index not set") |
1066 | 425 | return self | 425 | return self |
1067 | 426 | 426 | ||
1068 | 427 | def __eq__(self, other): | 427 | def __eq__(self, other): |
1070 | 428 | """ | 428 | u""" |
1071 | 429 | Used in test suite | 429 | Used in test suite |
1072 | 430 | """ | 430 | """ |
1073 | 431 | if not isinstance(other, VolumeInfo): | 431 | if not isinstance(other, VolumeInfo): |
1075 | 432 | log.Notice(_("Other is not VolumeInfo")) | 432 | log.Notice(_(u"Other is not VolumeInfo")) |
1076 | 433 | return None | 433 | return None |
1077 | 434 | if self.volume_number != other.volume_number: | 434 | if self.volume_number != other.volume_number: |
1079 | 435 | log.Notice(_("Volume numbers don't match")) | 435 | log.Notice(_(u"Volume numbers don't match")) |
1080 | 436 | return None | 436 | return None |
1081 | 437 | if self.start_index != other.start_index: | 437 | if self.start_index != other.start_index: |
1083 | 438 | log.Notice(_("start_indicies don't match")) | 438 | log.Notice(_(u"start_indicies don't match")) |
1084 | 439 | return None | 439 | return None |
1085 | 440 | if self.end_index != other.end_index: | 440 | if self.end_index != other.end_index: |
1087 | 441 | log.Notice(_("end_index don't match")) | 441 | log.Notice(_(u"end_index don't match")) |
1088 | 442 | return None | 442 | return None |
1089 | 443 | hash_list1 = self.hashes.items() | 443 | hash_list1 = self.hashes.items() |
1090 | 444 | hash_list1.sort() | 444 | hash_list1.sort() |
1091 | 445 | hash_list2 = other.hashes.items() | 445 | hash_list2 = other.hashes.items() |
1092 | 446 | hash_list2.sort() | 446 | hash_list2.sort() |
1093 | 447 | if hash_list1 != hash_list2: | 447 | if hash_list1 != hash_list2: |
1095 | 448 | log.Notice(_("Hashes don't match")) | 448 | log.Notice(_(u"Hashes don't match")) |
1096 | 449 | return None | 449 | return None |
1097 | 450 | return 1 | 450 | return 1 |
1098 | 451 | 451 | ||
1099 | 452 | def __ne__(self, other): | 452 | def __ne__(self, other): |
1101 | 453 | """ | 453 | u""" |
1102 | 454 | Defines != | 454 | Defines != |
1103 | 455 | """ | 455 | """ |
1104 | 456 | return not self.__eq__(other) | 456 | return not self.__eq__(other) |
1105 | 457 | 457 | ||
1106 | 458 | def contains(self, index_prefix, recursive=1): | 458 | def contains(self, index_prefix, recursive=1): |
1108 | 459 | """ | 459 | u""" |
1109 | 460 | Return true if volume might contain index | 460 | Return true if volume might contain index |
1110 | 461 | 461 | ||
1111 | 462 | If recursive is true, then return true if any index starting | 462 | If recursive is true, then return true if any index starting |
1112 | @@ -471,11 +471,11 @@ | |||
1113 | 471 | return self.start_index <= index_prefix <= self.end_index | 471 | return self.start_index <= index_prefix <= self.end_index |
1114 | 472 | 472 | ||
1115 | 473 | 473 | ||
1117 | 474 | nonnormal_char_re = re.compile("(\\s|[\\\\\"'])") | 474 | nonnormal_char_re = re.compile(u"(\\s|[\\\\\"'])") |
1118 | 475 | 475 | ||
1119 | 476 | 476 | ||
1120 | 477 | def Quote(s): | 477 | def Quote(s): |
1122 | 478 | """ | 478 | u""" |
1123 | 479 | Return quoted version of s safe to put in a manifest or volume info | 479 | Return quoted version of s safe to put in a manifest or volume info |
1124 | 480 | """ | 480 | """ |
1125 | 481 | if not nonnormal_char_re.search(s): | 481 | if not nonnormal_char_re.search(s): |
1126 | @@ -483,29 +483,29 @@ | |||
1127 | 483 | slist = [] | 483 | slist = [] |
1128 | 484 | for char in s: | 484 | for char in s: |
1129 | 485 | if nonnormal_char_re.search(char): | 485 | if nonnormal_char_re.search(char): |
1131 | 486 | slist.append("\\x%02x" % ord(char)) | 486 | slist.append(b"\\x%02x" % ord(char)) |
1132 | 487 | else: | 487 | else: |
1133 | 488 | slist.append(char) | 488 | slist.append(char) |
1135 | 489 | return '"%s"' % "".join(slist) | 489 | return b'"%s"' % u"".join(slist) |
1136 | 490 | 490 | ||
1137 | 491 | 491 | ||
1138 | 492 | def Unquote(quoted_string): | 492 | def Unquote(quoted_string): |
1140 | 493 | """ | 493 | u""" |
1141 | 494 | Return original string from quoted_string produced by above | 494 | Return original string from quoted_string produced by above |
1142 | 495 | """ | 495 | """ |
1144 | 496 | if not quoted_string[0] == '"' or quoted_string[0] == "'": | 496 | if not quoted_string[0] == b'"' or quoted_string[0] == b"'": |
1145 | 497 | return quoted_string | 497 | return quoted_string |
1146 | 498 | assert quoted_string[0] == quoted_string[-1] | 498 | assert quoted_string[0] == quoted_string[-1] |
1147 | 499 | return_list = [] | 499 | return_list = [] |
1148 | 500 | i = 1 # skip initial char | 500 | i = 1 # skip initial char |
1149 | 501 | while i < len(quoted_string) - 1: | 501 | while i < len(quoted_string) - 1: |
1150 | 502 | char = quoted_string[i] | 502 | char = quoted_string[i] |
1152 | 503 | if char == "\\": | 503 | if char == b"\\": |
1153 | 504 | # quoted section | 504 | # quoted section |
1155 | 505 | assert quoted_string[i + 1] == "x" | 505 | assert quoted_string[i + 1] == b"x" |
1156 | 506 | return_list.append(chr(int(quoted_string[i + 2:i + 4], 16))) | 506 | return_list.append(chr(int(quoted_string[i + 2:i + 4], 16))) |
1157 | 507 | i += 4 | 507 | i += 4 |
1158 | 508 | else: | 508 | else: |
1159 | 509 | return_list.append(char) | 509 | return_list.append(char) |
1160 | 510 | i += 1 | 510 | i += 1 |
1162 | 511 | return "".join(return_list) | 511 | return b"".join(return_list) |
1163 | 512 | 512 | ||
1164 | === modified file 'duplicity/patchdir.py' | |||
1165 | --- duplicity/patchdir.py 2018-07-24 11:52:33 +0000 | |||
1166 | +++ duplicity/patchdir.py 2018-09-24 21:19:45 +0000 | |||
1167 | @@ -37,7 +37,7 @@ | |||
1168 | 37 | from duplicity.path import * # @UnusedWildImport | 37 | from duplicity.path import * # @UnusedWildImport |
1169 | 38 | from duplicity.lazy import * # @UnusedWildImport | 38 | from duplicity.lazy import * # @UnusedWildImport |
1170 | 39 | 39 | ||
1172 | 40 | """Functions for patching of directories""" | 40 | u"""Functions for patching of directories""" |
1173 | 41 | 41 | ||
1174 | 42 | 42 | ||
1175 | 43 | class PatchDirException(Exception): | 43 | class PatchDirException(Exception): |
1176 | @@ -45,20 +45,20 @@ | |||
1177 | 45 | 45 | ||
1178 | 46 | 46 | ||
1179 | 47 | def Patch(base_path, difftar_fileobj): | 47 | def Patch(base_path, difftar_fileobj): |
1182 | 48 | """Patch given base_path and file object containing delta""" | 48 | u"""Patch given base_path and file object containing delta""" |
1183 | 49 | diff_tarfile = tarfile.TarFile("arbitrary", "r", difftar_fileobj) | 49 | diff_tarfile = tarfile.TarFile(u"arbitrary", u"r", difftar_fileobj) |
1184 | 50 | patch_diff_tarfile(base_path, diff_tarfile) | 50 | patch_diff_tarfile(base_path, diff_tarfile) |
1185 | 51 | assert not difftar_fileobj.close() | 51 | assert not difftar_fileobj.close() |
1186 | 52 | 52 | ||
1187 | 53 | 53 | ||
1188 | 54 | def Patch_from_iter(base_path, fileobj_iter, restrict_index=()): | 54 | def Patch_from_iter(base_path, fileobj_iter, restrict_index=()): |
1190 | 55 | """Patch given base_path and iterator of delta file objects""" | 55 | u"""Patch given base_path and iterator of delta file objects""" |
1191 | 56 | diff_tarfile = TarFile_FromFileobjs(fileobj_iter) | 56 | diff_tarfile = TarFile_FromFileobjs(fileobj_iter) |
1192 | 57 | patch_diff_tarfile(base_path, diff_tarfile, restrict_index) | 57 | patch_diff_tarfile(base_path, diff_tarfile, restrict_index) |
1193 | 58 | 58 | ||
1194 | 59 | 59 | ||
1195 | 60 | def patch_diff_tarfile(base_path, diff_tarfile, restrict_index=()): | 60 | def patch_diff_tarfile(base_path, diff_tarfile, restrict_index=()): |
1197 | 61 | """Patch given Path object using delta tarfile (as in tarfile.TarFile) | 61 | u"""Patch given Path object using delta tarfile (as in tarfile.TarFile) |
1198 | 62 | 62 | ||
1199 | 63 | If restrict_index is set, ignore any deltas in diff_tarfile that | 63 | If restrict_index is set, ignore any deltas in diff_tarfile that |
1200 | 64 | don't start with restrict_index. | 64 | don't start with restrict_index. |
1201 | @@ -77,12 +77,12 @@ | |||
1202 | 77 | ITR = IterTreeReducer(PathPatcher, [base_path]) | 77 | ITR = IterTreeReducer(PathPatcher, [base_path]) |
1203 | 78 | for basis_path, diff_ropath in collated: | 78 | for basis_path, diff_ropath in collated: |
1204 | 79 | if basis_path: | 79 | if basis_path: |
1206 | 80 | log.Info(_("Patching %s") % (util.fsdecode(basis_path.get_relative_path())), | 80 | log.Info(_(u"Patching %s") % (util.fsdecode(basis_path.get_relative_path())), |
1207 | 81 | log.InfoCode.patch_file_patching, | 81 | log.InfoCode.patch_file_patching, |
1208 | 82 | util.escape(basis_path.get_relative_path())) | 82 | util.escape(basis_path.get_relative_path())) |
1209 | 83 | ITR(basis_path.index, basis_path, diff_ropath) | 83 | ITR(basis_path.index, basis_path, diff_ropath) |
1210 | 84 | else: | 84 | else: |
1212 | 85 | log.Info(_("Patching %s") % (util.fsdecode(diff_ropath.get_relative_path())), | 85 | log.Info(_(u"Patching %s") % (util.fsdecode(diff_ropath.get_relative_path())), |
1213 | 86 | log.InfoCode.patch_file_patching, | 86 | log.InfoCode.patch_file_patching, |
1214 | 87 | util.escape(diff_ropath.get_relative_path())) | 87 | util.escape(diff_ropath.get_relative_path())) |
1215 | 88 | ITR(diff_ropath.index, basis_path, diff_ropath) | 88 | ITR(diff_ropath.index, basis_path, diff_ropath) |
1216 | @@ -96,7 +96,7 @@ | |||
1217 | 96 | 96 | ||
1218 | 97 | 97 | ||
1219 | 98 | def filter_path_iter(path_iter, index): | 98 | def filter_path_iter(path_iter, index): |
1221 | 99 | """Rewrite path elements of path_iter so they start with index | 99 | u"""Rewrite path elements of path_iter so they start with index |
1222 | 100 | 100 | ||
1223 | 101 | Discard any that doesn't start with index, and remove the index | 101 | Discard any that doesn't start with index, and remove the index |
1224 | 102 | prefix from the rest. | 102 | prefix from the rest. |
1225 | @@ -111,7 +111,7 @@ | |||
1226 | 111 | 111 | ||
1227 | 112 | 112 | ||
1228 | 113 | def difftar2path_iter(diff_tarfile): | 113 | def difftar2path_iter(diff_tarfile): |
1230 | 114 | """Turn file-like difftarobj into iterator of ROPaths""" | 114 | u"""Turn file-like difftarobj into iterator of ROPaths""" |
1231 | 115 | tar_iter = iter(diff_tarfile) | 115 | tar_iter = iter(diff_tarfile) |
1232 | 116 | multivol_fileobj = None | 116 | multivol_fileobj = None |
1233 | 117 | 117 | ||
1234 | @@ -132,7 +132,7 @@ | |||
1235 | 132 | ropath = ROPath(index) | 132 | ropath = ROPath(index) |
1236 | 133 | ropath.init_from_tarinfo(tarinfo_list[0]) | 133 | ropath.init_from_tarinfo(tarinfo_list[0]) |
1237 | 134 | ropath.difftype = difftype | 134 | ropath.difftype = difftype |
1239 | 135 | if difftype == "deleted": | 135 | if difftype == u"deleted": |
1240 | 136 | ropath.type = None | 136 | ropath.type = None |
1241 | 137 | elif ropath.isreg(): | 137 | elif ropath.isreg(): |
1242 | 138 | if multivol: | 138 | if multivol: |
1243 | @@ -148,61 +148,61 @@ | |||
1244 | 148 | 148 | ||
1245 | 149 | 149 | ||
1246 | 150 | def get_index_from_tarinfo(tarinfo): | 150 | def get_index_from_tarinfo(tarinfo): |
1250 | 151 | """Return (index, difftype, multivol) pair from tarinfo object""" | 151 | u"""Return (index, difftype, multivol) pair from tarinfo object""" |
1251 | 152 | for prefix in ["snapshot/", "diff/", "deleted/", | 152 | for prefix in [b"snapshot/", b"diff/", b"deleted/", |
1252 | 153 | "multivol_diff/", "multivol_snapshot/"]: | 153 | b"multivol_diff/", b"multivol_snapshot/"]: |
1253 | 154 | tiname = util.get_tarinfo_name(tarinfo) | 154 | tiname = util.get_tarinfo_name(tarinfo) |
1254 | 155 | if tiname.startswith(prefix): | 155 | if tiname.startswith(prefix): |
1255 | 156 | name = tiname[len(prefix):] # strip prefix | 156 | name = tiname[len(prefix):] # strip prefix |
1259 | 157 | if prefix.startswith("multivol"): | 157 | if prefix.startswith(u"multivol"): |
1260 | 158 | if prefix == "multivol_diff/": | 158 | if prefix == u"multivol_diff/": |
1261 | 159 | difftype = "diff" | 159 | difftype = u"diff" |
1262 | 160 | else: | 160 | else: |
1264 | 161 | difftype = "snapshot" | 161 | difftype = u"snapshot" |
1265 | 162 | multivol = 1 | 162 | multivol = 1 |
1266 | 163 | name, num_subs = \ | 163 | name, num_subs = \ |
1269 | 164 | re.subn("(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$", | 164 | re.subn(b"(?s)^multivol_(diff|snapshot)/?(.*)/[0-9]+$", |
1270 | 165 | "\\2", tiname) | 165 | b"\\2", tiname) |
1271 | 166 | if num_subs != 1: | 166 | if num_subs != 1: |
1272 | 167 | raise PatchDirException(u"Unrecognized diff entry %s" % | 167 | raise PatchDirException(u"Unrecognized diff entry %s" % |
1273 | 168 | util.fsdecode(tiname)) | 168 | util.fsdecode(tiname)) |
1274 | 169 | else: | 169 | else: |
1275 | 170 | difftype = prefix[:-1] # strip trailing / | 170 | difftype = prefix[:-1] # strip trailing / |
1276 | 171 | name = tiname[len(prefix):] | 171 | name = tiname[len(prefix):] |
1278 | 172 | if name.endswith("/"): | 172 | if name.endswith(b"/"): |
1279 | 173 | name = name[:-1] # strip trailing /'s | 173 | name = name[:-1] # strip trailing /'s |
1280 | 174 | multivol = 0 | 174 | multivol = 0 |
1281 | 175 | break | 175 | break |
1282 | 176 | else: | 176 | else: |
1283 | 177 | raise PatchDirException(u"Unrecognized diff entry %s" % | 177 | raise PatchDirException(u"Unrecognized diff entry %s" % |
1284 | 178 | util.fsdecode(tiname)) | 178 | util.fsdecode(tiname)) |
1286 | 179 | if name == "." or name == "": | 179 | if name == b"." or name == b"": |
1287 | 180 | index = () | 180 | index = () |
1288 | 181 | else: | 181 | else: |
1291 | 182 | index = tuple(name.split("/")) | 182 | index = tuple(name.split(b"/")) |
1292 | 183 | if '..' in index: | 183 | if b'..' in index: |
1293 | 184 | raise PatchDirException(u"Tar entry %s contains '..'. Security " | 184 | raise PatchDirException(u"Tar entry %s contains '..'. Security " |
1295 | 185 | "violation" % util.fsdecode(tiname)) | 185 | u"violation" % util.fsdecode(tiname)) |
1296 | 186 | return (index, difftype, multivol) | 186 | return (index, difftype, multivol) |
1297 | 187 | 187 | ||
1298 | 188 | 188 | ||
1299 | 189 | class Multivol_Filelike: | 189 | class Multivol_Filelike: |
1301 | 190 | """Emulate a file like object from multivols | 190 | u"""Emulate a file like object from multivols |
1302 | 191 | 191 | ||
1303 | 192 | Maintains a buffer about the size of a volume. When it is read() | 192 | Maintains a buffer about the size of a volume. When it is read() |
1304 | 193 | to the end, pull in more volumes as desired. | 193 | to the end, pull in more volumes as desired. |
1305 | 194 | 194 | ||
1306 | 195 | """ | 195 | """ |
1307 | 196 | def __init__(self, tf, tar_iter, tarinfo_list, index): | 196 | def __init__(self, tf, tar_iter, tarinfo_list, index): |
1309 | 197 | """Initializer. tf is TarFile obj, tarinfo is first tarinfo""" | 197 | u"""Initializer. tf is TarFile obj, tarinfo is first tarinfo""" |
1310 | 198 | self.tf, self.tar_iter = tf, tar_iter | 198 | self.tf, self.tar_iter = tf, tar_iter |
1311 | 199 | self.tarinfo_list = tarinfo_list # must store as list for write access | 199 | self.tarinfo_list = tarinfo_list # must store as list for write access |
1312 | 200 | self.index = index | 200 | self.index = index |
1314 | 201 | self.buffer = "" | 201 | self.buffer = b"" |
1315 | 202 | self.at_end = 0 | 202 | self.at_end = 0 |
1316 | 203 | 203 | ||
1317 | 204 | def read(self, length=-1): | 204 | def read(self, length=-1): |
1319 | 205 | """Read length bytes from file""" | 205 | u"""Read length bytes from file""" |
1320 | 206 | if length < 0: | 206 | if length < 0: |
1321 | 207 | while self.addtobuffer(): | 207 | while self.addtobuffer(): |
1322 | 208 | pass | 208 | pass |
1323 | @@ -218,7 +218,7 @@ | |||
1324 | 218 | return result | 218 | return result |
1325 | 219 | 219 | ||
1326 | 220 | def addtobuffer(self): | 220 | def addtobuffer(self): |
1328 | 221 | """Add next chunk to buffer""" | 221 | u"""Add next chunk to buffer""" |
1329 | 222 | if self.at_end: | 222 | if self.at_end: |
1330 | 223 | return None | 223 | return None |
1331 | 224 | index, difftype, multivol = get_index_from_tarinfo( # @UnusedVariable | 224 | index, difftype, multivol = get_index_from_tarinfo( # @UnusedVariable |
1332 | @@ -242,24 +242,24 @@ | |||
1333 | 242 | return 1 | 242 | return 1 |
1334 | 243 | 243 | ||
1335 | 244 | def close(self): | 244 | def close(self): |
1337 | 245 | """If not at end, read remaining data""" | 245 | u"""If not at end, read remaining data""" |
1338 | 246 | if not self.at_end: | 246 | if not self.at_end: |
1339 | 247 | while 1: | 247 | while 1: |
1341 | 248 | self.buffer = "" | 248 | self.buffer = b"" |
1342 | 249 | if not self.addtobuffer(): | 249 | if not self.addtobuffer(): |
1343 | 250 | break | 250 | break |
1344 | 251 | self.at_end = 1 | 251 | self.at_end = 1 |
1345 | 252 | 252 | ||
1346 | 253 | 253 | ||
1347 | 254 | class PathPatcher(ITRBranch): | 254 | class PathPatcher(ITRBranch): |
1349 | 255 | """Used by DirPatch, process the given basis and diff""" | 255 | u"""Used by DirPatch, process the given basis and diff""" |
1350 | 256 | def __init__(self, base_path): | 256 | def __init__(self, base_path): |
1352 | 257 | """Set base_path, Path of root of tree""" | 257 | u"""Set base_path, Path of root of tree""" |
1353 | 258 | self.base_path = base_path | 258 | self.base_path = base_path |
1354 | 259 | self.dir_diff_ropath = None | 259 | self.dir_diff_ropath = None |
1355 | 260 | 260 | ||
1356 | 261 | def start_process(self, index, basis_path, diff_ropath): | 261 | def start_process(self, index, basis_path, diff_ropath): |
1358 | 262 | """Start processing when diff_ropath is a directory""" | 262 | u"""Start processing when diff_ropath is a directory""" |
1359 | 263 | if not (diff_ropath and diff_ropath.isdir()): | 263 | if not (diff_ropath and diff_ropath.isdir()): |
1360 | 264 | assert index == (), util.uindex(index) # should only happen for first elem | 264 | assert index == (), util.uindex(index) # should only happen for first elem |
1361 | 265 | self.fast_process(index, basis_path, diff_ropath) | 265 | self.fast_process(index, basis_path, diff_ropath) |
1362 | @@ -276,44 +276,44 @@ | |||
1363 | 276 | self.dir_diff_ropath = diff_ropath | 276 | self.dir_diff_ropath = diff_ropath |
1364 | 277 | 277 | ||
1365 | 278 | def end_process(self): | 278 | def end_process(self): |
1367 | 279 | """Copy directory permissions when leaving tree""" | 279 | u"""Copy directory permissions when leaving tree""" |
1368 | 280 | if self.dir_diff_ropath: | 280 | if self.dir_diff_ropath: |
1369 | 281 | self.dir_diff_ropath.copy_attribs(self.dir_basis_path) | 281 | self.dir_diff_ropath.copy_attribs(self.dir_basis_path) |
1370 | 282 | 282 | ||
1371 | 283 | def can_fast_process(self, index, basis_path, diff_ropath): | 283 | def can_fast_process(self, index, basis_path, diff_ropath): |
1373 | 284 | """No need to recurse if diff_ropath isn't a directory""" | 284 | u"""No need to recurse if diff_ropath isn't a directory""" |
1374 | 285 | return not (diff_ropath and diff_ropath.isdir()) | 285 | return not (diff_ropath and diff_ropath.isdir()) |
1375 | 286 | 286 | ||
1376 | 287 | def fast_process(self, index, basis_path, diff_ropath): | 287 | def fast_process(self, index, basis_path, diff_ropath): |
1378 | 288 | """For use when neither is a directory""" | 288 | u"""For use when neither is a directory""" |
1379 | 289 | if not diff_ropath: | 289 | if not diff_ropath: |
1380 | 290 | return # no change | 290 | return # no change |
1381 | 291 | elif not basis_path: | 291 | elif not basis_path: |
1383 | 292 | if diff_ropath.difftype == "deleted": | 292 | if diff_ropath.difftype == u"deleted": |
1384 | 293 | pass # already deleted | 293 | pass # already deleted |
1385 | 294 | else: | 294 | else: |
1386 | 295 | # just copy snapshot over | 295 | # just copy snapshot over |
1387 | 296 | diff_ropath.copy(self.base_path.new_index(index)) | 296 | diff_ropath.copy(self.base_path.new_index(index)) |
1389 | 297 | elif diff_ropath.difftype == "deleted": | 297 | elif diff_ropath.difftype == u"deleted": |
1390 | 298 | if basis_path.isdir(): | 298 | if basis_path.isdir(): |
1391 | 299 | basis_path.deltree() | 299 | basis_path.deltree() |
1392 | 300 | else: | 300 | else: |
1393 | 301 | basis_path.delete() | 301 | basis_path.delete() |
1395 | 302 | elif not basis_path.isreg() or (basis_path.isreg() and diff_ropath.difftype == "snapshot"): | 302 | elif not basis_path.isreg() or (basis_path.isreg() and diff_ropath.difftype == u"snapshot"): |
1396 | 303 | if basis_path.isdir(): | 303 | if basis_path.isdir(): |
1397 | 304 | basis_path.deltree() | 304 | basis_path.deltree() |
1398 | 305 | else: | 305 | else: |
1399 | 306 | basis_path.delete() | 306 | basis_path.delete() |
1400 | 307 | diff_ropath.copy(basis_path) | 307 | diff_ropath.copy(basis_path) |
1401 | 308 | else: | 308 | else: |
1403 | 309 | assert diff_ropath.difftype == "diff", diff_ropath.difftype | 309 | assert diff_ropath.difftype == u"diff", diff_ropath.difftype |
1404 | 310 | basis_path.patch_with_attribs(diff_ropath) | 310 | basis_path.patch_with_attribs(diff_ropath) |
1405 | 311 | 311 | ||
1406 | 312 | 312 | ||
1407 | 313 | class TarFile_FromFileobjs: | 313 | class TarFile_FromFileobjs: |
1409 | 314 | """Like a tarfile.TarFile iterator, but read from multiple fileobjs""" | 314 | u"""Like a tarfile.TarFile iterator, but read from multiple fileobjs""" |
1410 | 315 | def __init__(self, fileobj_iter): | 315 | def __init__(self, fileobj_iter): |
1412 | 316 | """Make new tarinfo iterator | 316 | u"""Make new tarinfo iterator |
1413 | 317 | 317 | ||
1414 | 318 | fileobj_iter should be an iterator of file objects opened for | 318 | fileobj_iter should be an iterator of file objects opened for |
1415 | 319 | reading. They will be closed at end of reading. | 319 | reading. They will be closed at end of reading. |
1416 | @@ -327,11 +327,11 @@ | |||
1417 | 327 | return self | 327 | return self |
1418 | 328 | 328 | ||
1419 | 329 | def set_tarfile(self): | 329 | def set_tarfile(self): |
1421 | 330 | """Set tarfile from next file object, or raise StopIteration""" | 330 | u"""Set tarfile from next file object, or raise StopIteration""" |
1422 | 331 | if self.current_fp: | 331 | if self.current_fp: |
1423 | 332 | assert not self.current_fp.close() | 332 | assert not self.current_fp.close() |
1424 | 333 | self.current_fp = next(self.fileobj_iter) | 333 | self.current_fp = next(self.fileobj_iter) |
1426 | 334 | self.tarfile = util.make_tarfile("r", self.current_fp) | 334 | self.tarfile = util.make_tarfile(u"r", self.current_fp) |
1427 | 335 | self.tar_iter = iter(self.tarfile) | 335 | self.tar_iter = iter(self.tarfile) |
1428 | 336 | 336 | ||
1429 | 337 | def next(self): | 337 | def next(self): |
1430 | @@ -345,12 +345,12 @@ | |||
1431 | 345 | return next(self.tar_iter) | 345 | return next(self.tar_iter) |
1432 | 346 | 346 | ||
1433 | 347 | def extractfile(self, tarinfo): | 347 | def extractfile(self, tarinfo): |
1435 | 348 | """Return data associated with given tarinfo""" | 348 | u"""Return data associated with given tarinfo""" |
1436 | 349 | return self.tarfile.extractfile(tarinfo) | 349 | return self.tarfile.extractfile(tarinfo) |
1437 | 350 | 350 | ||
1438 | 351 | 351 | ||
1439 | 352 | def collate_iters(iter_list): | 352 | def collate_iters(iter_list): |
1441 | 353 | """Collate iterators by index | 353 | u"""Collate iterators by index |
1442 | 354 | 354 | ||
1443 | 355 | Input is a list of n iterators each of which must iterate elements | 355 | Input is a list of n iterators each of which must iterate elements |
1444 | 356 | with an index attribute. The elements must come out in increasing | 356 | with an index attribute. The elements must come out in increasing |
1445 | @@ -371,7 +371,7 @@ | |||
1446 | 371 | elems = overflow[:] | 371 | elems = overflow[:] |
1447 | 372 | 372 | ||
1448 | 373 | def setrorps(overflow, elems): | 373 | def setrorps(overflow, elems): |
1450 | 374 | """Set the overflow and rorps list""" | 374 | u"""Set the overflow and rorps list""" |
1451 | 375 | for i in range(iter_num): | 375 | for i in range(iter_num): |
1452 | 376 | if not overflow[i] and elems[i] is None: | 376 | if not overflow[i] and elems[i] is None: |
1453 | 377 | try: | 377 | try: |
1454 | @@ -381,7 +381,7 @@ | |||
1455 | 381 | elems[i] = None | 381 | elems[i] = None |
1456 | 382 | 382 | ||
1457 | 383 | def getleastindex(elems): | 383 | def getleastindex(elems): |
1459 | 384 | """Return the first index in elems, assuming elems isn't empty""" | 384 | u"""Return the first index in elems, assuming elems isn't empty""" |
1460 | 385 | return min(map(lambda elem: elem.index, filter(lambda x: x, elems))) | 385 | return min(map(lambda elem: elem.index, filter(lambda x: x, elems))) |
1461 | 386 | 386 | ||
1462 | 387 | def yield_tuples(iter_num, overflow, elems): | 387 | def yield_tuples(iter_num, overflow, elems): |
1463 | @@ -403,7 +403,7 @@ | |||
1464 | 403 | 403 | ||
1465 | 404 | 404 | ||
1466 | 405 | class IndexedTuple: | 405 | class IndexedTuple: |
1468 | 406 | """Like a tuple, but has .index (used previously by collate_iters)""" | 406 | u"""Like a tuple, but has .index (used previously by collate_iters)""" |
1469 | 407 | def __init__(self, index, sequence): | 407 | def __init__(self, index, sequence): |
1470 | 408 | self.index = index | 408 | self.index = index |
1471 | 409 | self.data = tuple(sequence) | 409 | self.data = tuple(sequence) |
1472 | @@ -412,7 +412,7 @@ | |||
1473 | 412 | return len(self.data) | 412 | return len(self.data) |
1474 | 413 | 413 | ||
1475 | 414 | def __getitem__(self, key): | 414 | def __getitem__(self, key): |
1477 | 415 | """This only works for numerical keys (easier this way)""" | 415 | u"""This only works for numerical keys (easier this way)""" |
1478 | 416 | return self.data[key] | 416 | return self.data[key] |
1479 | 417 | 417 | ||
1480 | 418 | def __lt__(self, other): | 418 | def __lt__(self, other): |
1481 | @@ -448,11 +448,11 @@ | |||
1482 | 448 | return None | 448 | return None |
1483 | 449 | 449 | ||
1484 | 450 | def __str__(self): | 450 | def __str__(self): |
1486 | 451 | return "(%s).%s" % (", ".join(map(str, self.data)), self.index) | 451 | return u"(%s).%s" % (u", ".join(map(str, self.data)), self.index) |
1487 | 452 | 452 | ||
1488 | 453 | 453 | ||
1489 | 454 | def normalize_ps(patch_sequence): | 454 | def normalize_ps(patch_sequence): |
1491 | 455 | """Given an sequence of ROPath deltas, remove blank and unnecessary | 455 | u"""Given an sequence of ROPath deltas, remove blank and unnecessary |
1492 | 456 | 456 | ||
1493 | 457 | The sequence is assumed to be in patch order (later patches apply | 457 | The sequence is assumed to be in patch order (later patches apply |
1494 | 458 | to earlier ones). A patch is unnecessary if a later one doesn't | 458 | to earlier ones). A patch is unnecessary if a later one doesn't |
1495 | @@ -467,29 +467,29 @@ | |||
1496 | 467 | if delta is not None: | 467 | if delta is not None: |
1497 | 468 | # skip blank entries | 468 | # skip blank entries |
1498 | 469 | result_list.insert(0, delta) | 469 | result_list.insert(0, delta) |
1500 | 470 | if delta.difftype != "diff": | 470 | if delta.difftype != u"diff": |
1501 | 471 | break | 471 | break |
1502 | 472 | i -= 1 | 472 | i -= 1 |
1503 | 473 | return result_list | 473 | return result_list |
1504 | 474 | 474 | ||
1505 | 475 | 475 | ||
1506 | 476 | def patch_seq2ropath(patch_seq): | 476 | def patch_seq2ropath(patch_seq): |
1508 | 477 | """Apply the patches in patch_seq, return single ropath""" | 477 | u"""Apply the patches in patch_seq, return single ropath""" |
1509 | 478 | first = patch_seq[0] | 478 | first = patch_seq[0] |
1512 | 479 | assert first.difftype != "diff", "First patch in sequence " \ | 479 | assert first.difftype != u"diff", u"First patch in sequence " \ |
1513 | 480 | "%s was a diff" % patch_seq | 480 | u"%s was a diff" % patch_seq |
1514 | 481 | if not first.isreg(): | 481 | if not first.isreg(): |
1515 | 482 | # No need to bother with data if not regular file | 482 | # No need to bother with data if not regular file |
1518 | 483 | assert len(patch_seq) == 1, "Patch sequence isn't regular, but " \ | 483 | assert len(patch_seq) == 1, u"Patch sequence isn't regular, but " \ |
1519 | 484 | "has %d entries" % len(patch_seq) | 484 | u"has %d entries" % len(patch_seq) |
1520 | 485 | return first.get_ropath() | 485 | return first.get_ropath() |
1521 | 486 | 486 | ||
1523 | 487 | current_file = first.open("rb") | 487 | current_file = first.open(u"rb") |
1524 | 488 | 488 | ||
1525 | 489 | for delta_ropath in patch_seq[1:]: | 489 | for delta_ropath in patch_seq[1:]: |
1527 | 490 | assert delta_ropath.difftype == "diff", delta_ropath.difftype | 490 | assert delta_ropath.difftype == u"diff", delta_ropath.difftype |
1528 | 491 | if not isinstance(current_file, file): | 491 | if not isinstance(current_file, file): |
1530 | 492 | """ | 492 | u""" |
1531 | 493 | librsync insists on a real file object, which we create manually | 493 | librsync insists on a real file object, which we create manually |
1532 | 494 | by using the duplicity.tempdir to tell us where. | 494 | by using the duplicity.tempdir to tell us where. |
1533 | 495 | 495 | ||
1534 | @@ -503,14 +503,14 @@ | |||
1535 | 503 | tempfp.seek(0) | 503 | tempfp.seek(0) |
1536 | 504 | current_file = tempfp | 504 | current_file = tempfp |
1537 | 505 | current_file = librsync.PatchedFile(current_file, | 505 | current_file = librsync.PatchedFile(current_file, |
1539 | 506 | delta_ropath.open("rb")) | 506 | delta_ropath.open(u"rb")) |
1540 | 507 | result = patch_seq[-1].get_ropath() | 507 | result = patch_seq[-1].get_ropath() |
1541 | 508 | result.setfileobj(current_file) | 508 | result.setfileobj(current_file) |
1542 | 509 | return result | 509 | return result |
1543 | 510 | 510 | ||
1544 | 511 | 511 | ||
1545 | 512 | def integrate_patch_iters(iter_list): | 512 | def integrate_patch_iters(iter_list): |
1547 | 513 | """Combine a list of iterators of ropath patches | 513 | u"""Combine a list of iterators of ropath patches |
1548 | 514 | 514 | ||
1549 | 515 | The iter_list should be sorted in patch order, and the elements in | 515 | The iter_list should be sorted in patch order, and the elements in |
1550 | 516 | each iter_list need to be orderd by index. The output will be an | 516 | each iter_list need to be orderd by index. The output will be an |
1551 | @@ -527,14 +527,14 @@ | |||
1552 | 527 | yield final_ropath | 527 | yield final_ropath |
1553 | 528 | except Exception as e: | 528 | except Exception as e: |
1554 | 529 | filename = normalized[-1].get_ropath().get_relative_path() | 529 | filename = normalized[-1].get_ropath().get_relative_path() |
1556 | 530 | log.Warn(_("Error '%s' patching %s") % | 530 | log.Warn(_(u"Error '%s' patching %s") % |
1557 | 531 | (util.uexc(e), util.fsdecode(filename)), | 531 | (util.uexc(e), util.fsdecode(filename)), |
1558 | 532 | log.WarningCode.cannot_process, | 532 | log.WarningCode.cannot_process, |
1559 | 533 | util.escape(filename)) | 533 | util.escape(filename)) |
1560 | 534 | 534 | ||
1561 | 535 | 535 | ||
1562 | 536 | def tarfiles2rop_iter(tarfile_list, restrict_index=()): | 536 | def tarfiles2rop_iter(tarfile_list, restrict_index=()): |
1564 | 537 | """Integrate tarfiles of diffs into single ROPath iter | 537 | u"""Integrate tarfiles of diffs into single ROPath iter |
1565 | 538 | 538 | ||
1566 | 539 | Then filter out all the diffs in that index which don't start with | 539 | Then filter out all the diffs in that index which don't start with |
1567 | 540 | the restrict_index. | 540 | the restrict_index. |
1568 | @@ -548,7 +548,7 @@ | |||
1569 | 548 | 548 | ||
1570 | 549 | 549 | ||
1571 | 550 | def Write_ROPaths(base_path, rop_iter): | 550 | def Write_ROPaths(base_path, rop_iter): |
1573 | 551 | """Write out ropaths in rop_iter starting at base_path | 551 | u"""Write out ropaths in rop_iter starting at base_path |
1574 | 552 | 552 | ||
1575 | 553 | Returns 1 if something was actually written, 0 otherwise. | 553 | Returns 1 if something was actually written, 0 otherwise. |
1576 | 554 | 554 | ||
1577 | @@ -564,20 +564,20 @@ | |||
1578 | 564 | 564 | ||
1579 | 565 | 565 | ||
1580 | 566 | class ROPath_IterWriter(ITRBranch): | 566 | class ROPath_IterWriter(ITRBranch): |
1582 | 567 | """Used in Write_ROPaths above | 567 | u"""Used in Write_ROPaths above |
1583 | 568 | 568 | ||
1584 | 569 | We need to use an ITR because we have to update the | 569 | We need to use an ITR because we have to update the |
1585 | 570 | permissions/times of directories after we write the files in them. | 570 | permissions/times of directories after we write the files in them. |
1586 | 571 | 571 | ||
1587 | 572 | """ | 572 | """ |
1588 | 573 | def __init__(self, base_path): | 573 | def __init__(self, base_path): |
1590 | 574 | """Set base_path, Path of root of tree""" | 574 | u"""Set base_path, Path of root of tree""" |
1591 | 575 | self.base_path = base_path | 575 | self.base_path = base_path |
1592 | 576 | self.dir_diff_ropath = None | 576 | self.dir_diff_ropath = None |
1593 | 577 | self.dir_new_path = None | 577 | self.dir_new_path = None |
1594 | 578 | 578 | ||
1595 | 579 | def start_process(self, index, ropath): | 579 | def start_process(self, index, ropath): |
1597 | 580 | """Write ropath. Only handles the directory case""" | 580 | u"""Write ropath. Only handles the directory case""" |
1598 | 581 | if not ropath.isdir(): | 581 | if not ropath.isdir(): |
1599 | 582 | # Base may not be a directory, but rest should | 582 | # Base may not be a directory, but rest should |
1600 | 583 | assert ropath.index == (), ropath.index | 583 | assert ropath.index == (), ropath.index |
1601 | @@ -596,19 +596,19 @@ | |||
1602 | 596 | self.dir_diff_ropath = ropath | 596 | self.dir_diff_ropath = ropath |
1603 | 597 | 597 | ||
1604 | 598 | def end_process(self): | 598 | def end_process(self): |
1606 | 599 | """Update information of a directory when leaving it""" | 599 | u"""Update information of a directory when leaving it""" |
1607 | 600 | if self.dir_diff_ropath: | 600 | if self.dir_diff_ropath: |
1608 | 601 | self.dir_diff_ropath.copy_attribs(self.dir_new_path) | 601 | self.dir_diff_ropath.copy_attribs(self.dir_new_path) |
1609 | 602 | 602 | ||
1610 | 603 | def can_fast_process(self, index, ropath): | 603 | def can_fast_process(self, index, ropath): |
1613 | 604 | """Can fast process (no recursion) if ropath isn't a directory""" | 604 | u"""Can fast process (no recursion) if ropath isn't a directory""" |
1614 | 605 | log.Info(_("Writing %s of type %s") % | 605 | log.Info(_(u"Writing %s of type %s") % |
1615 | 606 | (util.fsdecode(ropath.get_relative_path()), ropath.type), | 606 | (util.fsdecode(ropath.get_relative_path()), ropath.type), |
1616 | 607 | log.InfoCode.patch_file_writing, | 607 | log.InfoCode.patch_file_writing, |
1618 | 608 | "%s %s" % (util.escape(ropath.get_relative_path()), ropath.type)) | 608 | u"%s %s" % (util.escape(ropath.get_relative_path()), ropath.type)) |
1619 | 609 | return not ropath.isdir() | 609 | return not ropath.isdir() |
1620 | 610 | 610 | ||
1621 | 611 | def fast_process(self, index, ropath): | 611 | def fast_process(self, index, ropath): |
1623 | 612 | """Write non-directory ropath to destination""" | 612 | u"""Write non-directory ropath to destination""" |
1624 | 613 | if ropath.exists(): | 613 | if ropath.exists(): |
1625 | 614 | ropath.copy(self.base_path.new_index(index)) | 614 | ropath.copy(self.base_path.new_index(index)) |
1626 | 615 | 615 | ||
1627 | === modified file 'duplicity/path.py' | |||
1628 | --- duplicity/path.py 2018-07-24 11:52:33 +0000 | |||
1629 | +++ duplicity/path.py 2018-09-24 21:19:45 +0000 | |||
1630 | @@ -19,7 +19,7 @@ | |||
1631 | 19 | # along with duplicity; if not, write to the Free Software Foundation, | 19 | # along with duplicity; if not, write to the Free Software Foundation, |
1632 | 20 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | 20 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
1633 | 21 | 21 | ||
1635 | 22 | """Wrapper class around a file like "/usr/bin/env" | 22 | u"""Wrapper class around a file like "/usr/bin/env" |
1636 | 23 | 23 | ||
1637 | 24 | This class makes certain file operations more convenient and | 24 | This class makes certain file operations more convenient and |
1638 | 25 | associates stat information with filenames | 25 | associates stat information with filenames |
1639 | @@ -53,7 +53,7 @@ | |||
1640 | 53 | 53 | ||
1641 | 54 | 54 | ||
1642 | 55 | class StatResult: | 55 | class StatResult: |
1644 | 56 | """Used to emulate the output of os.stat() and related""" | 56 | u"""Used to emulate the output of os.stat() and related""" |
1645 | 57 | # st_mode is required by the TarInfo class, but it's unclear how | 57 | # st_mode is required by the TarInfo class, but it's unclear how |
1646 | 58 | # to generate it from file permissions. | 58 | # to generate it from file permissions. |
1647 | 59 | st_mode = 0 | 59 | st_mode = 0 |
1648 | @@ -64,158 +64,158 @@ | |||
1649 | 64 | 64 | ||
1650 | 65 | 65 | ||
1651 | 66 | class ROPath: | 66 | class ROPath: |
1653 | 67 | """Read only Path | 67 | u"""Read only Path |
1654 | 68 | 68 | ||
1655 | 69 | Objects of this class don't represent real files, so they don't | 69 | Objects of this class don't represent real files, so they don't |
1656 | 70 | have a name. They are required to be indexed though. | 70 | have a name. They are required to be indexed though. |
1657 | 71 | 71 | ||
1658 | 72 | """ | 72 | """ |
1659 | 73 | def __init__(self, index, stat=None): | 73 | def __init__(self, index, stat=None): |
1661 | 74 | """ROPath initializer""" | 74 | u"""ROPath initializer""" |
1662 | 75 | self.opened, self.fileobj = None, None | 75 | self.opened, self.fileobj = None, None |
1663 | 76 | self.index = index | 76 | self.index = index |
1664 | 77 | self.stat, self.type = None, None | 77 | self.stat, self.type = None, None |
1665 | 78 | self.mode, self.devnums = None, None | 78 | self.mode, self.devnums = None, None |
1666 | 79 | 79 | ||
1667 | 80 | def set_from_stat(self): | 80 | def set_from_stat(self): |
1669 | 81 | """Set the value of self.type, self.mode from self.stat""" | 81 | u"""Set the value of self.type, self.mode from self.stat""" |
1670 | 82 | if not self.stat: | 82 | if not self.stat: |
1671 | 83 | self.type = None | 83 | self.type = None |
1672 | 84 | 84 | ||
1673 | 85 | st_mode = self.stat.st_mode | 85 | st_mode = self.stat.st_mode |
1674 | 86 | if stat.S_ISREG(st_mode): | 86 | if stat.S_ISREG(st_mode): |
1676 | 87 | self.type = "reg" | 87 | self.type = u"reg" |
1677 | 88 | elif stat.S_ISDIR(st_mode): | 88 | elif stat.S_ISDIR(st_mode): |
1679 | 89 | self.type = "dir" | 89 | self.type = u"dir" |
1680 | 90 | elif stat.S_ISLNK(st_mode): | 90 | elif stat.S_ISLNK(st_mode): |
1682 | 91 | self.type = "sym" | 91 | self.type = u"sym" |
1683 | 92 | elif stat.S_ISFIFO(st_mode): | 92 | elif stat.S_ISFIFO(st_mode): |
1685 | 93 | self.type = "fifo" | 93 | self.type = u"fifo" |
1686 | 94 | elif stat.S_ISSOCK(st_mode): | 94 | elif stat.S_ISSOCK(st_mode): |
1687 | 95 | raise PathException(util.fsdecode(self.get_relative_path()) + | 95 | raise PathException(util.fsdecode(self.get_relative_path()) + |
1688 | 96 | u"is a socket, unsupported by tar") | 96 | u"is a socket, unsupported by tar") |
1690 | 97 | self.type = "sock" | 97 | self.type = u"sock" |
1691 | 98 | elif stat.S_ISCHR(st_mode): | 98 | elif stat.S_ISCHR(st_mode): |
1693 | 99 | self.type = "chr" | 99 | self.type = u"chr" |
1694 | 100 | elif stat.S_ISBLK(st_mode): | 100 | elif stat.S_ISBLK(st_mode): |
1696 | 101 | self.type = "blk" | 101 | self.type = u"blk" |
1697 | 102 | else: | 102 | else: |
1699 | 103 | raise PathException("Unknown type") | 103 | raise PathException(u"Unknown type") |
1700 | 104 | 104 | ||
1701 | 105 | self.mode = stat.S_IMODE(st_mode) | 105 | self.mode = stat.S_IMODE(st_mode) |
1703 | 106 | if self.type in ("chr", "blk"): | 106 | if self.type in (u"chr", u"blk"): |
1704 | 107 | try: | 107 | try: |
1705 | 108 | self.devnums = (os.major(self.stat.st_rdev), | 108 | self.devnums = (os.major(self.stat.st_rdev), |
1706 | 109 | os.minor(self.stat.st_rdev)) | 109 | os.minor(self.stat.st_rdev)) |
1707 | 110 | except: | 110 | except: |
1709 | 111 | log.Warn(_("Warning: %s invalid devnums (0x%X), treating as (0, 0).") | 111 | log.Warn(_(u"Warning: %s invalid devnums (0x%X), treating as (0, 0).") |
1710 | 112 | % (util.fsdecode(self.get_relative_path()), self.stat.st_rdev)) | 112 | % (util.fsdecode(self.get_relative_path()), self.stat.st_rdev)) |
1711 | 113 | self.devnums = (0, 0) | 113 | self.devnums = (0, 0) |
1712 | 114 | 114 | ||
1713 | 115 | def blank(self): | 115 | def blank(self): |
1715 | 116 | """Blank out self - set type and stat to None""" | 116 | u"""Blank out self - set type and stat to None""" |
1716 | 117 | self.type, self.stat = None, None | 117 | self.type, self.stat = None, None |
1717 | 118 | 118 | ||
1718 | 119 | def exists(self): | 119 | def exists(self): |
1720 | 120 | """True if corresponding file exists""" | 120 | u"""True if corresponding file exists""" |
1721 | 121 | return self.type | 121 | return self.type |
1722 | 122 | 122 | ||
1723 | 123 | def isreg(self): | 123 | def isreg(self): |
1726 | 124 | """True if self corresponds to regular file""" | 124 | u"""True if self corresponds to regular file""" |
1727 | 125 | return self.type == "reg" | 125 | return self.type == u"reg" |
1728 | 126 | 126 | ||
1729 | 127 | def isdir(self): | 127 | def isdir(self): |
1732 | 128 | """True if self is dir""" | 128 | u"""True if self is dir""" |
1733 | 129 | return self.type == "dir" | 129 | return self.type == u"dir" |
1734 | 130 | 130 | ||
1735 | 131 | def issym(self): | 131 | def issym(self): |
1738 | 132 | """True if self is sym""" | 132 | u"""True if self is sym""" |
1739 | 133 | return self.type == "sym" | 133 | return self.type == u"sym" |
1740 | 134 | 134 | ||
1741 | 135 | def isfifo(self): | 135 | def isfifo(self): |
1744 | 136 | """True if self is fifo""" | 136 | u"""True if self is fifo""" |
1745 | 137 | return self.type == "fifo" | 137 | return self.type == u"fifo" |
1746 | 138 | 138 | ||
1747 | 139 | def issock(self): | 139 | def issock(self): |
1750 | 140 | """True if self is socket""" | 140 | u"""True if self is socket""" |
1751 | 141 | return self.type == "sock" | 141 | return self.type == u"sock" |
1752 | 142 | 142 | ||
1753 | 143 | def isdev(self): | 143 | def isdev(self): |
1756 | 144 | """True if self is a device file""" | 144 | u"""True if self is a device file""" |
1757 | 145 | return self.type == "chr" or self.type == "blk" | 145 | return self.type == u"chr" or self.type == u"blk" |
1758 | 146 | 146 | ||
1759 | 147 | def getdevloc(self): | 147 | def getdevloc(self): |
1761 | 148 | """Return device number path resides on""" | 148 | u"""Return device number path resides on""" |
1762 | 149 | return self.stat.st_dev | 149 | return self.stat.st_dev |
1763 | 150 | 150 | ||
1764 | 151 | def getsize(self): | 151 | def getsize(self): |
1766 | 152 | """Return length in bytes from stat object""" | 152 | u"""Return length in bytes from stat object""" |
1767 | 153 | return self.stat.st_size | 153 | return self.stat.st_size |
1768 | 154 | 154 | ||
1769 | 155 | def getmtime(self): | 155 | def getmtime(self): |
1771 | 156 | """Return mod time of path in seconds""" | 156 | u"""Return mod time of path in seconds""" |
1772 | 157 | return int(self.stat.st_mtime) | 157 | return int(self.stat.st_mtime) |
1773 | 158 | 158 | ||
1774 | 159 | def get_relative_path(self): | 159 | def get_relative_path(self): |
1776 | 160 | """Return relative path, created from index""" | 160 | u"""Return relative path, created from index""" |
1777 | 161 | if self.index: | 161 | if self.index: |
1779 | 162 | return "/".join(self.index) | 162 | return b"/".join(self.index) |
1780 | 163 | else: | 163 | else: |
1782 | 164 | return "." | 164 | return b"." |
1783 | 165 | 165 | ||
1784 | 166 | def getperms(self): | 166 | def getperms(self): |
1786 | 167 | """Return permissions mode, owner and group""" | 167 | u"""Return permissions mode, owner and group""" |
1787 | 168 | s1 = self.stat | 168 | s1 = self.stat |
1789 | 169 | return '%s:%s %o' % (s1.st_uid, s1.st_gid, self.mode) | 169 | return u'%s:%s %o' % (s1.st_uid, s1.st_gid, self.mode) |
1790 | 170 | 170 | ||
1791 | 171 | def open(self, mode): | 171 | def open(self, mode): |
1795 | 172 | """Return fileobj associated with self""" | 172 | u"""Return fileobj associated with self""" |
1796 | 173 | assert mode == "rb" and self.fileobj and not self.opened, \ | 173 | assert mode == u"rb" and self.fileobj and not self.opened, \ |
1797 | 174 | "%s %s %s" % (mode, self.fileobj, self.opened) | 174 | u"%s %s %s" % (mode, self.fileobj, self.opened) |
1798 | 175 | self.opened = 1 | 175 | self.opened = 1 |
1799 | 176 | return self.fileobj | 176 | return self.fileobj |
1800 | 177 | 177 | ||
1801 | 178 | def get_data(self): | 178 | def get_data(self): |
1804 | 179 | """Return contents of associated fileobj in string""" | 179 | u"""Return contents of associated fileobj in string""" |
1805 | 180 | fin = self.open("rb") | 180 | fin = self.open(u"rb") |
1806 | 181 | buf = fin.read() | 181 | buf = fin.read() |
1807 | 182 | assert not fin.close() | 182 | assert not fin.close() |
1808 | 183 | return buf | 183 | return buf |
1809 | 184 | 184 | ||
1810 | 185 | def setfileobj(self, fileobj): | 185 | def setfileobj(self, fileobj): |
1812 | 186 | """Set file object returned by open()""" | 186 | u"""Set file object returned by open()""" |
1813 | 187 | assert not self.fileobj | 187 | assert not self.fileobj |
1814 | 188 | self.fileobj = fileobj | 188 | self.fileobj = fileobj |
1815 | 189 | self.opened = None | 189 | self.opened = None |
1816 | 190 | 190 | ||
1817 | 191 | def init_from_tarinfo(self, tarinfo): | 191 | def init_from_tarinfo(self, tarinfo): |
1819 | 192 | """Set data from tarinfo object (part of tarfile module)""" | 192 | u"""Set data from tarinfo object (part of tarfile module)""" |
1820 | 193 | # Set the typepp | 193 | # Set the typepp |
1821 | 194 | type = tarinfo.type | 194 | type = tarinfo.type |
1822 | 195 | if type == tarfile.REGTYPE or type == tarfile.AREGTYPE: | 195 | if type == tarfile.REGTYPE or type == tarfile.AREGTYPE: |
1824 | 196 | self.type = "reg" | 196 | self.type = u"reg" |
1825 | 197 | elif type == tarfile.LNKTYPE: | 197 | elif type == tarfile.LNKTYPE: |
1827 | 198 | raise PathException("Hard links not supported yet") | 198 | raise PathException(u"Hard links not supported yet") |
1828 | 199 | elif type == tarfile.SYMTYPE: | 199 | elif type == tarfile.SYMTYPE: |
1830 | 200 | self.type = "sym" | 200 | self.type = u"sym" |
1831 | 201 | self.symtext = tarinfo.linkname | 201 | self.symtext = tarinfo.linkname |
1832 | 202 | elif type == tarfile.CHRTYPE: | 202 | elif type == tarfile.CHRTYPE: |
1834 | 203 | self.type = "chr" | 203 | self.type = u"chr" |
1835 | 204 | self.devnums = (tarinfo.devmajor, tarinfo.devminor) | 204 | self.devnums = (tarinfo.devmajor, tarinfo.devminor) |
1836 | 205 | elif type == tarfile.BLKTYPE: | 205 | elif type == tarfile.BLKTYPE: |
1838 | 206 | self.type = "blk" | 206 | self.type = u"blk" |
1839 | 207 | self.devnums = (tarinfo.devmajor, tarinfo.devminor) | 207 | self.devnums = (tarinfo.devmajor, tarinfo.devminor) |
1840 | 208 | elif type == tarfile.DIRTYPE: | 208 | elif type == tarfile.DIRTYPE: |
1842 | 209 | self.type = "dir" | 209 | self.type = u"dir" |
1843 | 210 | elif type == tarfile.FIFOTYPE: | 210 | elif type == tarfile.FIFOTYPE: |
1845 | 211 | self.type = "fifo" | 211 | self.type = u"fifo" |
1846 | 212 | else: | 212 | else: |
1848 | 213 | raise PathException("Unknown tarinfo type %s" % (type,)) | 213 | raise PathException(u"Unknown tarinfo type %s" % (type,)) |
1849 | 214 | 214 | ||
1850 | 215 | self.mode = tarinfo.mode | 215 | self.mode = tarinfo.mode |
1851 | 216 | self.stat = StatResult() | 216 | self.stat = StatResult() |
1852 | 217 | 217 | ||
1854 | 218 | """ Set user and group id | 218 | u""" Set user and group id |
1855 | 219 | use numeric id if name lookup fails | 219 | use numeric id if name lookup fails |
1856 | 220 | OR | 220 | OR |
1857 | 221 | --numeric-owner is set | 221 | --numeric-owner is set |
1858 | @@ -235,13 +235,13 @@ | |||
1859 | 235 | 235 | ||
1860 | 236 | self.stat.st_mtime = int(tarinfo.mtime) | 236 | self.stat.st_mtime = int(tarinfo.mtime) |
1861 | 237 | if self.stat.st_mtime < 0: | 237 | if self.stat.st_mtime < 0: |
1863 | 238 | log.Warn(_("Warning: %s has negative mtime, treating as 0.") | 238 | log.Warn(_(u"Warning: %s has negative mtime, treating as 0.") |
1864 | 239 | % (tarinfo.uc_name)) | 239 | % (tarinfo.uc_name)) |
1865 | 240 | self.stat.st_mtime = 0 | 240 | self.stat.st_mtime = 0 |
1866 | 241 | self.stat.st_size = tarinfo.size | 241 | self.stat.st_size = tarinfo.size |
1867 | 242 | 242 | ||
1868 | 243 | def get_ropath(self): | 243 | def get_ropath(self): |
1870 | 244 | """Return ropath copy of self""" | 244 | u"""Return ropath copy of self""" |
1871 | 245 | new_ropath = ROPath(self.index, self.stat) | 245 | new_ropath = ROPath(self.index, self.stat) |
1872 | 246 | new_ropath.type, new_ropath.mode = self.type, self.mode | 246 | new_ropath.type, new_ropath.mode = self.type, self.mode |
1873 | 247 | if self.issym(): | 247 | if self.issym(): |
1874 | @@ -253,7 +253,7 @@ | |||
1875 | 253 | return new_ropath | 253 | return new_ropath |
1876 | 254 | 254 | ||
1877 | 255 | def get_tarinfo(self): | 255 | def get_tarinfo(self): |
1879 | 256 | """Generate a tarfile.TarInfo object based on self | 256 | u"""Generate a tarfile.TarInfo object based on self |
1880 | 257 | 257 | ||
1881 | 258 | Doesn't set size based on stat, because we may want to replace | 258 | Doesn't set size based on stat, because we may want to replace |
1882 | 259 | data with other stream. Size should be set separately by | 259 | data with other stream. Size should be set separately by |
1883 | @@ -262,11 +262,11 @@ | |||
1884 | 262 | """ | 262 | """ |
1885 | 263 | ti = tarfile.TarInfo() | 263 | ti = tarfile.TarInfo() |
1886 | 264 | if self.index: | 264 | if self.index: |
1888 | 265 | ti.name = "/".join(self.index) | 265 | ti.name = b"/".join(self.index) |
1889 | 266 | else: | 266 | else: |
1891 | 267 | ti.name = "." | 267 | ti.name = b"." |
1892 | 268 | if self.isdir(): | 268 | if self.isdir(): |
1894 | 269 | ti.name += "/" # tar dir naming convention | 269 | ti.name += b"/" # tar dir naming convention |
1895 | 270 | 270 | ||
1896 | 271 | ti.size = 0 | 271 | ti.size = 0 |
1897 | 272 | if self.type: | 272 | if self.type: |
1898 | @@ -283,18 +283,18 @@ | |||
1899 | 283 | ti.type = tarfile.SYMTYPE | 283 | ti.type = tarfile.SYMTYPE |
1900 | 284 | ti.linkname = self.symtext | 284 | ti.linkname = self.symtext |
1901 | 285 | elif self.isdev(): | 285 | elif self.isdev(): |
1903 | 286 | if self.type == "chr": | 286 | if self.type == u"chr": |
1904 | 287 | ti.type = tarfile.CHRTYPE | 287 | ti.type = tarfile.CHRTYPE |
1905 | 288 | else: | 288 | else: |
1906 | 289 | ti.type = tarfile.BLKTYPE | 289 | ti.type = tarfile.BLKTYPE |
1907 | 290 | ti.devmajor, ti.devminor = self.devnums | 290 | ti.devmajor, ti.devminor = self.devnums |
1908 | 291 | else: | 291 | else: |
1910 | 292 | raise PathException("Unrecognized type " + str(self.type)) | 292 | raise PathException(u"Unrecognized type " + str(self.type)) |
1911 | 293 | 293 | ||
1912 | 294 | ti.mode = self.mode | 294 | ti.mode = self.mode |
1913 | 295 | ti.uid, ti.gid = self.stat.st_uid, self.stat.st_gid | 295 | ti.uid, ti.gid = self.stat.st_uid, self.stat.st_gid |
1914 | 296 | if self.stat.st_mtime < 0: | 296 | if self.stat.st_mtime < 0: |
1916 | 297 | log.Warn(_("Warning: %s has negative mtime, treating as 0.") | 297 | log.Warn(_(u"Warning: %s has negative mtime, treating as 0.") |
1917 | 298 | % (util.fsdecode(self.get_relative_path()))) | 298 | % (util.fsdecode(self.get_relative_path()))) |
1918 | 299 | ti.mtime = 0 | 299 | ti.mtime = 0 |
1919 | 300 | else: | 300 | else: |
1920 | @@ -303,14 +303,14 @@ | |||
1921 | 303 | try: | 303 | try: |
1922 | 304 | ti.uname = cached_ops.getpwuid(ti.uid)[0] | 304 | ti.uname = cached_ops.getpwuid(ti.uid)[0] |
1923 | 305 | except KeyError: | 305 | except KeyError: |
1925 | 306 | ti.uname = '' | 306 | ti.uname = u'' |
1926 | 307 | try: | 307 | try: |
1927 | 308 | ti.gname = cached_ops.getgrgid(ti.gid)[0] | 308 | ti.gname = cached_ops.getgrgid(ti.gid)[0] |
1928 | 309 | except KeyError: | 309 | except KeyError: |
1930 | 310 | ti.gname = '' | 310 | ti.gname = b'' |
1931 | 311 | 311 | ||
1932 | 312 | if ti.type in (tarfile.CHRTYPE, tarfile.BLKTYPE): | 312 | if ti.type in (tarfile.CHRTYPE, tarfile.BLKTYPE): |
1934 | 313 | if hasattr(os, "major") and hasattr(os, "minor"): | 313 | if hasattr(os, u"major") and hasattr(os, u"minor"): |
1935 | 314 | ti.devmajor, ti.devminor = self.devnums | 314 | ti.devmajor, ti.devminor = self.devnums |
1936 | 315 | else: | 315 | else: |
1937 | 316 | # Currently we depend on an uninitialized tarinfo file to | 316 | # Currently we depend on an uninitialized tarinfo file to |
1938 | @@ -320,7 +320,7 @@ | |||
1939 | 320 | return ti | 320 | return ti |
1940 | 321 | 321 | ||
1941 | 322 | def __eq__(self, other): | 322 | def __eq__(self, other): |
1943 | 323 | """Used to compare two ROPaths. Doesn't look at fileobjs""" | 323 | u"""Used to compare two ROPaths. Doesn't look at fileobjs""" |
1944 | 324 | if not self.type and not other.type: | 324 | if not self.type and not other.type: |
1945 | 325 | return 1 # neither exists | 325 | return 1 # neither exists |
1946 | 326 | if not self.stat and other.stat or not other.stat and self.stat: | 326 | if not self.stat and other.stat or not other.stat and self.stat: |
1947 | @@ -348,7 +348,7 @@ | |||
1948 | 348 | return not self.__eq__(other) | 348 | return not self.__eq__(other) |
1949 | 349 | 349 | ||
1950 | 350 | def compare_verbose(self, other, include_data=0): | 350 | def compare_verbose(self, other, include_data=0): |
1952 | 351 | """Compare ROPaths like __eq__, but log reason if different | 351 | u"""Compare ROPaths like __eq__, but log reason if different |
1953 | 352 | 352 | ||
1954 | 353 | This is placed in a separate function from __eq__ because | 353 | This is placed in a separate function from __eq__ because |
1955 | 354 | __eq__ should be very time sensitive, and logging statements | 354 | __eq__ should be very time sensitive, and logging statements |
1956 | @@ -358,7 +358,7 @@ | |||
1957 | 358 | 358 | ||
1958 | 359 | """ | 359 | """ |
1959 | 360 | def log_diff(log_string): | 360 | def log_diff(log_string): |
1961 | 361 | log_str = _("Difference found:") + u" " + log_string | 361 | log_str = _(u"Difference found:") + u" " + log_string |
1962 | 362 | log.Notice(log_str % (util.fsdecode(self.get_relative_path()))) | 362 | log.Notice(log_str % (util.fsdecode(self.get_relative_path()))) |
1963 | 363 | 363 | ||
1964 | 364 | if include_data is False: | 364 | if include_data is False: |
1965 | @@ -367,24 +367,24 @@ | |||
1966 | 367 | if not self.type and not other.type: | 367 | if not self.type and not other.type: |
1967 | 368 | return 1 | 368 | return 1 |
1968 | 369 | if not self.stat and other.stat: | 369 | if not self.stat and other.stat: |
1970 | 370 | log_diff(_("New file %s")) | 370 | log_diff(_(u"New file %s")) |
1971 | 371 | return 0 | 371 | return 0 |
1972 | 372 | if not other.stat and self.stat: | 372 | if not other.stat and self.stat: |
1974 | 373 | log_diff(_("File %s is missing")) | 373 | log_diff(_(u"File %s is missing")) |
1975 | 374 | return 0 | 374 | return 0 |
1976 | 375 | if self.type != other.type: | 375 | if self.type != other.type: |
1978 | 376 | log_diff(_("File %%s has type %s, expected %s") % | 376 | log_diff(_(u"File %%s has type %s, expected %s") % |
1979 | 377 | (other.type, self.type)) | 377 | (other.type, self.type)) |
1980 | 378 | return 0 | 378 | return 0 |
1981 | 379 | 379 | ||
1982 | 380 | if self.isreg() or self.isdir() or self.isfifo(): | 380 | if self.isreg() or self.isdir() or self.isfifo(): |
1983 | 381 | if not self.perms_equal(other): | 381 | if not self.perms_equal(other): |
1985 | 382 | log_diff(_("File %%s has permissions %s, expected %s") % | 382 | log_diff(_(u"File %%s has permissions %s, expected %s") % |
1986 | 383 | (other.getperms(), self.getperms())) | 383 | (other.getperms(), self.getperms())) |
1987 | 384 | return 0 | 384 | return 0 |
1988 | 385 | if ((int(self.stat.st_mtime) != int(other.stat.st_mtime)) and | 385 | if ((int(self.stat.st_mtime) != int(other.stat.st_mtime)) and |
1989 | 386 | (self.stat.st_mtime > 0 or other.stat.st_mtime > 0)): | 386 | (self.stat.st_mtime > 0 or other.stat.st_mtime > 0)): |
1991 | 387 | log_diff(_("File %%s has mtime %s, expected %s") % | 387 | log_diff(_(u"File %%s has mtime %s, expected %s") % |
1992 | 388 | (dup_time.timetopretty(int(other.stat.st_mtime)), | 388 | (dup_time.timetopretty(int(other.stat.st_mtime)), |
1993 | 389 | dup_time.timetopretty(int(self.stat.st_mtime)))) | 389 | dup_time.timetopretty(int(self.stat.st_mtime)))) |
1994 | 390 | return 0 | 390 | return 0 |
1995 | @@ -392,33 +392,33 @@ | |||
1996 | 392 | if self.compare_data(other): | 392 | if self.compare_data(other): |
1997 | 393 | return 1 | 393 | return 1 |
1998 | 394 | else: | 394 | else: |
2000 | 395 | log_diff(_("Data for file %s is different")) | 395 | log_diff(_(u"Data for file %s is different")) |
2001 | 396 | return 0 | 396 | return 0 |
2002 | 397 | else: | 397 | else: |
2003 | 398 | return 1 | 398 | return 1 |
2004 | 399 | elif self.issym(): | 399 | elif self.issym(): |
2006 | 400 | if self.symtext == other.symtext or self.symtext + "/" == other.symtext: | 400 | if self.symtext == other.symtext or self.symtext + u"/" == other.symtext: |
2007 | 401 | return 1 | 401 | return 1 |
2008 | 402 | else: | 402 | else: |
2010 | 403 | log_diff(_("Symlink %%s points to %s, expected %s") % | 403 | log_diff(_(u"Symlink %%s points to %s, expected %s") % |
2011 | 404 | (other.symtext, self.symtext)) | 404 | (other.symtext, self.symtext)) |
2012 | 405 | return 0 | 405 | return 0 |
2013 | 406 | elif self.isdev(): | 406 | elif self.isdev(): |
2014 | 407 | if not self.perms_equal(other): | 407 | if not self.perms_equal(other): |
2016 | 408 | log_diff(_("File %%s has permissions %s, expected %s") % | 408 | log_diff(_(u"File %%s has permissions %s, expected %s") % |
2017 | 409 | (other.getperms(), self.getperms())) | 409 | (other.getperms(), self.getperms())) |
2018 | 410 | return 0 | 410 | return 0 |
2019 | 411 | if self.devnums != other.devnums: | 411 | if self.devnums != other.devnums: |
2021 | 412 | log_diff(_("Device file %%s has numbers %s, expected %s") | 412 | log_diff(_(u"Device file %%s has numbers %s, expected %s") |
2022 | 413 | % (other.devnums, self.devnums)) | 413 | % (other.devnums, self.devnums)) |
2023 | 414 | return 0 | 414 | return 0 |
2024 | 415 | return 1 | 415 | return 1 |
2025 | 416 | assert 0 | 416 | assert 0 |
2026 | 417 | 417 | ||
2027 | 418 | def compare_data(self, other): | 418 | def compare_data(self, other): |
2031 | 419 | """Compare data from two regular files, return true if same""" | 419 | u"""Compare data from two regular files, return true if same""" |
2032 | 420 | f1 = self.open("rb") | 420 | f1 = self.open(u"rb") |
2033 | 421 | f2 = other.open("rb") | 421 | f2 = other.open(u"rb") |
2034 | 422 | 422 | ||
2035 | 423 | def close(): | 423 | def close(): |
2036 | 424 | assert not f1.close() | 424 | assert not f1.close() |
2037 | @@ -435,15 +435,15 @@ | |||
2038 | 435 | return 1 | 435 | return 1 |
2039 | 436 | 436 | ||
2040 | 437 | def perms_equal(self, other): | 437 | def perms_equal(self, other): |
2042 | 438 | """True if self and other have same permissions and ownership""" | 438 | u"""True if self and other have same permissions and ownership""" |
2043 | 439 | s1, s2 = self.stat, other.stat | 439 | s1, s2 = self.stat, other.stat |
2044 | 440 | return (self.mode == other.mode and | 440 | return (self.mode == other.mode and |
2045 | 441 | s1.st_gid == s2.st_gid and s1.st_uid == s2.st_uid) | 441 | s1.st_gid == s2.st_gid and s1.st_uid == s2.st_uid) |
2046 | 442 | 442 | ||
2047 | 443 | def copy(self, other): | 443 | def copy(self, other): |
2049 | 444 | """Copy self to other. Also copies data. Other must be Path""" | 444 | u"""Copy self to other. Also copies data. Other must be Path""" |
2050 | 445 | if self.isreg(): | 445 | if self.isreg(): |
2052 | 446 | other.writefileobj(self.open("rb")) | 446 | other.writefileobj(self.open(u"rb")) |
2053 | 447 | elif self.isdir(): | 447 | elif self.isdir(): |
2054 | 448 | os.mkdir(other.name) | 448 | os.mkdir(other.name) |
2055 | 449 | elif self.issym(): | 449 | elif self.issym(): |
2056 | @@ -456,15 +456,15 @@ | |||
2057 | 456 | elif self.issock(): | 456 | elif self.issock(): |
2058 | 457 | socket.socket(socket.AF_UNIX).bind(other.name) | 457 | socket.socket(socket.AF_UNIX).bind(other.name) |
2059 | 458 | elif self.isdev(): | 458 | elif self.isdev(): |
2062 | 459 | if self.type == "chr": | 459 | if self.type == u"chr": |
2063 | 460 | devtype = "c" | 460 | devtype = u"c" |
2064 | 461 | else: | 461 | else: |
2066 | 462 | devtype = "b" | 462 | devtype = u"b" |
2067 | 463 | other.makedev(devtype, *self.devnums) | 463 | other.makedev(devtype, *self.devnums) |
2068 | 464 | self.copy_attribs(other) | 464 | self.copy_attribs(other) |
2069 | 465 | 465 | ||
2070 | 466 | def copy_attribs(self, other): | 466 | def copy_attribs(self, other): |
2072 | 467 | """Only copy attributes from self to other""" | 467 | u"""Only copy attributes from self to other""" |
2073 | 468 | if isinstance(other, Path): | 468 | if isinstance(other, Path): |
2074 | 469 | if self.stat is not None: | 469 | if self.stat is not None: |
2075 | 470 | util.maybe_ignore_errors(lambda: os.chown(other.name, self.stat.st_uid, self.stat.st_gid)) | 470 | util.maybe_ignore_errors(lambda: os.chown(other.name, self.stat.st_uid, self.stat.st_gid)) |
2076 | @@ -481,18 +481,18 @@ | |||
2077 | 481 | other.mode = self.mode | 481 | other.mode = self.mode |
2078 | 482 | 482 | ||
2079 | 483 | def __unicode__(self): | 483 | def __unicode__(self): |
2081 | 484 | """Return string representation""" | 484 | u"""Return string representation""" |
2082 | 485 | return u"(%s %s)" % (util.uindex(self.index), self.type) | 485 | return u"(%s %s)" % (util.uindex(self.index), self.type) |
2083 | 486 | 486 | ||
2084 | 487 | 487 | ||
2085 | 488 | class Path(ROPath): | 488 | class Path(ROPath): |
2087 | 489 | """ | 489 | u""" |
2088 | 490 | Path class - wrapper around ordinary local files | 490 | Path class - wrapper around ordinary local files |
2089 | 491 | 491 | ||
2090 | 492 | Besides caching stat() results, this class organizes various file | 492 | Besides caching stat() results, this class organizes various file |
2091 | 493 | code. | 493 | code. |
2092 | 494 | """ | 494 | """ |
2094 | 495 | regex_chars_to_quote = re.compile("[\\\\\\\"\\$`]") | 495 | regex_chars_to_quote = re.compile(u"[\\\\\\\"\\$`]") |
2095 | 496 | 496 | ||
2096 | 497 | def rename_index(self, index): | 497 | def rename_index(self, index): |
2097 | 498 | if not globals.rename or not index: | 498 | if not globals.rename or not index: |
2098 | @@ -508,7 +508,7 @@ | |||
2099 | 508 | return index # no rename found | 508 | return index # no rename found |
2100 | 509 | 509 | ||
2101 | 510 | def __init__(self, base, index=()): | 510 | def __init__(self, base, index=()): |
2103 | 511 | """Path initializer""" | 511 | u"""Path initializer""" |
2104 | 512 | # self.opened should be true if the file has been opened, and | 512 | # self.opened should be true if the file has been opened, and |
2105 | 513 | # self.fileobj can override returned fileobj | 513 | # self.fileobj can override returned fileobj |
2106 | 514 | self.opened, self.fileobj = None, None | 514 | self.opened, self.fileobj = None, None |
2107 | @@ -530,7 +530,7 @@ | |||
2108 | 530 | self.setdata() | 530 | self.setdata() |
2109 | 531 | 531 | ||
2110 | 532 | def setdata(self): | 532 | def setdata(self): |
2112 | 533 | """Refresh stat cache""" | 533 | u"""Refresh stat cache""" |
2113 | 534 | try: | 534 | try: |
2114 | 535 | # We may be asked to look at the target of symlinks rather than | 535 | # We may be asked to look at the target of symlinks rather than |
2115 | 536 | # the link itself. | 536 | # the link itself. |
2116 | @@ -540,7 +540,7 @@ | |||
2117 | 540 | self.stat = os.lstat(self.name) | 540 | self.stat = os.lstat(self.name) |
2118 | 541 | except OSError as e: | 541 | except OSError as e: |
2119 | 542 | err_string = errno.errorcode[e[0]] | 542 | err_string = errno.errorcode[e[0]] |
2121 | 543 | if err_string in ["ENOENT", "ENOTDIR", "ELOOP", "ENOTCONN"]: | 543 | if err_string in [u"ENOENT", u"ENOTDIR", u"ELOOP", u"ENOTCONN"]: |
2122 | 544 | self.stat, self.type = None, None # file doesn't exist | 544 | self.stat, self.type = None, None # file doesn't exist |
2123 | 545 | self.mode = None | 545 | self.mode = None |
2124 | 546 | else: | 546 | else: |
2125 | @@ -551,23 +551,23 @@ | |||
2126 | 551 | self.symtext = os.readlink(self.name) | 551 | self.symtext = os.readlink(self.name) |
2127 | 552 | 552 | ||
2128 | 553 | def append(self, ext): | 553 | def append(self, ext): |
2130 | 554 | """Return new Path with ext added to index""" | 554 | u"""Return new Path with ext added to index""" |
2131 | 555 | return self.__class__(self.base, self.index + (ext,)) | 555 | return self.__class__(self.base, self.index + (ext,)) |
2132 | 556 | 556 | ||
2133 | 557 | def new_index(self, index): | 557 | def new_index(self, index): |
2135 | 558 | """Return new Path with index index""" | 558 | u"""Return new Path with index index""" |
2136 | 559 | return self.__class__(self.base, index) | 559 | return self.__class__(self.base, index) |
2137 | 560 | 560 | ||
2138 | 561 | def listdir(self): | 561 | def listdir(self): |
2140 | 562 | """Return list generated by os.listdir""" | 562 | u"""Return list generated by os.listdir""" |
2141 | 563 | return os.listdir(self.name) | 563 | return os.listdir(self.name) |
2142 | 564 | 564 | ||
2143 | 565 | def isemptydir(self): | 565 | def isemptydir(self): |
2145 | 566 | """Return true if path is a directory and is empty""" | 566 | u"""Return true if path is a directory and is empty""" |
2146 | 567 | return self.isdir() and not self.listdir() | 567 | return self.isdir() and not self.listdir() |
2147 | 568 | 568 | ||
2150 | 569 | def open(self, mode="rb"): | 569 | def open(self, mode=u"rb"): |
2151 | 570 | """ | 570 | u""" |
2152 | 571 | Return fileobj associated with self | 571 | Return fileobj associated with self |
2153 | 572 | 572 | ||
2154 | 573 | Usually this is just the file data on disk, but can be | 573 | Usually this is just the file data on disk, but can be |
2155 | @@ -581,25 +581,25 @@ | |||
2156 | 581 | return result | 581 | return result |
2157 | 582 | 582 | ||
2158 | 583 | def makedev(self, type, major, minor): | 583 | def makedev(self, type, major, minor): |
2163 | 584 | """Make a device file with specified type, major/minor nums""" | 584 | u"""Make a device file with specified type, major/minor nums""" |
2164 | 585 | cmdlist = ['mknod', self.name, type, str(major), str(minor)] | 585 | cmdlist = [u'mknod', self.name, type, str(major), str(minor)] |
2165 | 586 | if os.spawnvp(os.P_WAIT, 'mknod', cmdlist) != 0: | 586 | if os.spawnvp(os.P_WAIT, u'mknod', cmdlist) != 0: |
2166 | 587 | raise PathException("Error running %s" % cmdlist) | 587 | raise PathException(u"Error running %s" % cmdlist) |
2167 | 588 | self.setdata() | 588 | self.setdata() |
2168 | 589 | 589 | ||
2169 | 590 | def mkdir(self): | 590 | def mkdir(self): |
2172 | 591 | """Make directory(s) at specified path""" | 591 | u"""Make directory(s) at specified path""" |
2173 | 592 | log.Info(_("Making directory %s") % self.uc_name) | 592 | log.Info(_(u"Making directory %s") % self.uc_name) |
2174 | 593 | try: | 593 | try: |
2175 | 594 | os.makedirs(self.name) | 594 | os.makedirs(self.name) |
2176 | 595 | except OSError: | 595 | except OSError: |
2177 | 596 | if (not globals.force): | 596 | if (not globals.force): |
2179 | 597 | raise PathException("Error creating directory %s" % self.uc_name, 7) | 597 | raise PathException(u"Error creating directory %s" % self.uc_name, 7) |
2180 | 598 | self.setdata() | 598 | self.setdata() |
2181 | 599 | 599 | ||
2182 | 600 | def delete(self): | 600 | def delete(self): |
2185 | 601 | """Remove this file""" | 601 | u"""Remove this file""" |
2186 | 602 | log.Info(_("Deleting %s") % self.uc_name) | 602 | log.Info(_(u"Deleting %s") % self.uc_name) |
2187 | 603 | if self.isdir(): | 603 | if self.isdir(): |
2188 | 604 | util.ignore_missing(os.rmdir, self.name) | 604 | util.ignore_missing(os.rmdir, self.name) |
2189 | 605 | else: | 605 | else: |
2190 | @@ -607,15 +607,15 @@ | |||
2191 | 607 | self.setdata() | 607 | self.setdata() |
2192 | 608 | 608 | ||
2193 | 609 | def touch(self): | 609 | def touch(self): |
2197 | 610 | """Open the file, write 0 bytes, close""" | 610 | u"""Open the file, write 0 bytes, close""" |
2198 | 611 | log.Info(_("Touching %s") % self.uc_name) | 611 | log.Info(_(u"Touching %s") % self.uc_name) |
2199 | 612 | fp = self.open("wb") | 612 | fp = self.open(u"wb") |
2200 | 613 | fp.close() | 613 | fp.close() |
2201 | 614 | 614 | ||
2202 | 615 | def deltree(self): | 615 | def deltree(self): |
2204 | 616 | """Remove self by recursively deleting files under it""" | 616 | u"""Remove self by recursively deleting files under it""" |
2205 | 617 | from duplicity import selection # todo: avoid circ. dep. issue | 617 | from duplicity import selection # todo: avoid circ. dep. issue |
2207 | 618 | log.Info(_("Deleting tree %s") % self.uc_name) | 618 | log.Info(_(u"Deleting tree %s") % self.uc_name) |
2208 | 619 | itr = IterTreeReducer(PathDeleter, []) | 619 | itr = IterTreeReducer(PathDeleter, []) |
2209 | 620 | for path in selection.Select(self).set_iter(): | 620 | for path in selection.Select(self).set_iter(): |
2210 | 621 | itr(path.index, path) | 621 | itr(path.index, path) |
2211 | @@ -623,50 +623,50 @@ | |||
2212 | 623 | self.setdata() | 623 | self.setdata() |
2213 | 624 | 624 | ||
2214 | 625 | def get_parent_dir(self): | 625 | def get_parent_dir(self): |
2216 | 626 | """Return directory that self is in""" | 626 | u"""Return directory that self is in""" |
2217 | 627 | if self.index: | 627 | if self.index: |
2218 | 628 | return Path(self.base, self.index[:-1]) | 628 | return Path(self.base, self.index[:-1]) |
2219 | 629 | else: | 629 | else: |
2221 | 630 | components = self.base.split("/") | 630 | components = self.base.split(u"/") |
2222 | 631 | if len(components) == 2 and not components[0]: | 631 | if len(components) == 2 and not components[0]: |
2224 | 632 | return Path("/") # already in root directory | 632 | return Path(u"/") # already in root directory |
2225 | 633 | else: | 633 | else: |
2227 | 634 | return Path("/".join(components[:-1])) | 634 | return Path(u"/".join(components[:-1])) |
2228 | 635 | 635 | ||
2229 | 636 | def writefileobj(self, fin): | 636 | def writefileobj(self, fin): |
2232 | 637 | """Copy file object fin to self. Close both when done.""" | 637 | u"""Copy file object fin to self. Close both when done.""" |
2233 | 638 | fout = self.open("wb") | 638 | fout = self.open(u"wb") |
2234 | 639 | while 1: | 639 | while 1: |
2235 | 640 | buf = fin.read(_copy_blocksize) | 640 | buf = fin.read(_copy_blocksize) |
2236 | 641 | if not buf: | 641 | if not buf: |
2237 | 642 | break | 642 | break |
2238 | 643 | fout.write(buf) | 643 | fout.write(buf) |
2239 | 644 | if fin.close() or fout.close(): | 644 | if fin.close() or fout.close(): |
2241 | 645 | raise PathException("Error closing file object") | 645 | raise PathException(u"Error closing file object") |
2242 | 646 | self.setdata() | 646 | self.setdata() |
2243 | 647 | 647 | ||
2244 | 648 | def rename(self, new_path): | 648 | def rename(self, new_path): |
2246 | 649 | """Rename file at current path to new_path.""" | 649 | u"""Rename file at current path to new_path.""" |
2247 | 650 | shutil.move(self.name, new_path.name) | 650 | shutil.move(self.name, new_path.name) |
2248 | 651 | self.setdata() | 651 | self.setdata() |
2249 | 652 | new_path.setdata() | 652 | new_path.setdata() |
2250 | 653 | 653 | ||
2251 | 654 | def move(self, new_path): | 654 | def move(self, new_path): |
2253 | 655 | """Like rename but destination may be on different file system""" | 655 | u"""Like rename but destination may be on different file system""" |
2254 | 656 | self.copy(new_path) | 656 | self.copy(new_path) |
2255 | 657 | self.delete() | 657 | self.delete() |
2256 | 658 | 658 | ||
2257 | 659 | def chmod(self, mode): | 659 | def chmod(self, mode): |
2259 | 660 | """Change permissions of the path""" | 660 | u"""Change permissions of the path""" |
2260 | 661 | os.chmod(self.name, mode) | 661 | os.chmod(self.name, mode) |
2261 | 662 | self.setdata() | 662 | self.setdata() |
2262 | 663 | 663 | ||
2263 | 664 | def patch_with_attribs(self, diff_ropath): | 664 | def patch_with_attribs(self, diff_ropath): |
2265 | 665 | """Patch self with diff and then copy attributes over""" | 665 | u"""Patch self with diff and then copy attributes over""" |
2266 | 666 | assert self.isreg() and diff_ropath.isreg() | 666 | assert self.isreg() and diff_ropath.isreg() |
2267 | 667 | temp_path = self.get_temp_in_same_dir() | 667 | temp_path = self.get_temp_in_same_dir() |
2270 | 668 | fbase = self.open("rb") | 668 | fbase = self.open(u"rb") |
2271 | 669 | fdiff = diff_ropath.open("rb") | 669 | fdiff = diff_ropath.open(u"rb") |
2272 | 670 | patch_fileobj = librsync.PatchedFile(fbase, fdiff) | 670 | patch_fileobj = librsync.PatchedFile(fbase, fdiff) |
2273 | 671 | temp_path.writefileobj(patch_fileobj) | 671 | temp_path.writefileobj(patch_fileobj) |
2274 | 672 | assert not fbase.close() | 672 | assert not fbase.close() |
2275 | @@ -675,11 +675,11 @@ | |||
2276 | 675 | temp_path.rename(self) | 675 | temp_path.rename(self) |
2277 | 676 | 676 | ||
2278 | 677 | def get_temp_in_same_dir(self): | 677 | def get_temp_in_same_dir(self): |
2280 | 678 | """Return temp non existent path in same directory as self""" | 678 | u"""Return temp non existent path in same directory as self""" |
2281 | 679 | global _tmp_path_counter | 679 | global _tmp_path_counter |
2282 | 680 | parent_dir = self.get_parent_dir() | 680 | parent_dir = self.get_parent_dir() |
2283 | 681 | while 1: | 681 | while 1: |
2285 | 682 | temp_path = parent_dir.append("duplicity_temp." + | 682 | temp_path = parent_dir.append(u"duplicity_temp." + |
2286 | 683 | str(_tmp_path_counter)) | 683 | str(_tmp_path_counter)) |
2287 | 684 | if not temp_path.type: | 684 | if not temp_path.type: |
2288 | 685 | return temp_path | 685 | return temp_path |
2289 | @@ -688,18 +688,18 @@ | |||
2290 | 688 | u"Warning too many temp files created for " + self.uc_name | 688 | u"Warning too many temp files created for " + self.uc_name |
2291 | 689 | 689 | ||
2292 | 690 | def compare_recursive(self, other, verbose=None): | 690 | def compare_recursive(self, other, verbose=None): |
2294 | 691 | """Compare self to other Path, descending down directories""" | 691 | u"""Compare self to other Path, descending down directories""" |
2295 | 692 | from duplicity import selection # todo: avoid circ. dep. issue | 692 | from duplicity import selection # todo: avoid circ. dep. issue |
2296 | 693 | selfsel = selection.Select(self).set_iter() | 693 | selfsel = selection.Select(self).set_iter() |
2297 | 694 | othersel = selection.Select(other).set_iter() | 694 | othersel = selection.Select(other).set_iter() |
2298 | 695 | return Iter.equal(selfsel, othersel, verbose) | 695 | return Iter.equal(selfsel, othersel, verbose) |
2299 | 696 | 696 | ||
2300 | 697 | def __repr__(self): | 697 | def __repr__(self): |
2303 | 698 | """Return string representation""" | 698 | u"""Return string representation""" |
2304 | 699 | return "(%s %s %s)" % (self.index, self.name, self.type) | 699 | return u"(%s %s %s)" % (self.index, self.name, self.type) |
2305 | 700 | 700 | ||
2306 | 701 | def quote(self, s=None): | 701 | def quote(self, s=None): |
2308 | 702 | """ | 702 | u""" |
2309 | 703 | Return quoted version of s (defaults to self.name) | 703 | Return quoted version of s (defaults to self.name) |
2310 | 704 | 704 | ||
2311 | 705 | The output is meant to be interpreted with shells, so can be | 705 | The output is meant to be interpreted with shells, so can be |
2312 | @@ -707,15 +707,15 @@ | |||
2313 | 707 | """ | 707 | """ |
2314 | 708 | if not s: | 708 | if not s: |
2315 | 709 | s = self.name | 709 | s = self.name |
2317 | 710 | return '"%s"' % self.regex_chars_to_quote.sub(lambda m: "\\" + m.group(0), s) | 710 | return u'"%s"' % self.regex_chars_to_quote.sub(lambda m: u"\\" + m.group(0), s) |
2318 | 711 | 711 | ||
2319 | 712 | def unquote(self, s): | 712 | def unquote(self, s): |
2323 | 713 | """Return unquoted version of string s, as quoted by above quote()""" | 713 | u"""Return unquoted version of string s, as quoted by above quote()""" |
2324 | 714 | assert s[0] == s[-1] == "\"" # string must be quoted by above | 714 | assert s[0] == s[-1] == u"\"" # string must be quoted by above |
2325 | 715 | result = "" | 715 | result = u"" |
2326 | 716 | i = 1 | 716 | i = 1 |
2327 | 717 | while i < len(s) - 1: | 717 | while i < len(s) - 1: |
2329 | 718 | if s[i] == "\\": | 718 | if s[i] == u"\\": |
2330 | 719 | result += s[i + 1] | 719 | result += s[i + 1] |
2331 | 720 | i += 2 | 720 | i += 2 |
2332 | 721 | else: | 721 | else: |
2333 | @@ -724,38 +724,38 @@ | |||
2334 | 724 | return result | 724 | return result |
2335 | 725 | 725 | ||
2336 | 726 | def get_filename(self): | 726 | def get_filename(self): |
2339 | 727 | """Return filename of last component""" | 727 | u"""Return filename of last component""" |
2340 | 728 | components = self.name.split("/") | 728 | components = self.name.split(u"/") |
2341 | 729 | assert components and components[-1] | 729 | assert components and components[-1] |
2342 | 730 | return components[-1] | 730 | return components[-1] |
2343 | 731 | 731 | ||
2344 | 732 | def get_canonical(self): | 732 | def get_canonical(self): |
2346 | 733 | """ | 733 | u""" |
2347 | 734 | Return string of canonical version of path | 734 | Return string of canonical version of path |
2348 | 735 | 735 | ||
2349 | 736 | Remove ".", and trailing slashes where possible. Note that | 736 | Remove ".", and trailing slashes where possible. Note that |
2350 | 737 | it's harder to remove "..", as "foo/bar/.." is not necessarily | 737 | it's harder to remove "..", as "foo/bar/.." is not necessarily |
2351 | 738 | "foo", so we can't use path.normpath() | 738 | "foo", so we can't use path.normpath() |
2352 | 739 | """ | 739 | """ |
2357 | 740 | newpath = "/".join(filter(lambda x: x and x != ".", | 740 | newpath = u"/".join(filter(lambda x: x and x != u".", |
2358 | 741 | self.name.split("/"))) | 741 | self.name.split(u"/"))) |
2359 | 742 | if self.name[0] == "/": | 742 | if self.name[0] == u"/": |
2360 | 743 | return "/" + newpath | 743 | return u"/" + newpath |
2361 | 744 | elif newpath: | 744 | elif newpath: |
2362 | 745 | return newpath | 745 | return newpath |
2363 | 746 | else: | 746 | else: |
2365 | 747 | return "." | 747 | return u"." |
2366 | 748 | 748 | ||
2367 | 749 | 749 | ||
2368 | 750 | class DupPath(Path): | 750 | class DupPath(Path): |
2370 | 751 | """ | 751 | u""" |
2371 | 752 | Represent duplicity data files | 752 | Represent duplicity data files |
2372 | 753 | 753 | ||
2373 | 754 | Based on the file name, files that are compressed or encrypted | 754 | Based on the file name, files that are compressed or encrypted |
2374 | 755 | will have different open() methods. | 755 | will have different open() methods. |
2375 | 756 | """ | 756 | """ |
2376 | 757 | def __init__(self, base, index=(), parseresults=None): | 757 | def __init__(self, base, index=(), parseresults=None): |
2378 | 758 | """ | 758 | u""" |
2379 | 759 | DupPath initializer | 759 | DupPath initializer |
2380 | 760 | 760 | ||
2381 | 761 | The actual filename (no directory) must be the single element | 761 | The actual filename (no directory) must be the single element |
2382 | @@ -767,12 +767,12 @@ | |||
2383 | 767 | else: | 767 | else: |
2384 | 768 | assert len(index) == 1 | 768 | assert len(index) == 1 |
2385 | 769 | self.pr = file_naming.parse(index[0]) | 769 | self.pr = file_naming.parse(index[0]) |
2387 | 770 | assert self.pr, "must be a recognizable duplicity file" | 770 | assert self.pr, u"must be a recognizable duplicity file" |
2388 | 771 | 771 | ||
2389 | 772 | Path.__init__(self, base, index) | 772 | Path.__init__(self, base, index) |
2390 | 773 | 773 | ||
2393 | 774 | def filtered_open(self, mode="rb", gpg_profile=None): | 774 | def filtered_open(self, mode=u"rb", gpg_profile=None): |
2394 | 775 | """ | 775 | u""" |
2395 | 776 | Return fileobj with appropriate encryption/compression | 776 | Return fileobj with appropriate encryption/compression |
2396 | 777 | 777 | ||
2397 | 778 | If encryption is specified but no gpg_profile, use | 778 | If encryption is specified but no gpg_profile, use |
2398 | @@ -788,16 +788,16 @@ | |||
2399 | 788 | elif self.pr.encrypted: | 788 | elif self.pr.encrypted: |
2400 | 789 | if not gpg_profile: | 789 | if not gpg_profile: |
2401 | 790 | gpg_profile = globals.gpg_profile | 790 | gpg_profile = globals.gpg_profile |
2403 | 791 | if mode == "rb": | 791 | if mode == u"rb": |
2404 | 792 | return gpg.GPGFile(False, self, gpg_profile) | 792 | return gpg.GPGFile(False, self, gpg_profile) |
2406 | 793 | elif mode == "wb": | 793 | elif mode == u"wb": |
2407 | 794 | return gpg.GPGFile(True, self, gpg_profile) | 794 | return gpg.GPGFile(True, self, gpg_profile) |
2408 | 795 | else: | 795 | else: |
2409 | 796 | return self.open(mode) | 796 | return self.open(mode) |
2410 | 797 | 797 | ||
2411 | 798 | 798 | ||
2412 | 799 | class PathDeleter(ITRBranch): | 799 | class PathDeleter(ITRBranch): |
2414 | 800 | """Delete a directory. Called by Path.deltree""" | 800 | u"""Delete a directory. Called by Path.deltree""" |
2415 | 801 | def start_process(self, index, path): | 801 | def start_process(self, index, path): |
2416 | 802 | self.path = path | 802 | self.path = path |
2417 | 803 | 803 | ||
2418 | 804 | 804 | ||
2419 | === modified file 'duplicity/statistics.py' | |||
2420 | --- duplicity/statistics.py 2018-07-24 11:52:33 +0000 | |||
2421 | +++ duplicity/statistics.py 2018-09-24 21:19:45 +0000 | |||
2422 | @@ -19,7 +19,7 @@ | |||
2423 | 19 | # along with duplicity; if not, write to the Free Software Foundation, | 19 | # along with duplicity; if not, write to the Free Software Foundation, |
2424 | 20 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | 20 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
2425 | 21 | 21 | ||
2427 | 22 | """Generate and process backup statistics""" | 22 | u"""Generate and process backup statistics""" |
2428 | 23 | 23 | ||
2429 | 24 | from future_builtins import map | 24 | from future_builtins import map |
2430 | 25 | 25 | ||
2431 | @@ -35,66 +35,66 @@ | |||
2432 | 35 | 35 | ||
2433 | 36 | 36 | ||
2434 | 37 | class StatsObj: | 37 | class StatsObj: |
2436 | 38 | """Contains various statistics, provide string conversion functions""" | 38 | u"""Contains various statistics, provide string conversion functions""" |
2437 | 39 | # used when quoting files in get_stats_line | 39 | # used when quoting files in get_stats_line |
2439 | 40 | space_regex = re.compile(" ") | 40 | space_regex = re.compile(u" ") |
2440 | 41 | 41 | ||
2457 | 42 | stat_file_attrs = ('SourceFiles', | 42 | stat_file_attrs = (u'SourceFiles', |
2458 | 43 | 'SourceFileSize', | 43 | u'SourceFileSize', |
2459 | 44 | 'NewFiles', | 44 | u'NewFiles', |
2460 | 45 | 'NewFileSize', | 45 | u'NewFileSize', |
2461 | 46 | 'DeletedFiles', | 46 | u'DeletedFiles', |
2462 | 47 | 'ChangedFiles', | 47 | u'ChangedFiles', |
2463 | 48 | 'ChangedFileSize', | 48 | u'ChangedFileSize', |
2464 | 49 | 'ChangedDeltaSize', | 49 | u'ChangedDeltaSize', |
2465 | 50 | 'DeltaEntries', | 50 | u'DeltaEntries', |
2466 | 51 | 'RawDeltaSize') | 51 | u'RawDeltaSize') |
2467 | 52 | stat_misc_attrs = ('Errors', | 52 | stat_misc_attrs = (u'Errors', |
2468 | 53 | 'TotalDestinationSizeChange') | 53 | u'TotalDestinationSizeChange') |
2469 | 54 | stat_time_attrs = ('StartTime', | 54 | stat_time_attrs = (u'StartTime', |
2470 | 55 | 'EndTime', | 55 | u'EndTime', |
2471 | 56 | 'ElapsedTime') | 56 | u'ElapsedTime') |
2472 | 57 | stat_attrs = (('Filename',) + stat_time_attrs + | 57 | stat_attrs = ((u'Filename',) + stat_time_attrs + |
2473 | 58 | stat_misc_attrs + stat_file_attrs) | 58 | stat_misc_attrs + stat_file_attrs) |
2474 | 59 | 59 | ||
2475 | 60 | # Below, the second value in each pair is true iff the value | 60 | # Below, the second value in each pair is true iff the value |
2476 | 61 | # indicates a number of bytes | 61 | # indicates a number of bytes |
2487 | 62 | stat_file_pairs = (('SourceFiles', False), | 62 | stat_file_pairs = ((u'SourceFiles', False), |
2488 | 63 | ('SourceFileSize', True), | 63 | (u'SourceFileSize', True), |
2489 | 64 | ('NewFiles', False), | 64 | (u'NewFiles', False), |
2490 | 65 | ('NewFileSize', True), | 65 | (u'NewFileSize', True), |
2491 | 66 | ('DeletedFiles', False), | 66 | (u'DeletedFiles', False), |
2492 | 67 | ('ChangedFiles', False), | 67 | (u'ChangedFiles', False), |
2493 | 68 | ('ChangedFileSize', True), | 68 | (u'ChangedFileSize', True), |
2494 | 69 | ('ChangedDeltaSize', True), | 69 | (u'ChangedDeltaSize', True), |
2495 | 70 | ('DeltaEntries', False), | 70 | (u'DeltaEntries', False), |
2496 | 71 | ('RawDeltaSize', True)) | 71 | (u'RawDeltaSize', True)) |
2497 | 72 | 72 | ||
2498 | 73 | # This is used in get_byte_summary_string below | 73 | # This is used in get_byte_summary_string below |
2503 | 74 | byte_abbrev_list = ((1024 * 1024 * 1024 * 1024, "TB"), | 74 | byte_abbrev_list = ((1024 * 1024 * 1024 * 1024, u"TB"), |
2504 | 75 | (1024 * 1024 * 1024, "GB"), | 75 | (1024 * 1024 * 1024, u"GB"), |
2505 | 76 | (1024 * 1024, "MB"), | 76 | (1024 * 1024, u"MB"), |
2506 | 77 | (1024, "KB")) | 77 | (1024, u"KB")) |
2507 | 78 | 78 | ||
2508 | 79 | def __init__(self): | 79 | def __init__(self): |
2510 | 80 | """Set attributes to None""" | 80 | u"""Set attributes to None""" |
2511 | 81 | for attr in self.stat_attrs: | 81 | for attr in self.stat_attrs: |
2512 | 82 | self.__dict__[attr] = None | 82 | self.__dict__[attr] = None |
2513 | 83 | 83 | ||
2514 | 84 | def get_stat(self, attribute): | 84 | def get_stat(self, attribute): |
2516 | 85 | """Get a statistic""" | 85 | u"""Get a statistic""" |
2517 | 86 | return self.__dict__[attribute] | 86 | return self.__dict__[attribute] |
2518 | 87 | 87 | ||
2519 | 88 | def set_stat(self, attr, value): | 88 | def set_stat(self, attr, value): |
2521 | 89 | """Set attribute to given value""" | 89 | u"""Set attribute to given value""" |
2522 | 90 | self.__dict__[attr] = value | 90 | self.__dict__[attr] = value |
2523 | 91 | 91 | ||
2524 | 92 | def increment_stat(self, attr): | 92 | def increment_stat(self, attr): |
2526 | 93 | """Add 1 to value of attribute""" | 93 | u"""Add 1 to value of attribute""" |
2527 | 94 | self.__dict__[attr] += 1 | 94 | self.__dict__[attr] += 1 |
2528 | 95 | 95 | ||
2529 | 96 | def get_total_dest_size_change(self): | 96 | def get_total_dest_size_change(self): |
2531 | 97 | """Return total destination size change | 97 | u"""Return total destination size change |
2532 | 98 | 98 | ||
2533 | 99 | This represents the total increase in the size of the | 99 | This represents the total increase in the size of the |
2534 | 100 | duplicity destination directory, or None if not available. | 100 | duplicity destination directory, or None if not available. |
2535 | @@ -103,25 +103,25 @@ | |||
2536 | 103 | return 0 # this needs to be re-done for duplicity | 103 | return 0 # this needs to be re-done for duplicity |
2537 | 104 | 104 | ||
2538 | 105 | def get_stats_line(self, index, use_repr=1): | 105 | def get_stats_line(self, index, use_repr=1): |
2540 | 106 | """Return one line abbreviated version of full stats string""" | 106 | u"""Return one line abbreviated version of full stats string""" |
2541 | 107 | file_attrs = [str(self.get_stat(a)) for a in self.stat_file_attrs] | 107 | file_attrs = [str(self.get_stat(a)) for a in self.stat_file_attrs] |
2542 | 108 | if not index: | 108 | if not index: |
2544 | 109 | filename = "." | 109 | filename = u"." |
2545 | 110 | else: | 110 | else: |
2546 | 111 | filename = os.path.join(*index) | 111 | filename = os.path.join(*index) |
2547 | 112 | if use_repr: | 112 | if use_repr: |
2548 | 113 | # use repr to quote newlines in relative filename, then | 113 | # use repr to quote newlines in relative filename, then |
2549 | 114 | # take of leading and trailing quote and quote spaces. | 114 | # take of leading and trailing quote and quote spaces. |
2552 | 115 | filename = self.space_regex.sub("\\x20", repr(filename)[1:-1]) | 115 | filename = self.space_regex.sub(u"\\x20", repr(filename)[1:-1]) |
2553 | 116 | return " ".join([filename, ] + file_attrs) | 116 | return u" ".join([filename, ] + file_attrs) |
2554 | 117 | 117 | ||
2555 | 118 | def set_stats_from_line(self, line): | 118 | def set_stats_from_line(self, line): |
2557 | 119 | """Set statistics from given line""" | 119 | u"""Set statistics from given line""" |
2558 | 120 | def error(): | 120 | def error(): |
2561 | 121 | raise StatsException("Bad line '%s'" % line) | 121 | raise StatsException(u"Bad line '%s'" % line) |
2562 | 122 | if line[-1] == "\n": | 122 | if line[-1] == u"\n": |
2563 | 123 | line = line[:-1] | 123 | line = line[:-1] |
2565 | 124 | lineparts = line.split(" ") | 124 | lineparts = line.split(u" ") |
2566 | 125 | if len(lineparts) < len(self.stat_file_attrs): | 125 | if len(lineparts) < len(self.stat_file_attrs): |
2567 | 126 | error() | 126 | error() |
2568 | 127 | for attr, val_string in zip(self.stat_file_attrs, | 127 | for attr, val_string in zip(self.stat_file_attrs, |
2569 | @@ -137,62 +137,62 @@ | |||
2570 | 137 | return self | 137 | return self |
2571 | 138 | 138 | ||
2572 | 139 | def get_stats_string(self): | 139 | def get_stats_string(self): |
2577 | 140 | """Return extended string printing out statistics""" | 140 | u"""Return extended string printing out statistics""" |
2578 | 141 | return "%s%s%s" % (self.get_timestats_string(), | 141 | return u"%s%s%s" % (self.get_timestats_string(), |
2579 | 142 | self.get_filestats_string(), | 142 | self.get_filestats_string(), |
2580 | 143 | self.get_miscstats_string()) | 143 | self.get_miscstats_string()) |
2581 | 144 | 144 | ||
2582 | 145 | def get_timestats_string(self): | 145 | def get_timestats_string(self): |
2584 | 146 | """Return portion of statistics string dealing with time""" | 146 | u"""Return portion of statistics string dealing with time""" |
2585 | 147 | timelist = [] | 147 | timelist = [] |
2586 | 148 | if self.StartTime is not None: | 148 | if self.StartTime is not None: |
2588 | 149 | timelist.append("StartTime %.2f (%s)\n" % | 149 | timelist.append(u"StartTime %.2f (%s)\n" % |
2589 | 150 | (self.StartTime, dup_time.timetopretty(self.StartTime))) | 150 | (self.StartTime, dup_time.timetopretty(self.StartTime))) |
2590 | 151 | if self.EndTime is not None: | 151 | if self.EndTime is not None: |
2592 | 152 | timelist.append("EndTime %.2f (%s)\n" % | 152 | timelist.append(u"EndTime %.2f (%s)\n" % |
2593 | 153 | (self.EndTime, dup_time.timetopretty(self.EndTime))) | 153 | (self.EndTime, dup_time.timetopretty(self.EndTime))) |
2594 | 154 | if self.ElapsedTime or (self.StartTime is not None and | 154 | if self.ElapsedTime or (self.StartTime is not None and |
2595 | 155 | self.EndTime is not None): | 155 | self.EndTime is not None): |
2596 | 156 | if self.ElapsedTime is None: | 156 | if self.ElapsedTime is None: |
2597 | 157 | self.ElapsedTime = self.EndTime - self.StartTime | 157 | self.ElapsedTime = self.EndTime - self.StartTime |
2599 | 158 | timelist.append("ElapsedTime %.2f (%s)\n" % | 158 | timelist.append(u"ElapsedTime %.2f (%s)\n" % |
2600 | 159 | (self.ElapsedTime, dup_time.inttopretty(self.ElapsedTime))) | 159 | (self.ElapsedTime, dup_time.inttopretty(self.ElapsedTime))) |
2602 | 160 | return "".join(timelist) | 160 | return u"".join(timelist) |
2603 | 161 | 161 | ||
2604 | 162 | def get_filestats_string(self): | 162 | def get_filestats_string(self): |
2606 | 163 | """Return portion of statistics string about files and bytes""" | 163 | u"""Return portion of statistics string about files and bytes""" |
2607 | 164 | def fileline(stat_file_pair): | 164 | def fileline(stat_file_pair): |
2609 | 165 | """Return zero or one line of the string""" | 165 | u"""Return zero or one line of the string""" |
2610 | 166 | attr, in_bytes = stat_file_pair | 166 | attr, in_bytes = stat_file_pair |
2611 | 167 | val = self.get_stat(attr) | 167 | val = self.get_stat(attr) |
2612 | 168 | if val is None: | 168 | if val is None: |
2614 | 169 | return "" | 169 | return u"" |
2615 | 170 | if in_bytes: | 170 | if in_bytes: |
2618 | 171 | return "%s %s (%s)\n" % (attr, val, | 171 | return u"%s %s (%s)\n" % (attr, val, |
2619 | 172 | self.get_byte_summary_string(val)) | 172 | self.get_byte_summary_string(val)) |
2620 | 173 | else: | 173 | else: |
2622 | 174 | return "%s %s\n" % (attr, val) | 174 | return u"%s %s\n" % (attr, val) |
2623 | 175 | 175 | ||
2625 | 176 | return "".join(map(fileline, self.stat_file_pairs)) | 176 | return u"".join(map(fileline, self.stat_file_pairs)) |
2626 | 177 | 177 | ||
2627 | 178 | def get_miscstats_string(self): | 178 | def get_miscstats_string(self): |
2630 | 179 | """Return portion of extended stat string about misc attributes""" | 179 | u"""Return portion of extended stat string about misc attributes""" |
2631 | 180 | misc_string = "" | 180 | misc_string = u"" |
2632 | 181 | tdsc = self.TotalDestinationSizeChange | 181 | tdsc = self.TotalDestinationSizeChange |
2633 | 182 | if tdsc is not None: | 182 | if tdsc is not None: |
2635 | 183 | misc_string += ("TotalDestinationSizeChange %s (%s)\n" % | 183 | misc_string += (u"TotalDestinationSizeChange %s (%s)\n" % |
2636 | 184 | (tdsc, self.get_byte_summary_string(tdsc))) | 184 | (tdsc, self.get_byte_summary_string(tdsc))) |
2637 | 185 | if self.Errors is not None: | 185 | if self.Errors is not None: |
2639 | 186 | misc_string += "Errors %d\n" % self.Errors | 186 | misc_string += u"Errors %d\n" % self.Errors |
2640 | 187 | return misc_string | 187 | return misc_string |
2641 | 188 | 188 | ||
2642 | 189 | def get_byte_summary_string(self, byte_count): | 189 | def get_byte_summary_string(self, byte_count): |
2644 | 190 | """Turn byte count into human readable string like "7.23GB" """ | 190 | u"""Turn byte count into human readable string like "7.23GB" """ |
2645 | 191 | if byte_count < 0: | 191 | if byte_count < 0: |
2647 | 192 | sign = "-" | 192 | sign = u"-" |
2648 | 193 | byte_count = -byte_count | 193 | byte_count = -byte_count |
2649 | 194 | else: | 194 | else: |
2651 | 195 | sign = "" | 195 | sign = u"" |
2652 | 196 | 196 | ||
2653 | 197 | for abbrev_bytes, abbrev_string in self.byte_abbrev_list: | 197 | for abbrev_bytes, abbrev_string in self.byte_abbrev_list: |
2654 | 198 | if byte_count >= abbrev_bytes: | 198 | if byte_count >= abbrev_bytes: |
2655 | @@ -204,26 +204,26 @@ | |||
2656 | 204 | precision = 1 | 204 | precision = 1 |
2657 | 205 | else: | 205 | else: |
2658 | 206 | precision = 2 | 206 | precision = 2 |
2660 | 207 | return "%s%%.%df %s" % (sign, precision, abbrev_string) \ | 207 | return u"%s%%.%df %s" % (sign, precision, abbrev_string) \ |
2661 | 208 | % (abbrev_count,) | 208 | % (abbrev_count,) |
2662 | 209 | byte_count = round(byte_count) | 209 | byte_count = round(byte_count) |
2663 | 210 | if byte_count == 1: | 210 | if byte_count == 1: |
2665 | 211 | return sign + "1 byte" | 211 | return sign + u"1 byte" |
2666 | 212 | else: | 212 | else: |
2668 | 213 | return "%s%d bytes" % (sign, byte_count) | 213 | return u"%s%d bytes" % (sign, byte_count) |
2669 | 214 | 214 | ||
2670 | 215 | def get_stats_logstring(self, title): | 215 | def get_stats_logstring(self, title): |
2675 | 216 | """Like get_stats_string, but add header and footer""" | 216 | u"""Like get_stats_string, but add header and footer""" |
2676 | 217 | header = "--------------[ %s ]--------------" % title | 217 | header = u"--------------[ %s ]--------------" % title |
2677 | 218 | footer = "-" * len(header) | 218 | footer = u"-" * len(header) |
2678 | 219 | return "%s\n%s%s\n" % (header, self.get_stats_string(), footer) | 219 | return u"%s\n%s%s\n" % (header, self.get_stats_string(), footer) |
2679 | 220 | 220 | ||
2680 | 221 | def set_stats_from_string(self, s): | 221 | def set_stats_from_string(self, s): |
2682 | 222 | """Initialize attributes from string, return self for convenience""" | 222 | u"""Initialize attributes from string, return self for convenience""" |
2683 | 223 | def error(line): | 223 | def error(line): |
2685 | 224 | raise StatsException("Bad line '%s'" % line) | 224 | raise StatsException(u"Bad line '%s'" % line) |
2686 | 225 | 225 | ||
2688 | 226 | for line in s.split("\n"): | 226 | for line in s.split(u"\n"): |
2689 | 227 | if not line: | 227 | if not line: |
2690 | 228 | continue | 228 | continue |
2691 | 229 | line_parts = line.split() | 229 | line_parts = line.split() |
2692 | @@ -247,20 +247,20 @@ | |||
2693 | 247 | return self | 247 | return self |
2694 | 248 | 248 | ||
2695 | 249 | def write_stats_to_path(self, path): | 249 | def write_stats_to_path(self, path): |
2698 | 250 | """Write statistics string to given path""" | 250 | u"""Write statistics string to given path""" |
2699 | 251 | fin = path.open("w") | 251 | fin = path.open(u"w") |
2700 | 252 | fin.write(self.get_stats_string()) | 252 | fin.write(self.get_stats_string()) |
2701 | 253 | assert not fin.close() | 253 | assert not fin.close() |
2702 | 254 | 254 | ||
2703 | 255 | def read_stats_from_path(self, path): | 255 | def read_stats_from_path(self, path): |
2706 | 256 | """Set statistics from path, return self for convenience""" | 256 | u"""Set statistics from path, return self for convenience""" |
2707 | 257 | fp = path.open("r") | 257 | fp = path.open(u"r") |
2708 | 258 | self.set_stats_from_string(fp.read()) | 258 | self.set_stats_from_string(fp.read()) |
2709 | 259 | assert not fp.close() | 259 | assert not fp.close() |
2710 | 260 | return self | 260 | return self |
2711 | 261 | 261 | ||
2712 | 262 | def stats_equal(self, s): | 262 | def stats_equal(self, s): |
2714 | 263 | """Return true if s has same statistics as self""" | 263 | u"""Return true if s has same statistics as self""" |
2715 | 264 | assert isinstance(s, StatsObj) | 264 | assert isinstance(s, StatsObj) |
2716 | 265 | for attr in self.stat_file_attrs: | 265 | for attr in self.stat_file_attrs: |
2717 | 266 | if self.get_stat(attr) != s.get_stat(attr): | 266 | if self.get_stat(attr) != s.get_stat(attr): |
2718 | @@ -268,7 +268,7 @@ | |||
2719 | 268 | return 1 | 268 | return 1 |
2720 | 269 | 269 | ||
2721 | 270 | def set_to_average(self, statobj_list): | 270 | def set_to_average(self, statobj_list): |
2723 | 271 | """Set self's attributes to average of those in statobj_list""" | 271 | u"""Set self's attributes to average of those in statobj_list""" |
2724 | 272 | for attr in self.stat_attrs: | 272 | for attr in self.stat_attrs: |
2725 | 273 | self.set_stat(attr, 0) | 273 | self.set_stat(attr, 0) |
2726 | 274 | for statobj in statobj_list: | 274 | for statobj in statobj_list: |
2727 | @@ -290,7 +290,7 @@ | |||
2728 | 290 | return self | 290 | return self |
2729 | 291 | 291 | ||
2730 | 292 | def get_statsobj_copy(self): | 292 | def get_statsobj_copy(self): |
2732 | 293 | """Return new StatsObj object with same stats as self""" | 293 | u"""Return new StatsObj object with same stats as self""" |
2733 | 294 | s = StatsObj() | 294 | s = StatsObj() |
2734 | 295 | for attr in self.stat_attrs: | 295 | for attr in self.stat_attrs: |
2735 | 296 | s.set_stat(attr, self.get_stat(attr)) | 296 | s.set_stat(attr, self.get_stat(attr)) |
2736 | @@ -298,9 +298,9 @@ | |||
2737 | 298 | 298 | ||
2738 | 299 | 299 | ||
2739 | 300 | class StatsDeltaProcess(StatsObj): | 300 | class StatsDeltaProcess(StatsObj): |
2741 | 301 | """Keep track of statistics during DirDelta process""" | 301 | u"""Keep track of statistics during DirDelta process""" |
2742 | 302 | def __init__(self): | 302 | def __init__(self): |
2744 | 303 | """StatsDeltaProcess initializer - zero file attributes""" | 303 | u"""StatsDeltaProcess initializer - zero file attributes""" |
2745 | 304 | StatsObj.__init__(self) | 304 | StatsObj.__init__(self) |
2746 | 305 | for attr in StatsObj.stat_file_attrs: | 305 | for attr in StatsObj.stat_file_attrs: |
2747 | 306 | self.__dict__[attr] = 0 | 306 | self.__dict__[attr] = 0 |
2748 | @@ -309,39 +309,39 @@ | |||
2749 | 309 | self.files_changed = [] | 309 | self.files_changed = [] |
2750 | 310 | 310 | ||
2751 | 311 | def add_new_file(self, path): | 311 | def add_new_file(self, path): |
2753 | 312 | """Add stats of new file path to statistics""" | 312 | u"""Add stats of new file path to statistics""" |
2754 | 313 | filesize = path.getsize() | 313 | filesize = path.getsize() |
2755 | 314 | self.SourceFiles += 1 | 314 | self.SourceFiles += 1 |
2756 | 315 | # SourceFileSize is added-to incrementally as read | 315 | # SourceFileSize is added-to incrementally as read |
2757 | 316 | self.NewFiles += 1 | 316 | self.NewFiles += 1 |
2758 | 317 | self.NewFileSize += filesize | 317 | self.NewFileSize += filesize |
2759 | 318 | self.DeltaEntries += 1 | 318 | self.DeltaEntries += 1 |
2761 | 319 | self.add_delta_entries_file(path, 'new') | 319 | self.add_delta_entries_file(path, b'new') |
2762 | 320 | 320 | ||
2763 | 321 | def add_changed_file(self, path): | 321 | def add_changed_file(self, path): |
2765 | 322 | """Add stats of file that has changed since last backup""" | 322 | u"""Add stats of file that has changed since last backup""" |
2766 | 323 | filesize = path.getsize() | 323 | filesize = path.getsize() |
2767 | 324 | self.SourceFiles += 1 | 324 | self.SourceFiles += 1 |
2768 | 325 | # SourceFileSize is added-to incrementally as read | 325 | # SourceFileSize is added-to incrementally as read |
2769 | 326 | self.ChangedFiles += 1 | 326 | self.ChangedFiles += 1 |
2770 | 327 | self.ChangedFileSize += filesize | 327 | self.ChangedFileSize += filesize |
2771 | 328 | self.DeltaEntries += 1 | 328 | self.DeltaEntries += 1 |
2773 | 329 | self.add_delta_entries_file(path, 'changed') | 329 | self.add_delta_entries_file(path, b'changed') |
2774 | 330 | 330 | ||
2775 | 331 | def add_deleted_file(self, path): | 331 | def add_deleted_file(self, path): |
2777 | 332 | """Add stats of file no longer in source directory""" | 332 | u"""Add stats of file no longer in source directory""" |
2778 | 333 | self.DeletedFiles += 1 # can't add size since not available | 333 | self.DeletedFiles += 1 # can't add size since not available |
2779 | 334 | self.DeltaEntries += 1 | 334 | self.DeltaEntries += 1 |
2781 | 335 | self.add_delta_entries_file(path, 'deleted') | 335 | self.add_delta_entries_file(path, b'deleted') |
2782 | 336 | 336 | ||
2783 | 337 | def add_unchanged_file(self, path): | 337 | def add_unchanged_file(self, path): |
2785 | 338 | """Add stats of file that hasn't changed since last backup""" | 338 | u"""Add stats of file that hasn't changed since last backup""" |
2786 | 339 | filesize = path.getsize() | 339 | filesize = path.getsize() |
2787 | 340 | self.SourceFiles += 1 | 340 | self.SourceFiles += 1 |
2788 | 341 | self.SourceFileSize += filesize | 341 | self.SourceFileSize += filesize |
2789 | 342 | 342 | ||
2790 | 343 | def close(self): | 343 | def close(self): |
2792 | 344 | """End collection of data, set EndTime""" | 344 | u"""End collection of data, set EndTime""" |
2793 | 345 | self.EndTime = time.time() | 345 | self.EndTime = time.time() |
2794 | 346 | 346 | ||
2795 | 347 | def add_delta_entries_file(self, path, action_type): | 347 | def add_delta_entries_file(self, path, action_type): |
2796 | 348 | 348 | ||
2797 | === modified file 'duplicity/util.py' | |||
2798 | --- duplicity/util.py 2018-07-24 11:52:33 +0000 | |||
2799 | +++ duplicity/util.py 2018-09-24 21:19:45 +0000 | |||
2800 | @@ -19,7 +19,7 @@ | |||
2801 | 19 | # along with duplicity; if not, write to the Free Software Foundation, | 19 | # along with duplicity; if not, write to the Free Software Foundation, |
2802 | 20 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | 20 | # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
2803 | 21 | 21 | ||
2805 | 22 | """ | 22 | u""" |
2806 | 23 | Miscellaneous utilities. | 23 | Miscellaneous utilities. |
2807 | 24 | """ | 24 | """ |
2808 | 25 | 25 | ||
2809 | @@ -51,23 +51,23 @@ | |||
2810 | 51 | # ToDo: Revisit this once we drop Python 2 support/the backport is complete | 51 | # ToDo: Revisit this once we drop Python 2 support/the backport is complete |
2811 | 52 | 52 | ||
2812 | 53 | def fsencode(unicode_filename): | 53 | def fsencode(unicode_filename): |
2814 | 54 | """Convert a unicode filename to a filename encoded in the system encoding""" | 54 | u"""Convert a unicode filename to a filename encoded in the system encoding""" |
2815 | 55 | # For paths, just use path.name rather than converting with this | 55 | # For paths, just use path.name rather than converting with this |
2816 | 56 | # If we are not doing any cleverness with non-unicode filename bytes, | 56 | # If we are not doing any cleverness with non-unicode filename bytes, |
2817 | 57 | # encoding to system encoding is good enough | 57 | # encoding to system encoding is good enough |
2819 | 58 | return unicode_filename.encode(sys.getfilesystemencoding(), "replace") | 58 | return unicode_filename.encode(sys.getfilesystemencoding(), u"replace") |
2820 | 59 | 59 | ||
2821 | 60 | def fsdecode(bytes_filename): | 60 | def fsdecode(bytes_filename): |
2823 | 61 | """Convert a filename encoded in the system encoding to unicode""" | 61 | u"""Convert a filename encoded in the system encoding to unicode""" |
2824 | 62 | # For paths, just use path.uc_name rather than converting with this | 62 | # For paths, just use path.uc_name rather than converting with this |
2825 | 63 | # If we are not doing any cleverness with non-unicode filename bytes, | 63 | # If we are not doing any cleverness with non-unicode filename bytes, |
2826 | 64 | # decoding using system encoding is good enough. Use "ignore" as | 64 | # decoding using system encoding is good enough. Use "ignore" as |
2827 | 65 | # Linux paths can contain non-Unicode characters | 65 | # Linux paths can contain non-Unicode characters |
2829 | 66 | return bytes_filename.decode(globals.fsencoding, "replace") | 66 | return bytes_filename.decode(globals.fsencoding, u"replace") |
2830 | 67 | 67 | ||
2831 | 68 | 68 | ||
2832 | 69 | def exception_traceback(limit=50): | 69 | def exception_traceback(limit=50): |
2834 | 70 | """ | 70 | u""" |
2835 | 71 | @return A string representation in typical Python format of the | 71 | @return A string representation in typical Python format of the |
2836 | 72 | currently active/raised exception. | 72 | currently active/raised exception. |
2837 | 73 | """ | 73 | """ |
2838 | @@ -76,20 +76,20 @@ | |||
2839 | 76 | lines = traceback.format_tb(tb, limit) | 76 | lines = traceback.format_tb(tb, limit) |
2840 | 77 | lines.extend(traceback.format_exception_only(type, value)) | 77 | lines.extend(traceback.format_exception_only(type, value)) |
2841 | 78 | 78 | ||
2844 | 79 | msg = "Traceback (innermost last):\n" | 79 | msg = u"Traceback (innermost last):\n" |
2845 | 80 | msg = msg + "%-20s %s" % (string.join(lines[:-1], ""), lines[-1]) | 80 | msg = msg + u"%-20s %s" % (string.join(lines[:-1], u""), lines[-1]) |
2846 | 81 | 81 | ||
2848 | 82 | return msg.decode('unicode-escape', 'replace') | 82 | return msg.decode(u'unicode-escape', u'replace') |
2849 | 83 | 83 | ||
2850 | 84 | 84 | ||
2851 | 85 | def escape(string): | 85 | def escape(string): |
2855 | 86 | "Convert a (bytes) filename to a format suitable for logging (quoted utf8)" | 86 | u"Convert a (bytes) filename to a format suitable for logging (quoted utf8)" |
2856 | 87 | string = fsdecode(string).encode('unicode-escape', 'replace') | 87 | string = fsdecode(string).encode(u'unicode-escape', u'replace') |
2857 | 88 | return u"'%s'" % string.decode('utf8', 'replace') | 88 | return u"'%s'" % string.decode(u'utf8', u'replace') |
2858 | 89 | 89 | ||
2859 | 90 | 90 | ||
2860 | 91 | def uindex(index): | 91 | def uindex(index): |
2862 | 92 | "Convert an index (a tuple of path parts) to unicode for printing" | 92 | u"Convert an index (a tuple of path parts) to unicode for printing" |
2863 | 93 | if index: | 93 | if index: |
2864 | 94 | return os.path.join(*list(map(fsdecode, index))) | 94 | return os.path.join(*list(map(fsdecode, index))) |
2865 | 95 | else: | 95 | else: |
2866 | @@ -101,11 +101,11 @@ | |||
2867 | 101 | # non-ascii will cause a UnicodeDecodeError when implicitly decoding to | 101 | # non-ascii will cause a UnicodeDecodeError when implicitly decoding to |
2868 | 102 | # unicode. So we decode manually, using the filesystem encoding. | 102 | # unicode. So we decode manually, using the filesystem encoding. |
2869 | 103 | # 99.99% of the time, this will be a fine encoding to use. | 103 | # 99.99% of the time, this will be a fine encoding to use. |
2871 | 104 | return fsdecode(unicode(e).encode('utf-8')) | 104 | return fsdecode(unicode(e).encode(u'utf-8')) |
2872 | 105 | 105 | ||
2873 | 106 | 106 | ||
2874 | 107 | def maybe_ignore_errors(fn): | 107 | def maybe_ignore_errors(fn): |
2876 | 108 | """ | 108 | u""" |
2877 | 109 | Execute fn. If the global configuration setting ignore_errors is | 109 | Execute fn. If the global configuration setting ignore_errors is |
2878 | 110 | set to True, catch errors and log them but do continue (and return | 110 | set to True, catch errors and log them but do continue (and return |
2879 | 111 | None). | 111 | None). |
2880 | @@ -117,7 +117,7 @@ | |||
2881 | 117 | return fn() | 117 | return fn() |
2882 | 118 | except Exception as e: | 118 | except Exception as e: |
2883 | 119 | if globals.ignore_errors: | 119 | if globals.ignore_errors: |
2885 | 120 | log.Warn(_("IGNORED_ERROR: Warning: ignoring error as requested: %s: %s") | 120 | log.Warn(_(u"IGNORED_ERROR: Warning: ignoring error as requested: %s: %s") |
2886 | 121 | % (e.__class__.__name__, uexc(e))) | 121 | % (e.__class__.__name__, uexc(e))) |
2887 | 122 | return None | 122 | return None |
2888 | 123 | else: | 123 | else: |
2889 | @@ -145,7 +145,7 @@ | |||
2890 | 145 | # yet. So we want to ignore ReadError exceptions, which are used to signal | 145 | # yet. So we want to ignore ReadError exceptions, which are used to signal |
2891 | 146 | # this. | 146 | # this. |
2892 | 147 | try: | 147 | try: |
2894 | 148 | tf = tarfile.TarFile("arbitrary", mode, fp) | 148 | tf = tarfile.TarFile(u"arbitrary", mode, fp) |
2895 | 149 | # Now we cause TarFile to not cache TarInfo objects. It would end up | 149 | # Now we cause TarFile to not cache TarInfo objects. It would end up |
2896 | 150 | # consuming a lot of memory over the lifetime of our long-lasting | 150 | # consuming a lot of memory over the lifetime of our long-lasting |
2897 | 151 | # signature files otherwise. | 151 | # signature files otherwise. |
2898 | @@ -159,14 +159,14 @@ | |||
2899 | 159 | # Python versions before 2.6 ensure that directories end with /, but 2.6 | 159 | # Python versions before 2.6 ensure that directories end with /, but 2.6 |
2900 | 160 | # and later ensure they they *don't* have /. ::shrug:: Internally, we | 160 | # and later ensure they they *don't* have /. ::shrug:: Internally, we |
2901 | 161 | # continue to use pre-2.6 method. | 161 | # continue to use pre-2.6 method. |
2904 | 162 | if ti.isdir() and not ti.name.endswith("/"): | 162 | if ti.isdir() and not ti.name.endswith(b"/"): |
2905 | 163 | return ti.name + "/" | 163 | return ti.name + b"/" |
2906 | 164 | else: | 164 | else: |
2907 | 165 | return ti.name | 165 | return ti.name |
2908 | 166 | 166 | ||
2909 | 167 | 167 | ||
2910 | 168 | def ignore_missing(fn, filename): | 168 | def ignore_missing(fn, filename): |
2912 | 169 | """ | 169 | u""" |
2913 | 170 | Execute fn on filename. Ignore ENOENT errors, otherwise raise exception. | 170 | Execute fn on filename. Ignore ENOENT errors, otherwise raise exception. |
2914 | 171 | 171 | ||
2915 | 172 | @param fn: callable | 172 | @param fn: callable |
2916 | @@ -184,7 +184,7 @@ | |||
2917 | 184 | @atexit.register | 184 | @atexit.register |
2918 | 185 | def release_lockfile(): | 185 | def release_lockfile(): |
2919 | 186 | if globals.lockfile: | 186 | if globals.lockfile: |
2921 | 187 | log.Debug(_("Releasing lockfile %s") % globals.lockpath) | 187 | log.Debug(_(u"Releasing lockfile %s") % globals.lockpath) |
2922 | 188 | try: | 188 | try: |
2923 | 189 | globals.lockfile.release() | 189 | globals.lockfile.release() |
2924 | 190 | except Exception: | 190 | except Exception: |
2925 | @@ -192,7 +192,7 @@ | |||
2926 | 192 | 192 | ||
2927 | 193 | 193 | ||
2928 | 194 | def copyfileobj(infp, outfp, byte_count=-1): | 194 | def copyfileobj(infp, outfp, byte_count=-1): |
2930 | 195 | """Copy byte_count bytes from infp to outfp, or all if byte_count < 0 | 195 | u"""Copy byte_count bytes from infp to outfp, or all if byte_count < 0 |
2931 | 196 | 196 | ||
2932 | 197 | Returns the number of bytes actually written (may be less than | 197 | Returns the number of bytes actually written (may be less than |
2933 | 198 | byte_count if find eof. Does not close either fileobj. | 198 | byte_count if find eof. Does not close either fileobj. |
2934 | @@ -221,7 +221,7 @@ | |||
2935 | 221 | 221 | ||
2936 | 222 | 222 | ||
2937 | 223 | def which(program): | 223 | def which(program): |
2939 | 224 | """ | 224 | u""" |
2940 | 225 | Return absolute path for program name. | 225 | Return absolute path for program name. |
2941 | 226 | Returns None if program not found. | 226 | Returns None if program not found. |
2942 | 227 | """ | 227 | """ |
2943 | @@ -234,8 +234,8 @@ | |||
2944 | 234 | if is_exe(program): | 234 | if is_exe(program): |
2945 | 235 | return program | 235 | return program |
2946 | 236 | else: | 236 | else: |
2949 | 237 | for path in os.getenv("PATH").split(os.pathsep): | 237 | for path in os.getenv(u"PATH").split(os.pathsep): |
2950 | 238 | path = path.strip('"') | 238 | path = path.strip(u'"') |
2951 | 239 | exe_file = os.path.abspath(os.path.join(path, program)) | 239 | exe_file = os.path.abspath(os.path.join(path, program)) |
2952 | 240 | if is_exe(exe_file): | 240 | if is_exe(exe_file): |
2953 | 241 | return exe_file | 241 | return exe_file |
2954 | 242 | 242 | ||
2955 | === modified file 'po/duplicity.pot' | |||
2956 | --- po/duplicity.pot 2018-09-17 21:03:06 +0000 | |||
2957 | +++ po/duplicity.pot 2018-09-24 21:19:45 +0000 | |||
2958 | @@ -8,7 +8,7 @@ | |||
2959 | 8 | msgstr "" | 8 | msgstr "" |
2960 | 9 | "Project-Id-Version: PACKAGE VERSION\n" | 9 | "Project-Id-Version: PACKAGE VERSION\n" |
2961 | 10 | "Report-Msgid-Bugs-To: Kenneth Loafman <kenneth@loafman.com>\n" | 10 | "Report-Msgid-Bugs-To: Kenneth Loafman <kenneth@loafman.com>\n" |
2963 | 11 | "POT-Creation-Date: 2018-09-17 15:38-0500\n" | 11 | "POT-Creation-Date: 2018-09-24 11:46-0500\n" |
2964 | 12 | "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" | 12 | "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" |
2965 | 13 | "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" | 13 | "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" |
2966 | 14 | "Language-Team: LANGUAGE <LL@li.org>\n" | 14 | "Language-Team: LANGUAGE <LL@li.org>\n" |
2967 | 15 | 15 | ||
2968 | === modified file 'testing/test_code.py' | |||
2969 | --- testing/test_code.py 2018-09-17 21:03:06 +0000 | |||
2970 | +++ testing/test_code.py 2018-09-24 21:19:45 +0000 | |||
2971 | @@ -113,12 +113,6 @@ | |||
2972 | 113 | os.path.join(_top_dir, u'setup.py'), | 113 | os.path.join(_top_dir, u'setup.py'), |
2973 | 114 | os.path.join(_top_dir, u'duplicity', u'__init__.py'), | 114 | os.path.join(_top_dir, u'duplicity', u'__init__.py'), |
2974 | 115 | os.path.join(_top_dir, u'duplicity', u'compilec.py'), | 115 | os.path.join(_top_dir, u'duplicity', u'compilec.py'), |
2975 | 116 | os.path.join(_top_dir, u'duplicity', u'diffdir.py'), | ||
2976 | 117 | os.path.join(_top_dir, u'duplicity', u'manifest.py'), | ||
2977 | 118 | os.path.join(_top_dir, u'duplicity', u'patchdir.py'), | ||
2978 | 119 | os.path.join(_top_dir, u'duplicity', u'path.py'), | ||
2979 | 120 | os.path.join(_top_dir, u'duplicity', u'statistics.py'), | ||
2980 | 121 | os.path.join(_top_dir, u'duplicity', u'util.py'), | ||
2981 | 122 | os.path.join(_top_dir, u'testing', u'overrides', u'gettext.py'), | 116 | os.path.join(_top_dir, u'testing', u'overrides', u'gettext.py'), |
2982 | 123 | os.path.join(_top_dir, u'testing', u'test_unadorned.py'), | 117 | os.path.join(_top_dir, u'testing', u'test_unadorned.py'), |
2983 | 124 | os.path.join(_top_dir, u'testing', u'unit', u'test_statistics.py'), | 118 | os.path.join(_top_dir, u'testing', u'unit', u'test_statistics.py'), |
Taking into account the comment related to util.fsencode(), I think that tarinfo names need to be bytes, since they can be used to generate paths, and manifest strings also need to be bytes, since they can be read back in to generate a manifest. At the same time, I'm not sure if a __str__ function should be returning bytes, although I'm not seeing a good solution.