Merge lp:~aaron-whitehouse/duplicity/08-ufn-to-fsdecode into lp:~duplicity-team/duplicity/0.8-series
- 08-ufn-to-fsdecode
- Merge into 0.8-series
Proposed by
Aaron Whitehouse
Status: | Merged |
---|---|
Merged at revision: | 1296 |
Proposed branch: | lp:~aaron-whitehouse/duplicity/08-ufn-to-fsdecode |
Merge into: | lp:~duplicity-team/duplicity/0.8-series |
Diff against target: |
536 lines (+60/-66) 11 files modified
bin/duplicity (+12/-12) bin/rdiffdir (+2/-2) duplicity/backend.py (+2/-2) duplicity/collections.py (+11/-11) duplicity/diffdir.py (+4/-4) duplicity/lazy.py (+2/-2) duplicity/patchdir.py (+7/-7) duplicity/path.py (+4/-4) duplicity/selection.py (+3/-3) duplicity/tempdir.py (+8/-8) duplicity/util.py (+5/-11) |
To merge this branch: | bzr merge lp:~aaron-whitehouse/duplicity/08-ufn-to-fsdecode |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
duplicity-team | Pending | ||
Review via email: mp+335537@code.launchpad.net |
Commit message
Description of the change
* Change util.fsdecode to use "replace" instead of "ignore" (matching behaviour of util.ufn)
* Replace all uses of ufn with fsdecode
* Make backend.tobytes use util.fsencode rather than reimplementing it
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/duplicity' |
2 | --- bin/duplicity 2017-12-20 13:03:42 +0000 |
3 | +++ bin/duplicity 2017-12-21 22:26:40 +0000 |
4 | @@ -317,7 +317,7 @@ |
5 | return # error querying file |
6 | if size != orig_size: |
7 | code_extra = "%s %d %d" % (util.escape(dest_filename), orig_size, size) |
8 | - log.FatalError(_("File %s was corrupted during upload.") % util.ufn(dest_filename), |
9 | + log.FatalError(_("File %s was corrupted during upload.") % util.fsdecode(dest_filename), |
10 | log.ErrorCode.volume_wrong_size, code_extra) |
11 | |
12 | def put(tdp, dest_filename, vol_num): |
13 | @@ -694,7 +694,7 @@ |
14 | for path in path_iter: |
15 | if path.difftype != "deleted": |
16 | user_info = u"%s %s" % (dup_time.timetopretty(path.getmtime()), |
17 | - util.ufn(path.get_relative_path())) |
18 | + util.fsdecode(path.get_relative_path())) |
19 | log_info = "%s %s %s" % (dup_time.timetostring(path.getmtime()), |
20 | util.escape(path.get_relative_path()), |
21 | path.type) |
22 | @@ -718,7 +718,7 @@ |
23 | restore_get_patched_rop_iter(col_stats)): |
24 | if globals.restore_dir: |
25 | log.FatalError(_("%s not found in archive - no files restored.") |
26 | - % (util.ufn(globals.restore_dir)), |
27 | + % (util.fsdecode(globals.restore_dir)), |
28 | log.ErrorCode.restore_dir_not_found) |
29 | else: |
30 | log.FatalError(_("No files found in archive - nothing restored."), |
31 | @@ -790,7 +790,7 @@ |
32 | log.FatalError("%s\n %s\n %s\n %s\n" % |
33 | (_("Invalid data - %s hash mismatch for file:") % |
34 | hash_pair[0], |
35 | - util.ufn(filename), |
36 | + util.fsdecode(filename), |
37 | _("Calculated hash: %s") % calculated_hash, |
38 | _("Manifest hash: %s") % hash_pair[1]), |
39 | log.ErrorCode.mismatched_hash) |
40 | @@ -892,7 +892,7 @@ |
41 | log.Warn(_("No extraneous files found, nothing deleted in cleanup.")) |
42 | return |
43 | |
44 | - filestr = u"\n".join(map(util.ufn, extraneous)) |
45 | + filestr = u"\n".join(map(util.fsdecode, extraneous)) |
46 | if globals.force: |
47 | log.Notice(ngettext("Deleting this file from backend:", |
48 | "Deleting these files from backend:", |
49 | @@ -1196,11 +1196,11 @@ |
50 | del_name = globals.archive_dir_path.append(fn).name |
51 | |
52 | log.Notice(_("Deleting local %s (not authoritative at backend).") % |
53 | - util.ufn(del_name)) |
54 | + util.fsdecode(del_name)) |
55 | try: |
56 | util.ignore_missing(os.unlink, del_name) |
57 | except Exception as e: |
58 | - log.Warn(_("Unable to delete %s: %s") % (util.ufn(del_name), |
59 | + log.Warn(_("Unable to delete %s: %s") % (util.fsdecode(del_name), |
60 | util.uexc(e))) |
61 | |
62 | def copy_to_local(fn): |
63 | @@ -1232,7 +1232,7 @@ |
64 | else: |
65 | name = None |
66 | log.FatalError(_("Failed to read %s: %s") % |
67 | - (util.ufn(name), sys.exc_info()), |
68 | + (util.fsdecode(name), sys.exc_info()), |
69 | log.ErrorCode.generic) |
70 | if not res.data: |
71 | self.fileobj.close() |
72 | @@ -1245,7 +1245,7 @@ |
73 | def get_footer(self): |
74 | return "" |
75 | |
76 | - log.Notice(_("Copying %s to local cache.") % util.ufn(fn)) |
77 | + log.Notice(_("Copying %s to local cache.") % util.fsdecode(fn)) |
78 | |
79 | pr, loc_name, rem_name = resolve_basename(fn) |
80 | |
81 | @@ -1311,10 +1311,10 @@ |
82 | else: |
83 | if local_missing: |
84 | log.Notice(_("Sync would copy the following from remote to local:") + |
85 | - u"\n" + u"\n".join(map(util.ufn, local_missing))) |
86 | + u"\n" + u"\n".join(map(util.fsdecode, local_missing))) |
87 | if local_spurious: |
88 | log.Notice(_("Sync would remove the following spurious local files:") + |
89 | - u"\n" + u"\n".join(map(util.ufn, local_spurious))) |
90 | + u"\n" + u"\n".join(map(util.fsdecode, local_spurious))) |
91 | |
92 | |
93 | def check_last_manifest(col_stats): |
94 | @@ -1390,7 +1390,7 @@ |
95 | """ |
96 | log.Log(u'=' * 80, verbosity) |
97 | log.Log(u"duplicity $version ($reldate)", verbosity) |
98 | - log.Log(u"Args: %s" % util.ufn(' '.join(sys.argv)), verbosity) |
99 | + log.Log(u"Args: %s" % util.fsdecode(' '.join(sys.argv)), verbosity) |
100 | log.Log(u' '.join(platform.uname()), verbosity) |
101 | log.Log(u"%s %s" % (sys.executable or sys.platform, sys.version), verbosity) |
102 | log.Log(u'=' * 80, verbosity) |
103 | |
104 | === modified file 'bin/rdiffdir' |
105 | --- bin/rdiffdir 2015-05-01 13:56:13 +0000 |
106 | +++ bin/rdiffdir 2017-12-21 22:26:40 +0000 |
107 | @@ -58,7 +58,7 @@ |
108 | try: |
109 | return open(filename, "r") |
110 | except IOError: |
111 | - log.FatalError(_("Error opening file %s") % util.ufn(filename)) |
112 | + log.FatalError(_("Error opening file %s") % util.fsdecode(filename)) |
113 | |
114 | try: |
115 | optlist, args = getopt.getopt(arglist, "v:Vz", |
116 | @@ -126,7 +126,7 @@ |
117 | pass |
118 | else: |
119 | log.FatalError(_("File %s already exists, will not " |
120 | - "overwrite.") % util.ufn(filename)) |
121 | + "overwrite.") % util.fsdecode(filename)) |
122 | |
123 | |
124 | def get_action(args): |
125 | |
126 | === modified file 'duplicity/backend.py' |
127 | --- duplicity/backend.py 2017-12-13 22:43:36 +0000 |
128 | +++ duplicity/backend.py 2017-12-21 22:26:40 +0000 |
129 | @@ -511,7 +511,7 @@ |
130 | |
131 | def __do_put(self, source_path, remote_filename): |
132 | if hasattr(self.backend, '_put'): |
133 | - log.Info(_("Writing %s") % util.ufn(remote_filename)) |
134 | + log.Info(_("Writing %s") % util.fsdecode(remote_filename)) |
135 | self.backend._put(source_path, remote_filename) |
136 | else: |
137 | raise NotImplementedError() |
138 | @@ -568,7 +568,7 @@ |
139 | # There shouldn't be any encoding errors for files we care |
140 | # about, since duplicity filenames are ascii. But user files |
141 | # may be in the same directory. So just replace characters. |
142 | - return filename.encode(globals.fsencoding, 'replace') |
143 | + return util.fsencode(filename) |
144 | else: |
145 | return filename |
146 | |
147 | |
148 | === modified file 'duplicity/collections.py' |
149 | --- duplicity/collections.py 2017-12-13 22:43:36 +0000 |
150 | +++ duplicity/collections.py 2017-12-21 22:26:40 +0000 |
151 | @@ -168,7 +168,7 @@ |
152 | try: |
153 | self.backend.delete(rfn) |
154 | except Exception: |
155 | - log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn]) |
156 | + log.Debug(_("BackupSet.delete: missing %s") % [util.fsdecode(f) for f in rfn]) |
157 | pass |
158 | if self.action not in ["collection-status", "replicate"]: |
159 | local_filename_list = globals.archive_dir_path.listdir() |
160 | @@ -182,7 +182,7 @@ |
161 | try: |
162 | globals.archive_dir_path.append(lfn).delete() |
163 | except Exception: |
164 | - log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in lfn]) |
165 | + log.Debug(_("BackupSet.delete: missing %s") % [util.fsdecode(f) for f in lfn]) |
166 | pass |
167 | util.release_lockfile() |
168 | |
169 | @@ -194,7 +194,7 @@ |
170 | if self.remote_manifest_name: |
171 | filelist.append(self.remote_manifest_name) |
172 | filelist.extend(self.volume_name_dict.values()) |
173 | - return u"[%s]" % u", ".join(map(util.ufn, filelist)) |
174 | + return u"[%s]" % u", ".join(map(util.fsdecode, filelist)) |
175 | |
176 | def get_timestr(self): |
177 | """ |
178 | @@ -248,10 +248,10 @@ |
179 | manifest_buffer = self.backend.get_data(self.remote_manifest_name) |
180 | except GPGError as message: |
181 | log.Error(_("Error processing remote manifest (%s): %s") % |
182 | - (util.ufn(self.remote_manifest_name), util.uexc(message))) |
183 | + (util.fsdecode(self.remote_manifest_name), util.uexc(message))) |
184 | return None |
185 | log.Info(_("Processing remote manifest %s (%s)") % ( |
186 | - util.ufn(self.remote_manifest_name), len(manifest_buffer))) |
187 | + util.fsdecode(self.remote_manifest_name), len(manifest_buffer))) |
188 | return manifest.Manifest().from_string(manifest_buffer) |
189 | |
190 | def get_manifest(self): |
191 | @@ -789,7 +789,7 @@ |
192 | "Warning, found the following local orphaned " |
193 | "signature files:", |
194 | len(self.local_orphaned_sig_names)) + u"\n" + |
195 | - u"\n".join(map(util.ufn, self.local_orphaned_sig_names)), |
196 | + u"\n".join(map(util.fsdecode, self.local_orphaned_sig_names)), |
197 | log.WarningCode.orphaned_sig) |
198 | |
199 | if self.remote_orphaned_sig_names: |
200 | @@ -798,7 +798,7 @@ |
201 | "Warning, found the following remote orphaned " |
202 | "signature files:", |
203 | len(self.remote_orphaned_sig_names)) + u"\n" + |
204 | - u"\n".join(map(util.ufn, self.remote_orphaned_sig_names)), |
205 | + u"\n".join(map(util.fsdecode, self.remote_orphaned_sig_names)), |
206 | log.WarningCode.orphaned_sig) |
207 | |
208 | if self.all_sig_chains and sig_chain_warning and not self.matched_chain_pair: |
209 | @@ -828,7 +828,7 @@ |
210 | missing files. |
211 | """ |
212 | log.Debug(_("Extracting backup chains from list of files: %s") |
213 | - % [util.ufn(f) for f in filename_list]) |
214 | + % [util.fsdecode(f) for f in filename_list]) |
215 | # First put filenames in set form |
216 | sets = [] |
217 | |
218 | @@ -838,15 +838,15 @@ |
219 | """ |
220 | for set in sets: |
221 | if set.add_filename(filename): |
222 | - log.Debug(_("File %s is part of known set") % (util.ufn(filename),)) |
223 | + log.Debug(_("File %s is part of known set") % (util.fsdecode(filename),)) |
224 | break |
225 | else: |
226 | - log.Debug(_("File %s is not part of a known set; creating new set") % (util.ufn(filename),)) |
227 | + log.Debug(_("File %s is not part of a known set; creating new set") % (util.fsdecode(filename),)) |
228 | new_set = BackupSet(self.backend, self.action) |
229 | if new_set.add_filename(filename): |
230 | sets.append(new_set) |
231 | else: |
232 | - log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.ufn(filename)) |
233 | + log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.fsdecode(filename)) |
234 | |
235 | for f in filename_list: |
236 | add_to_sets(f) |
237 | |
238 | === modified file 'duplicity/diffdir.py' |
239 | --- duplicity/diffdir.py 2017-12-13 22:43:36 +0000 |
240 | +++ duplicity/diffdir.py 2017-12-21 22:26:40 +0000 |
241 | @@ -105,7 +105,7 @@ |
242 | index_string = sig_path.get_relative_path() |
243 | else: |
244 | assert 0, "Both new and sig are None for some reason" |
245 | - log.Warn(_("Error %s getting delta for %s") % (str(exc), util.ufn(index_string))) |
246 | + log.Warn(_("Error %s getting delta for %s") % (str(exc), util.fsdecode(index_string))) |
247 | return None |
248 | |
249 | |
250 | @@ -165,14 +165,14 @@ |
251 | if new_path and stats: |
252 | stats.add_new_file(new_path) |
253 | log.Info(_("A %s") % |
254 | - (util.ufn(delta_path.get_relative_path())), |
255 | + (util.fsdecode(delta_path.get_relative_path())), |
256 | log.InfoCode.diff_file_new, |
257 | util.escape(delta_path.get_relative_path())) |
258 | else: |
259 | if new_path and stats: |
260 | stats.add_changed_file(new_path) |
261 | log.Info(_("M %s") % |
262 | - (util.ufn(delta_path.get_relative_path())), |
263 | + (util.fsdecode(delta_path.get_relative_path())), |
264 | log.InfoCode.diff_file_changed, |
265 | util.escape(delta_path.get_relative_path())) |
266 | |
267 | @@ -202,7 +202,7 @@ |
268 | if sig_path and sig_path.exists() and sig_path.index != (): |
269 | # but signature says it did |
270 | log.Info(_("D %s") % |
271 | - (util.ufn(sig_path.get_relative_path())), |
272 | + (util.fsdecode(sig_path.get_relative_path())), |
273 | log.InfoCode.diff_file_deleted, |
274 | util.escape(sig_path.get_relative_path())) |
275 | if sigTarFile: |
276 | |
277 | === modified file 'duplicity/lazy.py' |
278 | --- duplicity/lazy.py 2017-03-02 22:38:47 +0000 |
279 | +++ duplicity/lazy.py 2017-12-21 22:26:40 +0000 |
280 | @@ -410,7 +410,7 @@ |
281 | filename = os.path.join(*self.index) # pylint: disable=not-an-iterable |
282 | else: |
283 | filename = "." |
284 | - log.Warn(_("Error '%s' processing %s") % (exc, util.ufn(filename)), |
285 | + log.Warn(_("Error '%s' processing %s") % (exc, util.fsdecode(filename)), |
286 | log.WarningCode.cannot_process, |
287 | util.escape(filename)) |
288 | |
289 | @@ -420,6 +420,6 @@ |
290 | index_str = "." |
291 | else: |
292 | index_str = os.path.join(*index) |
293 | - log.Warn(_("Skipping %s because of previous error") % util.ufn(index_str), |
294 | + log.Warn(_("Skipping %s because of previous error") % util.fsdecode(index_str), |
295 | log.WarningCode.process_skipped, |
296 | util.escape(index_str)) |
297 | |
298 | === modified file 'duplicity/patchdir.py' |
299 | --- duplicity/patchdir.py 2017-07-11 14:55:38 +0000 |
300 | +++ duplicity/patchdir.py 2017-12-21 22:26:40 +0000 |
301 | @@ -77,12 +77,12 @@ |
302 | ITR = IterTreeReducer(PathPatcher, [base_path]) |
303 | for basis_path, diff_ropath in collated: |
304 | if basis_path: |
305 | - log.Info(_("Patching %s") % (util.ufn(basis_path.get_relative_path())), |
306 | + log.Info(_("Patching %s") % (util.fsdecode(basis_path.get_relative_path())), |
307 | log.InfoCode.patch_file_patching, |
308 | util.escape(basis_path.get_relative_path())) |
309 | ITR(basis_path.index, basis_path, diff_ropath) |
310 | else: |
311 | - log.Info(_("Patching %s") % (util.ufn(diff_ropath.get_relative_path())), |
312 | + log.Info(_("Patching %s") % (util.fsdecode(diff_ropath.get_relative_path())), |
313 | log.InfoCode.patch_file_patching, |
314 | util.escape(diff_ropath.get_relative_path())) |
315 | ITR(diff_ropath.index, basis_path, diff_ropath) |
316 | @@ -165,7 +165,7 @@ |
317 | "\\2", tiname) |
318 | if num_subs != 1: |
319 | raise PatchDirException(u"Unrecognized diff entry %s" % |
320 | - util.ufn(tiname)) |
321 | + util.fsdecode(tiname)) |
322 | else: |
323 | difftype = prefix[:-1] # strip trailing / |
324 | name = tiname[len(prefix):] |
325 | @@ -175,14 +175,14 @@ |
326 | break |
327 | else: |
328 | raise PatchDirException(u"Unrecognized diff entry %s" % |
329 | - util.ufn(tiname)) |
330 | + util.fsdecode(tiname)) |
331 | if name == "." or name == "": |
332 | index = () |
333 | else: |
334 | index = tuple(name.split("/")) |
335 | if '..' in index: |
336 | raise PatchDirException(u"Tar entry %s contains '..'. Security " |
337 | - "violation" % util.ufn(tiname)) |
338 | + "violation" % util.fsdecode(tiname)) |
339 | return (index, difftype, multivol) |
340 | |
341 | |
342 | @@ -528,7 +528,7 @@ |
343 | except Exception as e: |
344 | filename = normalized[-1].get_ropath().get_relative_path() |
345 | log.Warn(_("Error '%s' patching %s") % |
346 | - (util.uexc(e), util.ufn(filename)), |
347 | + (util.uexc(e), util.fsdecode(filename)), |
348 | log.WarningCode.cannot_process, |
349 | util.escape(filename)) |
350 | |
351 | @@ -603,7 +603,7 @@ |
352 | def can_fast_process(self, index, ropath): |
353 | """Can fast process (no recursion) if ropath isn't a directory""" |
354 | log.Info(_("Writing %s of type %s") % |
355 | - (util.ufn(ropath.get_relative_path()), ropath.type), |
356 | + (util.fsdecode(ropath.get_relative_path()), ropath.type), |
357 | log.InfoCode.patch_file_writing, |
358 | "%s %s" % (util.escape(ropath.get_relative_path()), ropath.type)) |
359 | return not ropath.isdir() |
360 | |
361 | === modified file 'duplicity/path.py' |
362 | --- duplicity/path.py 2017-12-13 22:43:36 +0000 |
363 | +++ duplicity/path.py 2017-12-21 22:26:40 +0000 |
364 | @@ -92,7 +92,7 @@ |
365 | elif stat.S_ISFIFO(st_mode): |
366 | self.type = "fifo" |
367 | elif stat.S_ISSOCK(st_mode): |
368 | - raise PathException(util.ufn(self.get_relative_path()) + |
369 | + raise PathException(util.fsdecode(self.get_relative_path()) + |
370 | u"is a socket, unsupported by tar") |
371 | self.type = "sock" |
372 | elif stat.S_ISCHR(st_mode): |
373 | @@ -109,7 +109,7 @@ |
374 | os.minor(self.stat.st_rdev)) |
375 | except: |
376 | log.Warn(_("Warning: %s invalid devnums (0x%X), treating as (0, 0).") |
377 | - % (util.ufn(self.get_relative_path()), self.stat.st_rdev)) |
378 | + % (util.fsdecode(self.get_relative_path()), self.stat.st_rdev)) |
379 | self.devnums = (0, 0) |
380 | |
381 | def blank(self): |
382 | @@ -295,7 +295,7 @@ |
383 | ti.uid, ti.gid = self.stat.st_uid, self.stat.st_gid |
384 | if self.stat.st_mtime < 0: |
385 | log.Warn(_("Warning: %s has negative mtime, treating as 0.") |
386 | - % (util.ufn(self.get_relative_path()))) |
387 | + % (util.fsdecode(self.get_relative_path()))) |
388 | ti.mtime = 0 |
389 | else: |
390 | ti.mtime = int(self.stat.st_mtime) |
391 | @@ -359,7 +359,7 @@ |
392 | """ |
393 | def log_diff(log_string): |
394 | log_str = _("Difference found:") + u" " + log_string |
395 | - log.Notice(log_str % (util.ufn(self.get_relative_path()))) |
396 | + log.Notice(log_str % (util.fsdecode(self.get_relative_path()))) |
397 | |
398 | if include_data is False: |
399 | return True |
400 | |
401 | === modified file 'duplicity/selection.py' |
402 | --- duplicity/selection.py 2017-12-13 22:43:36 +0000 |
403 | +++ duplicity/selection.py 2017-12-21 22:26:40 +0000 |
404 | @@ -116,15 +116,15 @@ |
405 | try: |
406 | mode = os.stat(fullpath)[stat.ST_MODE] |
407 | if stat.S_ISSOCK(mode): |
408 | - log.Info(_("Skipping socket %s") % util.ufn(fullpath), |
409 | + log.Info(_("Skipping socket %s") % util.fsdecode(fullpath), |
410 | log.InfoCode.skipping_socket, |
411 | util.escape(fullpath)) |
412 | else: |
413 | - log.Warn(_("Error initializing file %s") % util.ufn(fullpath), |
414 | + log.Warn(_("Error initializing file %s") % util.fsdecode(fullpath), |
415 | log.WarningCode.cannot_iterate, |
416 | util.escape(fullpath)) |
417 | except OSError: |
418 | - log.Warn(_("Error accessing possibly locked file %s") % util.ufn(fullpath), |
419 | + log.Warn(_("Error accessing possibly locked file %s") % util.fsdecode(fullpath), |
420 | log.WarningCode.cannot_stat, util.escape(fullpath)) |
421 | return None |
422 | |
423 | |
424 | === modified file 'duplicity/tempdir.py' |
425 | --- duplicity/tempdir.py 2016-06-28 21:03:46 +0000 |
426 | +++ duplicity/tempdir.py 2017-12-21 22:26:40 +0000 |
427 | @@ -129,7 +129,7 @@ |
428 | temproot = _initialSystemTempRoot |
429 | self.__dir = tempfile.mkdtemp("-tempdir", "duplicity-", temproot) |
430 | |
431 | - log.Info(_("Using temporary directory %s") % util.ufn(self.__dir)) |
432 | + log.Info(_("Using temporary directory %s") % util.fsdecode(self.__dir)) |
433 | |
434 | # number of mktemp()/mkstemp() calls served so far |
435 | self.__tempcount = 0 |
436 | @@ -173,7 +173,7 @@ |
437 | suffix = "-%d" % (self.__tempcount,) |
438 | filename = tempfile.mktemp(suffix, "mktemp-", self.__dir) |
439 | |
440 | - log.Debug(_("Registering (mktemp) temporary file %s") % util.ufn(filename)) |
441 | + log.Debug(_("Registering (mktemp) temporary file %s") % util.fsdecode(filename)) |
442 | self.__pending[filename] = None |
443 | finally: |
444 | self.__lock.release() |
445 | @@ -195,7 +195,7 @@ |
446 | suffix = "-%d" % (self.__tempcount,) |
447 | fd, filename = tempfile.mkstemp(suffix, "mkstemp-", self.__dir) |
448 | |
449 | - log.Debug(_("Registering (mkstemp) temporary file %s") % util.ufn(filename)) |
450 | + log.Debug(_("Registering (mkstemp) temporary file %s") % util.fsdecode(filename)) |
451 | self.__pending[filename] = None |
452 | finally: |
453 | self.__lock.release() |
454 | @@ -227,10 +227,10 @@ |
455 | self.__lock.acquire() |
456 | try: |
457 | if fname in self.__pending: |
458 | - log.Debug(_("Forgetting temporary file %s") % util.ufn(fname)) |
459 | + log.Debug(_("Forgetting temporary file %s") % util.fsdecode(fname)) |
460 | del(self.__pending[fname]) |
461 | else: |
462 | - log.Warn(_("Attempt to forget unknown tempfile %s - this is probably a bug.") % util.ufn(fname)) |
463 | + log.Warn(_("Attempt to forget unknown tempfile %s - this is probably a bug.") % util.fsdecode(fname)) |
464 | pass |
465 | finally: |
466 | self.__lock.release() |
467 | @@ -249,16 +249,16 @@ |
468 | if self.__dir is not None: |
469 | for file in self.__pending.keys(): |
470 | try: |
471 | - log.Debug(_("Removing still remembered temporary file %s") % util.ufn(file)) |
472 | + log.Debug(_("Removing still remembered temporary file %s") % util.fsdecode(file)) |
473 | util.ignore_missing(os.unlink, file) |
474 | except Exception: |
475 | - log.Info(_("Cleanup of temporary file %s failed") % util.ufn(file)) |
476 | + log.Info(_("Cleanup of temporary file %s failed") % util.fsdecode(file)) |
477 | pass |
478 | try: |
479 | os.rmdir(self.__dir) |
480 | except Exception: |
481 | log.Warn(_("Cleanup of temporary directory %s failed - " |
482 | - "this is probably a bug.") % util.ufn(self.__dir)) |
483 | + "this is probably a bug.") % util.fsdecode(self.__dir)) |
484 | pass |
485 | self.__pending = None |
486 | self.__dir = None |
487 | |
488 | === modified file 'duplicity/util.py' |
489 | --- duplicity/util.py 2017-12-01 22:39:33 +0000 |
490 | +++ duplicity/util.py 2017-12-21 22:26:40 +0000 |
491 | @@ -59,11 +59,11 @@ |
492 | |
493 | def fsdecode(bytes_filename): |
494 | """Convert a filename encoded in the system encoding to unicode""" |
495 | - # For paths, just use path.uname rather than converting with this |
496 | + # For paths, just use path.uc_name rather than converting with this |
497 | # If we are not doing any cleverness with non-unicode filename bytes, |
498 | # decoding using system encoding is good enough. Use "replace" as |
499 | # Linux paths can contain non-Unicode characters |
500 | - return bytes_filename.decode(globals.fsencoding, "ignore") |
501 | + return bytes_filename.decode(globals.fsencoding, "replace") |
502 | |
503 | |
504 | def exception_traceback(limit=50): |
505 | @@ -84,20 +84,14 @@ |
506 | |
507 | def escape(string): |
508 | "Convert a (bytes) filename to a format suitable for logging (quoted utf8)" |
509 | - string = ufn(string).encode('unicode-escape', 'replace') |
510 | + string = fsdecode(string).encode('unicode-escape', 'replace') |
511 | return u"'%s'" % string.decode('utf8', 'replace') |
512 | |
513 | |
514 | -def ufn(filename): |
515 | - """Convert a (bytes) filename to unicode for printing""" |
516 | - # Note: path.uc_name is preferable for paths |
517 | - return filename.decode(globals.fsencoding, "replace") |
518 | - |
519 | - |
520 | def uindex(index): |
521 | "Convert an index (a tuple of path parts) to unicode for printing" |
522 | if index: |
523 | - return os.path.join(*list(map(ufn, index))) |
524 | + return os.path.join(*list(map(fsdecode, index))) |
525 | else: |
526 | return u'.' |
527 | |
528 | @@ -107,7 +101,7 @@ |
529 | # non-ascii will cause a UnicodeDecodeError when implicitly decoding to |
530 | # unicode. So we decode manually, using the filesystem encoding. |
531 | # 99.99% of the time, this will be a fine encoding to use. |
532 | - return ufn(unicode(e).encode('utf-8')) |
533 | + return fsdecode(unicode(e).encode('utf-8')) |
534 | |
535 | |
536 | def maybe_ignore_errors(fn): |