Merge lp:~kenneth-loafman/duplicity/duplicity-pylint into lp:~duplicity-team/duplicity/0.8-series

Proposed by Kenneth Loafman on 2020-03-22
Status: Merged
Merged at revision: 1614
Proposed branch: lp:~kenneth-loafman/duplicity/duplicity-pylint
Merge into: lp:~duplicity-team/duplicity/0.8-series
Diff against target: 7171 lines (+1102/-1150)
132 files modified
.bzrignore (+1/-0)
bin/duplicity (+1/-19)
bin/duplicity.1 (+2/-2)
bin/rdiffdir (+6/-6)
compilec.py (+1/-1)
debian/rules (+1/-1)
dist/makedist (+2/-3)
docs/conf.py (+3/-2)
docs/duplicity.config.rst (+2/-2)
docs/duplicity.rst (+1/-1)
duplicity/__init__.py (+2/-2)
duplicity/asyncscheduler.py (+1/-1)
duplicity/backend.py (+8/-10)
duplicity/backends/__init__.py (+1/-1)
duplicity/backends/_boto_multi.py (+15/-15)
duplicity/backends/_boto_single.py (+19/-19)
duplicity/backends/_cf_cloudfiles.py (+2/-2)
duplicity/backends/_cf_pyrax.py (+2/-2)
duplicity/backends/adbackend.py (+5/-5)
duplicity/backends/azurebackend.py (+13/-13)
duplicity/backends/b2backend.py (+4/-5)
duplicity/backends/cfbackend.py (+4/-4)
duplicity/backends/dpbxbackend.py (+7/-8)
duplicity/backends/gdocsbackend.py (+2/-2)
duplicity/backends/giobackend.py (+12/-12)
duplicity/backends/hsibackend.py (+1/-2)
duplicity/backends/hubicbackend.py (+1/-1)
duplicity/backends/imapbackend.py (+47/-46)
duplicity/backends/jottacloudbackend.py (+3/-4)
duplicity/backends/lftpbackend.py (+10/-10)
duplicity/backends/localbackend.py (+1/-2)
duplicity/backends/mediafirebackend.py (+1/-1)
duplicity/backends/megabackend.py (+1/-5)
duplicity/backends/multibackend.py (+8/-10)
duplicity/backends/ncftpbackend.py (+4/-4)
duplicity/backends/onedrivebackend.py (+3/-3)
duplicity/backends/par2backend.py (+5/-5)
duplicity/backends/pcabackend.py (+2/-2)
duplicity/backends/pydrivebackend.py (+4/-4)
duplicity/backends/pyrax_identity/__init__.py (+1/-1)
duplicity/backends/pyrax_identity/hubic.py (+2/-1)
duplicity/backends/rclonebackend.py (+3/-7)
duplicity/backends/rsyncbackend.py (+6/-6)
duplicity/backends/s3_boto3_backend.py (+13/-14)
duplicity/backends/s3_boto_backend.py (+3/-3)
duplicity/backends/ssh_paramiko_backend.py (+8/-9)
duplicity/backends/ssh_pexpect_backend.py (+22/-23)
duplicity/backends/swiftbackend.py (+4/-4)
duplicity/backends/sxbackend.py (+1/-1)
duplicity/backends/tahoebackend.py (+2/-3)
duplicity/backends/webdavbackend.py (+7/-7)
duplicity/cached_ops.py (+1/-1)
duplicity/commandline.py (+73/-74)
duplicity/config.py (+1/-1)
duplicity/diffdir.py (+14/-14)
duplicity/dup_collections.py (+21/-24)
duplicity/dup_main.py (+172/-182)
duplicity/dup_temp.py (+7/-8)
duplicity/dup_threading.py (+3/-3)
duplicity/dup_time.py (+8/-8)
duplicity/errors.py (+1/-1)
duplicity/file_naming.py (+38/-38)
duplicity/filechunkio.py (+3/-3)
duplicity/globmatch.py (+1/-1)
duplicity/gpg.py (+19/-22)
duplicity/gpginterface.py (+3/-2)
duplicity/lazy.py (+24/-21)
duplicity/librsync.py (+5/-4)
duplicity/log.py (+1/-1)
duplicity/manifest.py (+15/-16)
duplicity/patchdir.py (+14/-17)
duplicity/path.py (+29/-30)
duplicity/progress.py (+14/-14)
duplicity/robust.py (+3/-3)
duplicity/selection.py (+11/-13)
duplicity/statistics.py (+1/-1)
duplicity/tarfile.py (+1/-1)
duplicity/tempdir.py (+5/-5)
duplicity/util.py (+10/-10)
po/POTFILES.in (+1/-1)
po/duplicity.pot (+1/-1)
pylintrc (+7/-0)
setup.py (+1/-2)
testing/__init__.py (+11/-11)
testing/conftest.py (+2/-0)
testing/docker/duplicity_test/Dockerfile-18.04 (+1/-1)
testing/docker/duplicity_test/Dockerfile-18.10 (+1/-1)
testing/docker/duplicity_test/Dockerfile-19.04 (+1/-1)
testing/docker/duplicity_test/Dockerfile-19.10 (+1/-1)
testing/docker/duplicity_test/Dockerfile-20.04 (+1/-1)
testing/docker/ftp_server/Dockerfile (+1/-1)
testing/docker/ssh_server/Dockerfile (+1/-1)
testing/find_unadorned_strings.py (+1/-1)
testing/fix_unadorned_strings.py (+1/-1)
testing/functional/__init__.py (+3/-3)
testing/functional/test_badupload.py (+1/-1)
testing/functional/test_cleanup.py (+1/-1)
testing/functional/test_final.py (+2/-2)
testing/functional/test_log.py (+2/-2)
testing/functional/test_rdiffdir.py (+1/-1)
testing/functional/test_replicate.py (+2/-5)
testing/functional/test_restart.py (+1/-1)
testing/functional/test_selection.py (+1/-1)
testing/functional/test_verify.py (+1/-1)
testing/manual/__init__.py (+20/-0)
testing/manual/backendtest.py (+86/-91)
testing/manual/roottest.py (+36/-36)
testing/manual/test_config.py.tmpl (+2/-4)
testing/overrides/__init__.py (+2/-0)
testing/overrides/gettext.py (+2/-2)
testing/test_code.py (+2/-2)
testing/unit/__init__.py (+1/-1)
testing/unit/test_backend.py (+19/-20)
testing/unit/test_backend_instance.py (+3/-5)
testing/unit/test_collections.py (+18/-20)
testing/unit/test_diffdir.py (+10/-11)
testing/unit/test_dup_temp.py (+1/-1)
testing/unit/test_dup_time.py (+6/-8)
testing/unit/test_file_naming.py (+16/-16)
testing/unit/test_globmatch.py (+3/-4)
testing/unit/test_gpg.py (+5/-6)
testing/unit/test_gpginterface.py (+7/-7)
testing/unit/test_lazy.py (+8/-6)
testing/unit/test_manifest.py (+7/-10)
testing/unit/test_patchdir.py (+8/-7)
testing/unit/test_path.py (+3/-4)
testing/unit/test_selection.py (+4/-6)
testing/unit/test_statistics.py (+3/-4)
testing/unit/test_tarfile.py (+1/-1)
testing/unit/test_tempdir.py (+2/-2)
testing/unit/test_util.py (+1/-0)
tox.ini (+1/-0)
To merge this branch: bzr merge lp:~kenneth-loafman/duplicity/duplicity-pylint
Reviewer Review Type Date Requested Status
duplicity-team 2020-03-22 Pending
Review via email: mp+381005@code.launchpad.net

Commit message

 * Enable additional pylint warnings. Make a first pass at corrections:
   - unused-argument
   - unused-wildcard-import
   - redefined-builtin
   - bad-indentation
   - mixed-indentation
   - unreachable
 * Resolved conflict between duplicity.config and testing.manual.config
 * Normalized emacs mode line to have encoding:utf8 on all *.py files

To post a comment you must log in.

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file '.bzrignore'
2--- .bzrignore 2020-03-19 19:05:19 +0000
3+++ .bzrignore 2020-03-22 12:35:54 +0000
4@@ -27,3 +27,4 @@
5 testing/gnupg/.gpg-v21-migrated
6 testing/gnupg/S.*
7 testing/gnupg/private-keys-v1.d
8+testing/manual/test_config.py
9
10=== added directory '.dbeaver'
11=== modified file 'bin/duplicity'
12--- bin/duplicity 2020-02-06 15:27:43 +0000
13+++ bin/duplicity 2020-03-22 12:35:54 +0000
14@@ -1,5 +1,5 @@
15 #!/usr/bin/env python3
16-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
17+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
18 #
19 # duplicity -- Encrypted bandwidth efficient backup
20 #
21@@ -29,27 +29,9 @@
22 from __future__ import print_function
23 from future import standard_library
24 standard_library.install_aliases()
25-from builtins import filter
26-from builtins import map
27-from builtins import next
28-from builtins import object
29-from builtins import range
30
31-import copy
32-import fasteners
33-import gzip
34 import os
35-import platform
36-import re
37-import resource
38 import sys
39-import time
40-import traceback
41-import types
42-
43-
44-from datetime import datetime
45-from os import statvfs
46
47 from duplicity.dup_main import main
48 import duplicity.errors
49
50=== modified file 'bin/duplicity.1'
51--- bin/duplicity.1 2020-03-06 21:25:13 +0000
52+++ bin/duplicity.1 2020-03-22 12:35:54 +0000
53@@ -603,9 +603,9 @@
54
55 file_blocksize = int((file_len / (2000 * 512)) * 512)
56 .br
57-return min(file_blocksize, globals.max_blocksize)
58+return min(file_blocksize, config.max_blocksize)
59
60-where globals.max_blocksize defaults to 2048.
61+where config.max_blocksize defaults to 2048.
62 If you specify a larger max_blocksize, your difftar files will be larger, but your sigtar files will be smaller.
63 If you specify a smaller max_blocksize, the reverse occurs.
64 The --max-blocksize option should be in multiples of 512.
65
66=== modified file 'bin/rdiffdir'
67--- bin/rdiffdir 2020-02-06 15:27:43 +0000
68+++ bin/rdiffdir 2020-03-22 12:35:54 +0000
69@@ -1,5 +1,5 @@
70 #!/usr/bin/env python3
71-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
72+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
73 #
74 # rdiffdir -- Extend rdiff functionality to directories
75 #
76@@ -36,7 +36,7 @@
77 from duplicity import diffdir
78 from duplicity import patchdir
79 from duplicity import log
80-from duplicity import globals
81+from duplicity import config
82 from duplicity import selection
83 from duplicity import path
84 from duplicity import util
85@@ -96,18 +96,18 @@
86 select_opts.append((u"--include-filelist", u"standard input"))
87 select_files.append(sys.stdin)
88 elif opt == u"--max-blocksize":
89- globals.max_blocksize = int(arg)
90+ config.max_blocksize = int(arg)
91 elif opt == u"--null-separator":
92- globals.null_separator = 1
93+ config.null_separator = 1
94 elif opt == u"-V":
95- print(u"rdiffdir", str(globals.version))
96+ print(u"rdiffdir", str(config.version))
97 sys.exit(0)
98 elif opt == u"-v" or opt == u"--verbosity":
99 log.setverbosity(int(arg))
100 elif opt == u"--write-sig-to" or opt == u"--write-signature-to":
101 sig_fileobj = get_fileobj(arg, u"wb")
102 elif opt == u"--ignore-errors":
103- globals.ignore_errors = 1
104+ config.ignore_errors = 1
105 else:
106 command_line_error(u"Unknown option %s" % opt)
107
108
109=== modified file 'compilec.py'
110--- compilec.py 2019-12-28 21:26:47 +0000
111+++ compilec.py 2020-03-22 12:35:54 +0000
112@@ -1,5 +1,5 @@
113 #!/usr/bin/env python3
114-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
115+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
116 #
117 # Copyright 2002 Ben Escoto <ben@emerose.org>
118 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
119
120=== modified file 'debian/rules'
121--- debian/rules 2020-01-06 17:38:21 +0000
122+++ debian/rules 2020-03-22 12:35:54 +0000
123@@ -1,5 +1,5 @@
124 #!/usr/bin/make -f
125-# -*- Mode:Makefile; indent-tabs-mode:t; tab-width:4 -*-
126+# -*- Mode:Makefile; indent-tabs-mode:t; tab-width:4; encoding:utf8 -*-
127
128 # Old versions of dpkg-parsechangelog don't support -SVersion
129 UPSTREAM_VERSION=$(shell dpkg-parsechangelog | grep ^Version: | cut -d' ' -f2 | cut -d- -f1)
130
131=== modified file 'dist/makedist'
132--- dist/makedist 2020-03-07 15:55:26 +0000
133+++ dist/makedist 2020-03-22 12:35:54 +0000
134@@ -1,5 +1,5 @@
135 #!/usr/bin/env python3
136-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
137+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
138 #
139 # Copyright 2002 Ben Escoto <ben@emerose.org>
140 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
141@@ -29,11 +29,10 @@
142 import shutil
143 import time
144 import sys
145-from subprocess import Popen, PIPE, STDOUT
146+from subprocess import Popen, PIPE
147
148 sys.path.insert(0, os.path.abspath(u"./"))
149
150-from duplicity import util
151 from duplicity import __version__
152
153 bzr = Popen([u"bzr", u"revno"], stdout=PIPE, universal_newlines=True)
154
155=== modified file 'docs/conf.py'
156--- docs/conf.py 2020-02-06 16:04:00 +0000
157+++ docs/conf.py 2020-03-22 12:35:54 +0000
158@@ -1,3 +1,5 @@
159+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
160+#
161 # Configuration file for the Sphinx documentation builder.
162 #
163 # This file only contains a selection of the most common options. For a full
164@@ -9,7 +11,6 @@
165 # If extensions (or modules to document with autodoc) are in another directory,
166 # add these directories to sys.path here. If the directory is relative to the
167 # documentation root, use os.path.abspath to make it absolute, like shown here.
168-import os
169 import sys
170 sys.path.insert(0, '/Users/ken/workspace/duplicity-src8/testing')
171 sys.path.insert(0, '/Users/ken/workspace/duplicity-src8/duplicity')
172@@ -20,7 +21,7 @@
173 # -- Project information -----------------------------------------------------
174
175 project = 'duplicity'
176-copyright = '2020, Kenneth Loafman'
177+copyright = '2020, Kenneth Loafman' # pylint: disable=redefined-builtin
178 author = 'Kenneth Loafman'
179
180
181
182=== renamed file 'docs/duplicity.globals.rst' => 'docs/duplicity.config.rst'
183--- docs/duplicity.globals.rst 2020-02-01 21:33:23 +0000
184+++ docs/duplicity.config.rst 2020-03-22 12:35:54 +0000
185@@ -1,7 +1,7 @@
186-duplicity.globals module
187+duplicity.config module
188 ========================
189
190-.. automodule:: duplicity.globals
191+.. automodule:: duplicity.config
192 :members:
193 :undoc-members:
194 :show-inheritance:
195
196=== modified file 'docs/duplicity.rst'
197--- docs/duplicity.rst 2020-02-01 21:33:23 +0000
198+++ docs/duplicity.rst 2020-03-22 12:35:54 +0000
199@@ -17,6 +17,7 @@
200 duplicity.backend
201 duplicity.cached_ops
202 duplicity.commandline
203+ duplicity.config
204 duplicity.diffdir
205 duplicity.dup_collections
206 duplicity.dup_main
207@@ -26,7 +27,6 @@
208 duplicity.errors
209 duplicity.file_naming
210 duplicity.filechunkio
211- duplicity.globals
212 duplicity.globmatch
213 duplicity.gpg
214 duplicity.gpginterface
215
216=== modified file 'duplicity/__init__.py'
217--- duplicity/__init__.py 2020-03-19 19:07:30 +0000
218+++ duplicity/__init__.py 2020-03-22 12:35:54 +0000
219@@ -1,4 +1,4 @@
220-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
221+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
222 #
223 # Copyright 2002 Ben Escoto <ben@emerose.org>
224 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
225@@ -27,4 +27,4 @@
226 if sys.version_info.major >= 3:
227 gettext.install(u'duplicity', names=[u'ngettext'])
228 else:
229- gettext.install(u'duplicity', names=[u'ngettext'], unicode=True)
230+ gettext.install(u'duplicity', names=[u'ngettext'], unicode=True) # pylint: disable=unexpected-keyword-arg
231
232=== modified file 'duplicity/asyncscheduler.py'
233--- duplicity/asyncscheduler.py 2020-01-10 16:35:50 +0000
234+++ duplicity/asyncscheduler.py 2020-03-22 12:35:54 +0000
235@@ -1,4 +1,4 @@
236-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
237+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
238 #
239 # Copyright 2002 Ben Escoto <ben@emerose.org>
240 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
241
242=== modified file 'duplicity/backend.py'
243--- duplicity/backend.py 2020-01-21 13:24:32 +0000
244+++ duplicity/backend.py 2020-03-22 12:35:54 +0000
245@@ -1,4 +1,4 @@
246-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
247+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
248 #
249 # Copyright 2002 Ben Escoto <ben@emerose.org>
250 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
251@@ -37,17 +37,15 @@
252 import re
253 import getpass
254 import re
255-import types
256 import urllib.request # pylint: disable=import-error
257 import urllib.parse # pylint: disable=import-error
258 import urllib.error # pylint: disable=import-error
259
260 from duplicity import dup_temp
261 from duplicity import file_naming
262-from duplicity import globals
263+from duplicity import config
264 from duplicity import log
265 from duplicity import path
266-from duplicity import progress
267 from duplicity import util
268
269 from duplicity.util import exception_traceback
270@@ -220,7 +218,7 @@
271
272 Raise InvalidBackendURL if the URL is not a valid URL.
273 """
274- if globals.use_gio:
275+ if config.use_gio:
276 url_string = u'gio+' + url_string
277 obj = get_backend_object(url_string)
278 if obj:
279@@ -370,8 +368,8 @@
280 def outer_retry(fn):
281 def inner_retry(self, *args):
282 global _last_exception
283- errors_fatal, errors_default = globals.are_errors_fatal.get(operation, (True, None))
284- for n in range(1, globals.num_retries + 1):
285+ errors_fatal, errors_default = config.are_errors_fatal.get(operation, (True, None))
286+ for n in range(1, config.num_retries + 1):
287 try:
288 return fn(self, *args)
289 except FatalBackendException as e:
290@@ -391,7 +389,7 @@
291 # retry on anything else
292 log.Debug(_(u"Backtrace of previous error: %s")
293 % exception_traceback())
294- at_end = n == globals.num_retries
295+ at_end = n == config.num_retries
296 code = _get_code_from_exception(self.backend, operation, e)
297 if code == log.ErrorCode.backend_not_found:
298 # If we tried to do something, but the file just isn't there,
299@@ -413,9 +411,9 @@
300 % (n, e.__class__.__name__, util.uexc(e)))
301 if not at_end:
302 if isinstance(e, TemporaryLoadException):
303- time.sleep(3 * globals.backend_retry_delay) # wait longer before trying again
304+ time.sleep(3 * config.backend_retry_delay) # wait longer before trying again
305 else:
306- time.sleep(globals.backend_retry_delay) # wait a bit before trying again
307+ time.sleep(config.backend_retry_delay) # wait a bit before trying again
308 if hasattr(self.backend, u'_retry_cleanup'):
309 self.backend._retry_cleanup()
310
311
312=== modified file 'duplicity/backends/__init__.py'
313--- duplicity/backends/__init__.py 2018-07-23 14:55:39 +0000
314+++ duplicity/backends/__init__.py 2020-03-22 12:35:54 +0000
315@@ -1,4 +1,4 @@
316-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
317+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
318 #
319 # Copyright 2002 Ben Escoto <ben@emerose.org>
320 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
321
322=== modified file 'duplicity/backends/_boto_multi.py'
323--- duplicity/backends/_boto_multi.py 2020-03-03 12:46:33 +0000
324+++ duplicity/backends/_boto_multi.py 2020-03-22 12:35:54 +0000
325@@ -1,4 +1,4 @@
326-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
327+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
328 #
329 # Copyright 2002 Ben Escoto <ben@emerose.org>
330 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
331@@ -34,10 +34,10 @@
332 import time
333 import traceback
334
335-from duplicity import globals
336+from duplicity import config
337 from duplicity import log
338 from duplicity import progress
339-from duplicity.errors import * # @UnusedWildImport
340+from duplicity.errors import * # pylint: disable=unused-wildcard-import
341 from duplicity.filechunkio import FileChunkIO
342
343 from ._boto_single import BotoBackend as BotoSingleBackend
344@@ -101,7 +101,7 @@
345 self._setup_pool()
346
347 def _setup_pool(self):
348- number_of_procs = globals.s3_multipart_max_procs
349+ number_of_procs = config.s3_multipart_max_procs
350 if not number_of_procs:
351 number_of_procs = psutil.cpu_count(logical=False)
352
353@@ -124,16 +124,16 @@
354 def upload(self, filename, key, headers=None):
355 import boto # pylint: disable=import-error
356
357- chunk_size = globals.s3_multipart_chunk_size
358+ chunk_size = config.s3_multipart_chunk_size
359
360 # Check minimum chunk size for S3
361- if chunk_size < globals.s3_multipart_minimum_chunk_size:
362+ if chunk_size < config.s3_multipart_minimum_chunk_size:
363 log.Warn(u"Minimum chunk size is %d, but %d specified." % (
364- globals.s3_multipart_minimum_chunk_size, chunk_size))
365- chunk_size = globals.s3_multipart_minimum_chunk_size
366+ config.s3_multipart_minimum_chunk_size, chunk_size))
367+ chunk_size = config.s3_multipart_minimum_chunk_size
368
369 # Decide in how many chunks to upload
370- bytes = os.path.getsize(filename)
371+ bytes = os.path.getsize(filename) # pylint: disable=redefined-builtin
372 if bytes < chunk_size:
373 chunks = 1
374 else:
375@@ -143,12 +143,12 @@
376
377 log.Debug(u"Uploading %d bytes in %d chunks" % (bytes, chunks))
378
379- mp = self.bucket.initiate_multipart_upload(key.key, headers, encrypt_key=globals.s3_use_sse)
380+ mp = self.bucket.initiate_multipart_upload(key.key, headers, encrypt_key=config.s3_use_sse)
381
382 # Initiate a queue to share progress data between the pool
383 # workers and a consumer thread, that will collect and report
384 queue = None
385- if globals.progress:
386+ if config.progress:
387 manager = multiprocessing.Manager()
388 queue = manager.Queue()
389 consumer = ConsumerThread(queue, bytes)
390@@ -157,14 +157,14 @@
391 for n in range(chunks):
392 storage_uri = boto.storage_uri(self.boto_uri_str)
393 params = [self.scheme, self.parsed_url, storage_uri, self.bucket_name,
394- mp.id, filename, n, chunk_size, globals.num_retries,
395+ mp.id, filename, n, chunk_size, config.num_retries,
396 queue]
397 tasks.append(self._pool.apply_async(multipart_upload_worker, params))
398
399 log.Debug(u"Waiting for the pool to finish processing %s tasks" % len(tasks))
400 while tasks:
401 try:
402- tasks[0].wait(timeout=globals.s3_multipart_max_timeout)
403+ tasks[0].wait(timeout=config.s3_multipart_max_timeout)
404 if tasks[0].ready():
405 if tasks[0].successful():
406 del tasks[0]
407@@ -183,7 +183,7 @@
408 log.Debug(u"Done waiting for the pool to finish processing")
409
410 # Terminate the consumer thread, if any
411- if globals.progress:
412+ if config.progress:
413 consumer.finish = True
414 consumer.join()
415
416@@ -195,7 +195,7 @@
417
418
419 def multipart_upload_worker(scheme, parsed_url, storage_uri, bucket_name, multipart_id,
420- filename, offset, bytes, num_retries, queue):
421+ filename, offset, bytes, num_retries, queue): # pylint: disable=redefined-builtin
422 u"""
423 Worker method for uploading a file chunk to S3 using multipart upload.
424 Note that the file chunk is read into memory, so it's important to keep
425
426=== modified file 'duplicity/backends/_boto_single.py'
427--- duplicity/backends/_boto_single.py 2019-11-09 22:22:05 +0000
428+++ duplicity/backends/_boto_single.py 2020-03-22 12:35:54 +0000
429@@ -1,4 +1,4 @@
430-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
431+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
432 #
433 # Copyright 2002 Ben Escoto <ben@emerose.org>
434 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
435@@ -25,7 +25,7 @@
436 import time
437
438 import duplicity.backend
439-from duplicity import globals
440+from duplicity import config
441 from duplicity import log
442 from duplicity.errors import FatalBackendException, BackendException
443 from duplicity import progress
444@@ -72,7 +72,7 @@
445 cfs_supported = False
446 calling_format = None
447
448- if globals.s3_use_new_style:
449+ if config.s3_use_new_style:
450 if cfs_supported:
451 calling_format = SubdomainCallingFormat()
452 else:
453@@ -95,11 +95,11 @@
454
455 if not parsed_url.hostname:
456 # Use the default host.
457- conn = storage_uri.connect(is_secure=(not globals.s3_unencrypted_connection))
458+ conn = storage_uri.connect(is_secure=(not config.s3_unencrypted_connection))
459 else:
460 assert scheme == u's3'
461 conn = storage_uri.connect(host=parsed_url.hostname, port=parsed_url.port,
462- is_secure=(not globals.s3_unencrypted_connection))
463+ is_secure=(not config.s3_unencrypted_connection))
464
465 if hasattr(conn, u'calling_format'):
466 if calling_format is None:
467@@ -166,7 +166,7 @@
468 # boto uses scheme://bucket[/name] and specifies hostname on connect()
469 self.boto_uri_str = u'://'.join((parsed_url.scheme[:2],
470 parsed_url.path.lstrip(u'/')))
471- if globals.s3_european_buckets:
472+ if config.s3_european_buckets:
473 self.my_location = Location.EU
474 else:
475 self.my_location = u''
476@@ -206,8 +206,8 @@
477 def _put(self, source_path, remote_filename):
478 remote_filename = util.fsdecode(remote_filename)
479
480- if globals.s3_european_buckets:
481- if not globals.s3_use_new_style:
482+ if config.s3_european_buckets:
483+ if not config.s3_use_new_style:
484 raise FatalBackendException(u"European bucket creation was requested, but not new-style "
485 u"bucket addressing (--s3-use-new-style)",
486 code=log.ErrorCode.s3_bucket_not_style)
487@@ -224,25 +224,25 @@
488
489 key = self.bucket.new_key(self.key_prefix + remote_filename)
490
491- if globals.s3_use_rrs:
492+ if config.s3_use_rrs:
493 storage_class = u'REDUCED_REDUNDANCY'
494- elif globals.s3_use_ia:
495+ elif config.s3_use_ia:
496 storage_class = u'STANDARD_IA'
497- elif globals.s3_use_onezone_ia:
498+ elif config.s3_use_onezone_ia:
499 storage_class = u'ONEZONE_IA'
500- elif globals.s3_use_glacier and u"manifest" not in remote_filename:
501+ elif config.s3_use_glacier and u"manifest" not in remote_filename:
502 storage_class = u'GLACIER'
503 else:
504 storage_class = u'STANDARD'
505 log.Info(u"Uploading %s/%s to %s Storage" % (self.straight_url, remote_filename, storage_class))
506- if globals.s3_use_sse:
507+ if config.s3_use_sse:
508 headers = {
509 u'Content-Type': u'application/octet-stream',
510 u'x-amz-storage-class': storage_class,
511 u'x-amz-server-side-encryption': u'AES256'
512 }
513- elif globals.s3_use_sse_kms:
514- if globals.s3_kms_key_id is None:
515+ elif config.s3_use_sse_kms:
516+ if config.s3_kms_key_id is None:
517 raise FatalBackendException(u"S3 USE SSE KMS was requested, but key id not provided "
518 u"require (--s3-kms-key-id)",
519 code=log.ErrorCode.s3_kms_no_id)
520@@ -250,10 +250,10 @@
521 u'Content-Type': u'application/octet-stream',
522 u'x-amz-storage-class': storage_class,
523 u'x-amz-server-side-encryption': u'aws:kms',
524- u'x-amz-server-side-encryption-aws-kms-key-id': globals.s3_kms_key_id
525+ u'x-amz-server-side-encryption-aws-kms-key-id': config.s3_kms_key_id
526 }
527- if globals.s3_kms_grant is not None:
528- headers[u'x-amz-grant-full-control'] = globals.s3_kms_grant
529+ if config.s3_kms_grant is not None:
530+ headers[u'x-amz-grant-full-control'] = config.s3_kms_grant
531 else:
532 headers = {
533 u'Content-Type': u'application/octet-stream',
534@@ -317,7 +317,7 @@
535 def upload(self, filename, key, headers):
536 key.set_contents_from_filename(filename, headers,
537 cb=progress.report_transfer,
538- num_cb=(max(2, 8 * globals.volsize / (1024 * 1024)))
539+ num_cb=(max(2, 8 * config.volsize / (1024 * 1024)))
540 ) # Max num of callbacks = 8 times x megabyte
541 key.close()
542
543
544=== modified file 'duplicity/backends/_cf_cloudfiles.py'
545--- duplicity/backends/_cf_cloudfiles.py 2018-11-29 19:00:15 +0000
546+++ duplicity/backends/_cf_cloudfiles.py 2020-03-22 12:35:54 +0000
547@@ -1,4 +1,4 @@
548-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
549+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
550 #
551 # Copyright 2009 Eric EJ Johnson <ej.johnson@rackspace.com>
552 #
553@@ -70,7 +70,7 @@
554 log.ErrorCode.connection_failed)
555 self.container = conn.create_container(container)
556
557- def _error_code(self, operation, e):
558+ def _error_code(self, operation, e): # pylint: disable=unused-argument
559 if isinstance(e, NoSuchObject):
560 return log.ErrorCode.backend_not_found
561 elif isinstance(e, self.resp_exc):
562
563=== modified file 'duplicity/backends/_cf_pyrax.py'
564--- duplicity/backends/_cf_pyrax.py 2018-11-29 19:00:15 +0000
565+++ duplicity/backends/_cf_pyrax.py 2020-03-22 12:35:54 +0000
566@@ -1,4 +1,4 @@
567-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
568+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
569 #
570 # Copyright 2013 J.P. Krauss <jkrauss@asymworks.com>
571 #
572@@ -91,7 +91,7 @@
573 u"Please check your credentials and permissions.",
574 log.ErrorCode.backend_permission_denied)
575
576- def _error_code(self, operation, e):
577+ def _error_code(self, operation, e): # pylint: disable=unused-argument
578 if isinstance(e, self.nso_exc):
579 return log.ErrorCode.backend_not_found
580 elif isinstance(e, self.client_exc):
581
582=== modified file 'duplicity/backends/adbackend.py'
583--- duplicity/backends/adbackend.py 2019-08-08 19:31:58 +0000
584+++ duplicity/backends/adbackend.py 2020-03-22 12:35:54 +0000
585@@ -1,4 +1,4 @@
586-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
587+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
588 #
589 # Copyright 2016 Stefan Breunig <stefan-duplicity@breunig.xyz>
590 # Based on the backend onedrivebackend.py
591@@ -31,7 +31,7 @@
592
593 import duplicity.backend
594 from duplicity.errors import BackendException
595-from duplicity import globals
596+from duplicity import config
597 from duplicity import log
598
599
600@@ -67,7 +67,7 @@
601 self.backup_target_id = None
602 self.backup_target = parsed_url.path.lstrip(u'/')
603
604- if globals.volsize > (10 * 1024 * 1024 * 1024):
605+ if config.volsize > (10 * 1024 * 1024 * 1024):
606 # https://forums.developer.amazon.com/questions/22713/file-size-limits.html
607 # https://forums.developer.amazon.com/questions/22038/support-for-chunked-transfer-encoding.html
608 log.FatalError(
609@@ -320,8 +320,8 @@
610 log.Info(u'%s upload failed with timeout status code=%d. Speculatively '
611 u'waiting for %d seconds to see if Amazon Drive finished the '
612 u'upload anyway' % (remote_filename, response.status_code,
613- globals.timeout))
614- tries = globals.timeout / 15
615+ config.timeout))
616+ tries = config.timeout / 15
617 while tries >= 0:
618 tries -= 1
619 time.sleep(15)
620
621=== modified file 'duplicity/backends/azurebackend.py'
622--- duplicity/backends/azurebackend.py 2019-08-21 12:49:40 +0000
623+++ duplicity/backends/azurebackend.py 2020-03-22 12:35:54 +0000
624@@ -1,4 +1,4 @@
625-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
626+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
627 #
628 # Copyright 2013 Matthieu Huin <mhu@enovance.com>
629 # Copyright 2015 Scott McKenzie <noizyland@gmail.com>
630@@ -23,7 +23,7 @@
631 import os
632
633 import duplicity.backend
634-from duplicity import globals
635+from duplicity import config
636 from duplicity import log
637 from duplicity.errors import BackendException
638 from duplicity.util import fsdecode
639@@ -86,23 +86,23 @@
640 raise BackendException(
641 u'Neither AZURE_ACCOUNT_KEY nor AZURE_SHARED_ACCESS_SIGNATURE environment variable not set.')
642
643- if globals.azure_max_single_put_size:
644+ if config.azure_max_single_put_size:
645 # check if we use azure-storage>=0.30.0
646 try:
647 _ = self.blob_service.MAX_SINGLE_PUT_SIZE
648- self.blob_service.MAX_SINGLE_PUT_SIZE = globals.azure_max_single_put_size
649+ self.blob_service.MAX_SINGLE_PUT_SIZE = config.azure_max_single_put_size
650 # fallback for azure-storage<0.30.0
651 except AttributeError:
652- self.blob_service._BLOB_MAX_DATA_SIZE = globals.azure_max_single_put_size
653+ self.blob_service._BLOB_MAX_DATA_SIZE = config.azure_max_single_put_size
654
655- if globals.azure_max_block_size:
656+ if config.azure_max_block_size:
657 # check if we use azure-storage>=0.30.0
658 try:
659 _ = self.blob_service.MAX_BLOCK_SIZE
660- self.blob_service.MAX_BLOCK_SIZE = globals.azure_max_block_size
661+ self.blob_service.MAX_BLOCK_SIZE = config.azure_max_block_size
662 # fallback for azure-storage<0.30.0
663 except AttributeError:
664- self.blob_service._BLOB_MAX_CHUNK_DATA_SIZE = globals.azure_max_block_size
665+ self.blob_service._BLOB_MAX_CHUNK_DATA_SIZE = config.azure_max_block_size
666
667 def _create_container(self):
668 try:
669@@ -118,8 +118,8 @@
670 def _put(self, source_path, remote_filename):
671 remote_filename = fsdecode(remote_filename)
672 kwargs = {}
673- if globals.azure_max_connections:
674- kwargs[u'max_connections'] = globals.azure_max_connections
675+ if config.azure_max_connections:
676+ kwargs[u'max_connections'] = config.azure_max_connections
677
678 # https://azure.microsoft.com/en-us/documentation/articles/storage-python-how-to-use-blob-storage/#upload-a-blob-into-a-container
679 try:
680@@ -130,9 +130,9 @@
681 self._set_tier(remote_filename)
682
683 def _set_tier(self, remote_filename):
684- if globals.azure_blob_tier is not None:
685+ if config.azure_blob_tier is not None:
686 try:
687- self.blob_service.set_standard_blob_tier(self.container, remote_filename, globals.azure_blob_tier)
688+ self.blob_service.set_standard_blob_tier(self.container, remote_filename, config.azure_blob_tier)
689 except AttributeError: # might not be available in old API
690 pass
691
692@@ -165,7 +165,7 @@
693 info = {u'size': int(prop[u'content-length'])}
694 return info
695
696- def _error_code(self, operation, e):
697+ def _error_code(self, operation, e): # pylint: disable=unused-argument
698 if isinstance(e, self.AzureMissingResourceError):
699 return log.ErrorCode.backend_not_found
700
701
702=== modified file 'duplicity/backends/b2backend.py'
703--- duplicity/backends/b2backend.py 2019-11-16 17:15:49 +0000
704+++ duplicity/backends/b2backend.py 2020-03-22 12:35:54 +0000
705@@ -1,3 +1,4 @@
706+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
707 #
708 # Copyright (c) 2015 Matthew Bentley
709 #
710@@ -22,19 +23,17 @@
711 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
712 # THE SOFTWARE.
713
714-from builtins import object
715 from future import standard_library
716 standard_library.install_aliases()
717+from builtins import object
718
719-import os
720-import hashlib
721 from urllib.parse import quote_plus # pylint: disable=import-error
722
723-import duplicity.backend
724-from duplicity.errors import BackendException, FatalBackendException
725 from duplicity import log
726 from duplicity import progress
727 from duplicity import util
728+from duplicity.errors import BackendException, FatalBackendException
729+import duplicity.backend
730
731
732 class B2ProgressListener(object):
733
734=== modified file 'duplicity/backends/cfbackend.py'
735--- duplicity/backends/cfbackend.py 2018-07-23 14:55:39 +0000
736+++ duplicity/backends/cfbackend.py 2020-03-22 12:35:54 +0000
737@@ -1,4 +1,4 @@
738-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
739+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
740 #
741 # Copyright 2013 Kenneth Loafman
742 #
743@@ -19,10 +19,10 @@
744 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
745
746 import duplicity.backend
747-from duplicity import globals
748+from duplicity import config
749
750-if (globals.cf_backend and
751- globals.cf_backend.lower().strip() == u'pyrax'):
752+if (config.cf_backend and
753+ config.cf_backend.lower().strip() == u'pyrax'):
754 from ._cf_pyrax import PyraxBackend as CFBackend
755 else:
756 from ._cf_cloudfiles import CloudFilesBackend as CFBackend
757
758=== modified file 'duplicity/backends/dpbxbackend.py'
759--- duplicity/backends/dpbxbackend.py 2019-08-08 19:31:58 +0000
760+++ duplicity/backends/dpbxbackend.py 2020-03-22 12:35:54 +0000
761@@ -1,4 +1,4 @@
762-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
763+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
764 #
765 # Copyright 2013 jno <jno@pisem.net>
766 # Copyright 2016 Dmitry Nezhevenko <dion@dion.org.ua>
767@@ -41,11 +41,10 @@
768 import urllib.parse # pylint: disable=import-error
769 import urllib.error # pylint: disable=import-error
770
771-from duplicity import log, globals
772+from duplicity import log, config
773 from duplicity import progress
774 from duplicity.errors import BackendException
775-from duplicity.globals import num_retries
776-from requests.exceptions import ConnectionError
777+from requests.exceptions import ConnectionError # pylint: disable=redefined-builtin
778 import duplicity.backend
779
780 # This is chunk size for upload using Dpbx chumked API v2. It doesn't
781@@ -71,7 +70,7 @@
782 f.close()
783
784
785-def command(login_required=True):
786+def command(login_required=True): # pylint: disable=unused-argument
787 u"""a decorator for handling authentication and exceptions"""
788 def decorate(f):
789 def wrapper(self, *args):
790@@ -194,7 +193,7 @@
791 log.Info(u"dpbx: Successfully authenticated as %s" %
792 self.api_account.name.display_name)
793
794- def _error_code(self, operation, e):
795+ def _error_code(self, operation, e): # pylint: disable=unused-argument
796 if isinstance(e, ApiError):
797 err = e.error
798
799@@ -269,7 +268,7 @@
800
801 requested_offset = None
802 current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
803- retry_number = globals.num_retries
804+ retry_number = config.num_retries
805 is_eof = False
806
807 # We're doing our own error handling and retrying logic because
808@@ -291,7 +290,7 @@
809 # reset temporary status variables
810 requested_offset = None
811 current_chunk_size = DPBX_UPLOAD_CHUNK_SIZE
812- retry_number = globals.num_retries
813+ retry_number = config.num_retries
814
815 if not is_eof:
816 assert len(buf) != 0
817
818=== modified file 'duplicity/backends/gdocsbackend.py'
819--- duplicity/backends/gdocsbackend.py 2020-01-16 13:38:32 +0000
820+++ duplicity/backends/gdocsbackend.py 2020-03-22 12:35:54 +0000
821@@ -1,4 +1,4 @@
822-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
823+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
824 #
825 # Copyright 2011 Carlos Abalde <carlos.abalde@gmail.com>
826 #
827@@ -155,7 +155,7 @@
828 u'http://www.google.com/support/accounts/bin/static.py?page=guide.cs&guide=1056283&topic=1056286 '
829 u'and create your application-specific password to run duplicity backups.')
830
831- def _fetch_entries(self, folder_id, type, title=None):
832+ def _fetch_entries(self, folder_id, type, title=None): # pylint: disable=redefined-builtin
833 # Build URI.
834 uri = u'/feeds/default/private/full/%s/contents' % folder_id
835 if type == u'folder':
836
837=== modified file 'duplicity/backends/giobackend.py'
838--- duplicity/backends/giobackend.py 2020-01-06 17:13:39 +0000
839+++ duplicity/backends/giobackend.py 2020-03-22 12:35:54 +0000
840@@ -1,4 +1,4 @@
841-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
842+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
843 #
844 # Copyright 2009 Michael Terry <mike@mterry.name>
845 #
846@@ -50,8 +50,8 @@
847 URLs look like schema://user@server/path.
848 """
849 def __init__(self, parsed_url):
850- from gi.repository import Gio # @UnresolvedImport # pylint: disable=import-error
851- from gi.repository import GLib # @UnresolvedImport # pylint: disable=import-error
852+ from gi.repository import Gio # pylint: disable=import-error
853+ from gi.repository import GLib # pylint: disable=import-error
854
855 class DupMountOperation(Gio.MountOperation):
856 u"""A simple MountOperation that grabs the password from the environment
857@@ -99,8 +99,8 @@
858 raise
859
860 def __done_with_mount(self, fileobj, result, loop):
861- from gi.repository import Gio # @UnresolvedImport # pylint: disable=import-error
862- from gi.repository import GLib # @UnresolvedImport # pylint: disable=import-error
863+ from gi.repository import Gio # pylint: disable=import-error
864+ from gi.repository import GLib # pylint: disable=import-error
865 try:
866 fileobj.mount_enclosing_volume_finish(result)
867 except GLib.GError as e:
868@@ -114,7 +114,7 @@
869 pass
870
871 def __copy_file(self, source, target):
872- from gi.repository import Gio # @UnresolvedImport # pylint: disable=import-error
873+ from gi.repository import Gio # pylint: disable=import-error
874 # Don't pass NOFOLLOW_SYMLINKS here. Some backends (e.g. google-drive:)
875 # use symlinks internally for all files. In the normal course of
876 # events, we never deal with symlinks anyway, just tarballs.
877@@ -123,8 +123,8 @@
878 None, self.__copy_progress, None)
879
880 def _error_code(self, operation, e):
881- from gi.repository import Gio # @UnresolvedImport # pylint: disable=import-error
882- from gi.repository import GLib # @UnresolvedImport # pylint: disable=import-error
883+ from gi.repository import Gio # pylint: disable=import-error
884+ from gi.repository import GLib # pylint: disable=import-error
885 if isinstance(e, GLib.GError):
886 if e.code == Gio.IOErrorEnum.FAILED and operation == u'delete':
887 # Sometimes delete will return a generic failure on a file not
888@@ -138,19 +138,19 @@
889 return log.ErrorCode.backend_no_space
890
891 def _put(self, source_path, remote_filename):
892- from gi.repository import Gio # @UnresolvedImport # pylint: disable=import-error
893+ from gi.repository import Gio # pylint: disable=import-error
894 source_file = Gio.File.new_for_path(source_path.name)
895 target_file = self.remote_file.get_child_for_display_name(util.fsdecode(remote_filename))
896 self.__copy_file(source_file, target_file)
897
898 def _get(self, filename, local_path):
899- from gi.repository import Gio # @UnresolvedImport # pylint: disable=import-error
900+ from gi.repository import Gio # pylint: disable=import-error
901 source_file = self.remote_file.get_child_for_display_name(util.fsdecode(filename))
902 target_file = Gio.File.new_for_path(local_path.name)
903 self.__copy_file(source_file, target_file)
904
905 def _list(self):
906- from gi.repository import Gio # @UnresolvedImport # pylint: disable=import-error
907+ from gi.repository import Gio # pylint: disable=import-error
908 files = []
909 # We grab display name, rather than file name because some backends
910 # (e.g. google-drive:) use filesystem-specific IDs as file names and
911@@ -170,7 +170,7 @@
912 target_file.delete(None)
913
914 def _query(self, filename):
915- from gi.repository import Gio # @UnresolvedImport # pylint: disable=import-error
916+ from gi.repository import Gio # pylint: disable=import-error
917 target_file = self.remote_file.get_child_for_display_name(util.fsdecode(filename))
918 info = target_file.query_info(Gio.FILE_ATTRIBUTE_STANDARD_SIZE,
919 Gio.FileQueryInfoFlags.NONE, None)
920
921=== modified file 'duplicity/backends/hsibackend.py'
922--- duplicity/backends/hsibackend.py 2020-01-02 12:05:22 +0000
923+++ duplicity/backends/hsibackend.py 2020-03-22 12:35:54 +0000
924@@ -1,4 +1,4 @@
925-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
926+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
927 #
928 # Copyright 2002 Ben Escoto <ben@emerose.org>
929 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
930@@ -50,7 +50,6 @@
931 self.subprocess_popen(commandline)
932
933 def _list(self):
934- import sys
935 commandline = u'%s "ls -l %s"' % (hsi_command, self.remote_dir)
936 l = self.subprocess_popen(commandline)[2]
937 l = l.split(os.linesep.encode())[3:]
938
939=== modified file 'duplicity/backends/hubicbackend.py'
940--- duplicity/backends/hubicbackend.py 2018-11-29 19:00:15 +0000
941+++ duplicity/backends/hubicbackend.py 2020-03-22 12:35:54 +0000
942@@ -1,4 +1,4 @@
943-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
944+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
945 #
946 # Copyright 2013 J.P. Krauss <jkrauss@asymworks.com>
947 #
948
949=== modified file 'duplicity/backends/imapbackend.py'
950--- duplicity/backends/imapbackend.py 2019-05-25 19:43:53 +0000
951+++ duplicity/backends/imapbackend.py 2020-03-22 12:35:54 +0000
952@@ -1,4 +1,4 @@
953-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
954+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
955 #
956 # Copyright 2002 Ben Escoto <ben@emerose.org>
957 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
958@@ -20,20 +20,21 @@
959 # along with duplicity; if not, write to the Free Software Foundation,
960 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
961
962-import sys
963 from future import standard_library
964 standard_library.install_aliases()
965 from builtins import input
966+
967+import email
968+import email.encoders
969+import email.mime.multipart
970+import getpass
971 import imaplib
972+import os
973 import re
974-import os
975+import socket
976+import sys
977 import time
978-import socket
979-import io
980-import getpass
981-import email
982-import email.encoders
983-import email.mime.multipart
984+
985 from email.parser import Parser
986 try:
987 from email.policy import default # pylint: disable=import-error
988@@ -45,10 +46,10 @@
989 import ssl
990 socket.sslerror = ssl.SSLError
991
992+from duplicity import config
993+from duplicity import log
994+from duplicity.errors import * # pylint: disable=unused-wildcard-import
995 import duplicity.backend
996-from duplicity import globals
997-from duplicity import log
998-from duplicity.errors import * # @UnusedWildImport
999
1000
1001 class ImapBackend(duplicity.backend.Backend):
1002@@ -104,13 +105,13 @@
1003 self.remote_dir = re.sub(r'^/', r'', parsed_url.path, 1)
1004
1005 # Login
1006- if (not(globals.imap_full_address)):
1007+ if (not(config.imap_full_address)):
1008 self.conn.login(self.username, self.password)
1009- self.conn.select(globals.imap_mailbox)
1010+ self.conn.select(config.imap_mailbox)
1011 log.Info(u"IMAP connected")
1012 else:
1013 self.conn.login(self.username + u"@" + parsed_url.hostname, self.password)
1014- self.conn.select(globals.imap_mailbox)
1015+ self.conn.select(config.imap_mailbox)
1016 log.Info(u"IMAP connected")
1017
1018 def prepareBody(self, f, rname):
1019@@ -133,7 +134,7 @@
1020
1021 def _put(self, source_path, remote_filename):
1022 f = source_path.open(u"rb")
1023- allowedTimeout = globals.timeout
1024+ allowedTimeout = config.timeout
1025 if (allowedTimeout == 0):
1026 # Allow a total timeout of 1 day
1027 allowedTimeout = 2880
1028@@ -143,8 +144,8 @@
1029 body = self.prepareBody(f, remote_filename)
1030 # If we don't select the IMAP folder before
1031 # append, the message goes into the INBOX.
1032- self.conn.select(globals.imap_mailbox)
1033- self.conn.append(globals.imap_mailbox, None, None, body.encode())
1034+ self.conn.select(config.imap_mailbox)
1035+ self.conn.append(config.imap_mailbox, None, None, body.encode())
1036 break
1037 except (imaplib.IMAP4.abort, socket.error, socket.sslerror):
1038 allowedTimeout -= 1
1039@@ -162,26 +163,26 @@
1040 log.Info(u"IMAP mail with '%s' subject stored" % remote_filename)
1041
1042 def _get(self, remote_filename, local_path):
1043- allowedTimeout = globals.timeout
1044+ allowedTimeout = config.timeout
1045 if (allowedTimeout == 0):
1046 # Allow a total timeout of 1 day
1047 allowedTimeout = 2880
1048 while allowedTimeout > 0:
1049 try:
1050- self.conn.select(globals.imap_mailbox)
1051- (result, list) = self.conn.search(None, u'Subject', remote_filename)
1052+ self.conn.select(config.imap_mailbox)
1053+ (result, flist) = self.conn.search(None, u'Subject', remote_filename)
1054 if result != u"OK":
1055- raise Exception(list[0])
1056+ raise Exception(flist[0])
1057
1058 # check if there is any result
1059- if list[0] == u'':
1060+ if flist[0] == u'':
1061 raise Exception(u"no mail with subject %s")
1062
1063- (result, list) = self.conn.fetch(list[0], u"(RFC822)")
1064+ (result, flist) = self.conn.fetch(flist[0], u"(RFC822)")
1065
1066 if result != u"OK":
1067- raise Exception(list[0])
1068- rawbody = list[0][1]
1069+ raise Exception(flist[0])
1070+ rawbody = flist[0][1]
1071
1072 p = Parser()
1073
1074@@ -212,26 +213,26 @@
1075
1076 def _list(self):
1077 ret = []
1078- (result, list) = self.conn.select(globals.imap_mailbox)
1079+ (result, flist) = self.conn.select(config.imap_mailbox)
1080 if result != u"OK":
1081- raise BackendException(list[0])
1082+ raise BackendException(flist[0])
1083
1084 # Going to find all the archives which have remote_dir in the From
1085 # address
1086
1087 # Search returns an error if you haven't selected an IMAP folder.
1088- (result, list) = self.conn.search(None, u'FROM', self.remote_dir)
1089+ (result, flist) = self.conn.search(None, u'FROM', self.remote_dir)
1090 if result != u"OK":
1091- raise Exception(list[0])
1092- if list[0] == b'':
1093+ raise Exception(flist[0])
1094+ if flist[0] == b'':
1095 return ret
1096- nums = list[0].strip().split(b" ")
1097- set = b"%s:%s" % (nums[0], nums[-1])
1098- (result, list) = self.conn.fetch(set, u"(BODY[HEADER])")
1099+ nums = flist[0].strip().split(b" ")
1100+ set = b"%s:%s" % (nums[0], nums[-1]) # pylint: disable=redefined-builtin
1101+ (result, flist) = self.conn.fetch(set, u"(BODY[HEADER])")
1102 if result != u"OK":
1103- raise Exception(list[0])
1104+ raise Exception(flist[0])
1105
1106- for msg in list:
1107+ for msg in flist:
1108 if (len(msg) == 1):
1109 continue
1110 if sys.version_info.major >= 3:
1111@@ -245,33 +246,33 @@
1112 if (not (header_from is None)):
1113 if (re.compile(u"^" + self.remote_dir + u"$").match(header_from)):
1114 ret.append(subj)
1115- log.Info(u"IMAP LIST: %s %s" % (subj, header_from))
1116+ log.Info(u"IMAP flist: %s %s" % (subj, header_from))
1117 return ret
1118
1119 def imapf(self, fun, *args):
1120- (ret, list) = fun(*args)
1121+ (ret, flist) = fun(*args)
1122 if ret != u"OK":
1123- raise Exception(list[0])
1124- return list
1125+ raise Exception(flist[0])
1126+ return flist
1127
1128 def delete_single_mail(self, i):
1129 self.imapf(self.conn.store, i, u"+FLAGS", u'\\DELETED')
1130
1131 def expunge(self):
1132- list = self.imapf(self.conn.expunge)
1133+ flist = self.imapf(self.conn.expunge)
1134
1135 def _delete_list(self, filename_list):
1136 for filename in filename_list:
1137- list = self.imapf(self.conn.search, None, u"(SUBJECT %s)" % filename)
1138- list = list[0].split()
1139- if len(list) > 0 and list[0] != u"":
1140- self.delete_single_mail(list[0])
1141+ flist = self.imapf(self.conn.search, None, u"(SUBJECT %s)" % filename)
1142+ flist = flist[0].split()
1143+ if len(flist) > 0 and flist[0] != u"":
1144+ self.delete_single_mail(flist[0])
1145 log.Notice(u"marked %s to be deleted" % filename)
1146 self.expunge()
1147 log.Notice(u"IMAP expunged %s files" % len(filename_list))
1148
1149 def _close(self):
1150- self.conn.select(globals.imap_mailbox)
1151+ self.conn.select(config.imap_mailbox)
1152 self.conn.close()
1153 self.conn.logout()
1154
1155
1156=== modified file 'duplicity/backends/jottacloudbackend.py'
1157--- duplicity/backends/jottacloudbackend.py 2019-11-21 15:34:26 +0000
1158+++ duplicity/backends/jottacloudbackend.py 2020-03-22 12:35:54 +0000
1159@@ -22,14 +22,13 @@
1160 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
1161
1162 # stdlib
1163+import logging
1164 import posixpath
1165-import locale
1166-import logging
1167
1168-# import duplicity stuff # version 0.6
1169-import duplicity.backend
1170+# import duplicity stuff
1171 from duplicity import log
1172 from duplicity.errors import BackendException
1173+import duplicity.backend
1174
1175
1176 def get_jotta_device(jfs):
1177
1178=== modified file 'duplicity/backends/lftpbackend.py'
1179--- duplicity/backends/lftpbackend.py 2020-01-02 12:05:22 +0000
1180+++ duplicity/backends/lftpbackend.py 2020-03-22 12:35:54 +0000
1181@@ -1,4 +1,4 @@
1182-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1183+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1184 #
1185 # Copyright 2002 Ben Escoto <ben@emerose.org>
1186 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
1187@@ -39,7 +39,7 @@
1188 from pipes import quote as cmd_quote
1189
1190 import duplicity.backend
1191-from duplicity import globals
1192+from duplicity import config
1193 from duplicity import log
1194 from duplicity import tempdir
1195 from duplicity import util
1196@@ -90,14 +90,14 @@
1197 self.password = self.get_password()
1198 self.authflag = u"-u '%s,%s'" % (self.username, self.password)
1199
1200- if globals.ftp_connection == u'regular':
1201+ if config.ftp_connection == u'regular':
1202 self.conn_opt = u'off'
1203 else:
1204 self.conn_opt = u'on'
1205
1206 # check for cacert file if https
1207- self.cacert_file = globals.ssl_cacert_file
1208- if self.scheme == u'https' and not globals.ssl_no_check_certificate:
1209+ self.cacert_file = config.ssl_cacert_file
1210+ if self.scheme == u'https' and not config.ssl_no_check_certificate:
1211 cacert_candidates = [u"~/.duplicity/cacert.pem",
1212 u"~/duplicity_cacert.pem",
1213 u"/etc/duplicity/cacert.pem"]
1214@@ -113,11 +113,11 @@
1215 self.tempfd, self.tempname = tempdir.default().mkstemp()
1216 self.tempfile = os.fdopen(self.tempfd, u"w")
1217 self.tempfile.write(u"set ssl:verify-certificate " +
1218- (u"false" if globals.ssl_no_check_certificate else u"true") + u"\n")
1219+ (u"false" if config.ssl_no_check_certificate else u"true") + u"\n")
1220 if self.cacert_file:
1221 self.tempfile.write(u"set ssl:ca-file " + cmd_quote(self.cacert_file) + u"\n")
1222- if globals.ssl_cacert_path:
1223- self.tempfile.write(u"set ssl:ca-path " + cmd_quote(globals.ssl_cacert_path) + u"\n")
1224+ if config.ssl_cacert_path:
1225+ self.tempfile.write(u"set ssl:ca-path " + cmd_quote(config.ssl_cacert_path) + u"\n")
1226 if self.parsed_url.scheme == u'ftps':
1227 self.tempfile.write(u"set ftp:ssl-allow true\n")
1228 self.tempfile.write(u"set ftp:ssl-protect-data true\n")
1229@@ -129,8 +129,8 @@
1230 else:
1231 self.tempfile.write(u"set ftp:ssl-allow false\n")
1232 self.tempfile.write(u"set http:use-propfind true\n")
1233- self.tempfile.write(u"set net:timeout %s\n" % globals.timeout)
1234- self.tempfile.write(u"set net:max-retries %s\n" % globals.num_retries)
1235+ self.tempfile.write(u"set net:timeout %s\n" % config.timeout)
1236+ self.tempfile.write(u"set net:max-retries %s\n" % config.num_retries)
1237 self.tempfile.write(u"set ftp:passive-mode %s\n" % self.conn_opt)
1238 if log.getverbosity() >= log.DEBUG:
1239 self.tempfile.write(u"debug\n")
1240
1241=== modified file 'duplicity/backends/localbackend.py'
1242--- duplicity/backends/localbackend.py 2018-07-23 14:55:39 +0000
1243+++ duplicity/backends/localbackend.py 2020-03-22 12:35:54 +0000
1244@@ -1,4 +1,4 @@
1245-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1246+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1247 #
1248 # Copyright 2002 Ben Escoto <ben@emerose.org>
1249 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
1250@@ -22,7 +22,6 @@
1251 import os
1252
1253 import duplicity.backend
1254-from duplicity import log
1255 from duplicity import path
1256 from duplicity.errors import BackendException
1257
1258
1259=== modified file 'duplicity/backends/mediafirebackend.py'
1260--- duplicity/backends/mediafirebackend.py 2020-02-13 15:46:40 +0000
1261+++ duplicity/backends/mediafirebackend.py 2020-03-22 12:35:54 +0000
1262@@ -1,4 +1,4 @@
1263-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1264+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1265 #
1266 # Copyright 2016 Roman Yepishev <rye@keypressure.com>
1267 #
1268
1269=== modified file 'duplicity/backends/megabackend.py'
1270--- duplicity/backends/megabackend.py 2020-01-03 01:16:01 +0000
1271+++ duplicity/backends/megabackend.py 2020-03-22 12:35:54 +0000
1272@@ -1,4 +1,4 @@
1273-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1274+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1275 #
1276 # Copyright 2017 Tomas Vondra (Launchpad id: tomas-v)
1277 # Copyright 2017 Kenneth Loafman <kenneth@loafman.com>
1278@@ -22,11 +22,7 @@
1279 from __future__ import print_function
1280 from future import standard_library
1281 standard_library.install_aliases()
1282-from builtins import str
1283-from builtins import range
1284-from builtins import object
1285
1286-from duplicity import log
1287 from duplicity import util
1288 from duplicity.errors import BackendException
1289 import duplicity.backend
1290
1291=== modified file 'duplicity/backends/multibackend.py'
1292--- duplicity/backends/multibackend.py 2020-02-14 19:59:59 +0000
1293+++ duplicity/backends/multibackend.py 2020-03-22 12:35:54 +0000
1294@@ -1,4 +1,4 @@
1295-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1296+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1297 #
1298 # Copyright 2015 Steve Tynor <steve.tynor@gmail.com>
1299 # Copyright 2016 Thomas Harning Jr <harningt@gmail.com>
1300@@ -27,7 +27,6 @@
1301 standard_library.install_aliases()
1302 import os
1303 import os.path
1304-import string
1305 import sys
1306 import urllib.request # pylint: disable=import-error
1307 import urllib.parse # pylint: disable=import-error
1308@@ -36,7 +35,7 @@
1309
1310 import duplicity.backend
1311 from duplicity.errors import BackendException
1312-from duplicity import globals
1313+from duplicity import config
1314 from duplicity import log
1315 from duplicity import util
1316
1317@@ -243,7 +242,7 @@
1318 while True:
1319 store = stores[self.__write_cursor]
1320 try:
1321- next = self.__write_cursor + 1
1322+ next = self.__write_cursor + 1 # pylint: disable=redefined-builtin
1323 if (next > len(stores) - 1):
1324 next = 0
1325 log.Log(_(u"MultiBackend: _put: write to store #%s (%s)")
1326@@ -288,8 +287,8 @@
1327 stores = self._eligible_stores(remote_filename)
1328
1329 for s in stores:
1330- list = s.list()
1331- if remote_filename in list:
1332+ flist = s.list()
1333+ if remote_filename in flist:
1334 s.get(remote_filename, local_path)
1335 return
1336 log.Log(_(u"MultiBackend: failed to get %s to %s from %s")
1337@@ -303,7 +302,7 @@
1338 def _list(self):
1339 lists = []
1340 for s in self.__stores:
1341- globals.are_errors_fatal[u'list'] = (False, [])
1342+ config.are_errors_fatal[u'list'] = (False, [])
1343 l = s.list()
1344 log.Notice(_(u"MultiBackend: %s: %d files")
1345 % (s.backend.parsed_url.url_string, len(l)))
1346@@ -333,8 +332,8 @@
1347 # before we try to delete
1348 # ENHANCEME: maintain a cached list for each store
1349 for s in stores:
1350- list = s.list()
1351- if filename in list:
1352+ flist = s.list()
1353+ if filename in flist:
1354 if hasattr(s, u'_delete_list'):
1355 s._do_delete_list([filename, ])
1356 elif hasattr(s, u'_delete'):
1357@@ -350,7 +349,6 @@
1358 log.Log(_(u"MultiBackend: failed to delete %s. Tried all backing stores and none succeeded")
1359 % (filename),
1360 log.ERROR)
1361-# raise BackendException("failed to delete")
1362
1363
1364 duplicity.backend.register_backend(u'multi', MultiBackend)
1365
1366=== modified file 'duplicity/backends/ncftpbackend.py'
1367--- duplicity/backends/ncftpbackend.py 2020-01-02 12:05:22 +0000
1368+++ duplicity/backends/ncftpbackend.py 2020-03-22 12:35:54 +0000
1369@@ -1,4 +1,4 @@
1370-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1371+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1372 #
1373 # Copyright 2002 Ben Escoto <ben@emerose.org>
1374 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
1375@@ -28,7 +28,7 @@
1376 import re
1377
1378 import duplicity.backend
1379-from duplicity import globals
1380+from duplicity import config
1381 from duplicity import log
1382 from duplicity import tempdir
1383 from duplicity import util
1384@@ -82,7 +82,7 @@
1385
1386 self.password = self.get_password()
1387
1388- if globals.ftp_connection == u'regular':
1389+ if config.ftp_connection == u'regular':
1390 self.conn_opt = u'-E'
1391 else:
1392 self.conn_opt = u'-F'
1393@@ -94,7 +94,7 @@
1394 self.tempfile.write(u"pass %s\n" % self.password)
1395 self.tempfile.close()
1396 self.flags = u"-f %s %s -t %s -o useCLNT=0,useHELP_SITE=0 " % \
1397- (self.tempname, self.conn_opt, globals.timeout)
1398+ (self.tempname, self.conn_opt, config.timeout)
1399 if parsed_url.port is not None and parsed_url.port != 21:
1400 self.flags += u" -P '%s'" % (parsed_url.port)
1401
1402
1403=== modified file 'duplicity/backends/onedrivebackend.py'
1404--- duplicity/backends/onedrivebackend.py 2019-12-15 17:09:35 +0000
1405+++ duplicity/backends/onedrivebackend.py 2020-03-22 12:35:54 +0000
1406@@ -1,4 +1,4 @@
1407-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1408+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1409 # vim:tabstop=4:shiftwidth=4:expandtab
1410 #
1411 # Copyright 2014 Google Inc.
1412@@ -34,7 +34,7 @@
1413
1414 import duplicity.backend
1415 from duplicity.errors import BackendException
1416-from duplicity import globals
1417+from duplicity import config
1418 from duplicity import log
1419
1420 # For documentation on the API, see
1421@@ -98,7 +98,7 @@
1422 u'You did not specify a path. '
1423 u'Please specify a path, e.g. onedrive://duplicity_backups'))
1424
1425- if globals.volsize > (10 * 1024 * 1024 * 1024):
1426+ if config.volsize > (10 * 1024 * 1024 * 1024):
1427 raise BackendException((
1428 u'Your --volsize is bigger than 10 GiB, which is the maximum '
1429 u'file size on OneDrive.'))
1430
1431=== modified file 'duplicity/backends/par2backend.py'
1432--- duplicity/backends/par2backend.py 2019-10-27 23:55:56 +0000
1433+++ duplicity/backends/par2backend.py 2020-03-22 12:35:54 +0000
1434@@ -1,3 +1,5 @@
1435+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1436+#
1437 # Copyright 2013 Germar Reitze <germar.reitze@gmail.com>
1438 #
1439 # This file is part of duplicity.
1440@@ -16,14 +18,12 @@
1441 # along with duplicity; if not, write to the Free Software Foundation,
1442 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
1443
1444-from builtins import filter
1445-
1446 import os
1447 import re
1448 from duplicity import backend
1449 from duplicity.errors import BackendException
1450 from duplicity import log
1451-from duplicity import globals
1452+from duplicity import config
1453 from duplicity import util
1454
1455
1456@@ -44,12 +44,12 @@
1457
1458 self.parsed_url = parsed_url
1459 try:
1460- self.redundancy = globals.par2_redundancy
1461+ self.redundancy = config.par2_redundancy
1462 except AttributeError:
1463 self.redundancy = 10
1464
1465 try:
1466- self.common_options = globals.par2_options + u" -q -q"
1467+ self.common_options = config.par2_options + u" -q -q"
1468 except AttributeError:
1469 self.common_options = u"-q -q"
1470
1471
1472=== modified file 'duplicity/backends/pcabackend.py'
1473--- duplicity/backends/pcabackend.py 2020-03-17 11:47:27 +0000
1474+++ duplicity/backends/pcabackend.py 2020-03-22 12:35:54 +0000
1475@@ -1,4 +1,4 @@
1476-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1477+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1478 #
1479 # Copyright 2013 Matthieu Huin <mhu@enovance.com>
1480 # Copyright 2017 Xavier Lucas <xavier.lucas@corp.ovh.com>
1481@@ -144,7 +144,7 @@
1482 log.FatalError(u"Container '%s' exists but its storage policy is '%s' not '%s'."
1483 % (self.container, container_metadata[policy_header.lower()], policy))
1484
1485- def _error_code(self, operation, e): # pylint: disable: unused-argument
1486+ def _error_code(self, operation, e): # pylint: disable= unused-argument
1487 if isinstance(e, self.resp_exc):
1488 if e.http_status == 404:
1489 return log.ErrorCode.backend_not_found
1490
1491=== modified file 'duplicity/backends/pydrivebackend.py'
1492--- duplicity/backends/pydrivebackend.py 2019-11-13 02:18:15 +0000
1493+++ duplicity/backends/pydrivebackend.py 2020-03-22 12:35:54 +0000
1494@@ -1,4 +1,4 @@
1495-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1496+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1497 #
1498 # Copyright 2015 Yigal Asnis
1499 #
1500@@ -18,13 +18,13 @@
1501
1502 from builtins import next
1503 from builtins import str
1504-import string
1505+
1506 import os
1507
1508-import duplicity.backend
1509 from duplicity import log
1510 from duplicity import util
1511 from duplicity.errors import BackendException
1512+import duplicity.backend
1513
1514
1515 class PyDriveBackend(duplicity.backend.Backend):
1516@@ -214,7 +214,7 @@
1517 size = int(drive_file[u'fileSize'])
1518 return {u'size': size}
1519
1520- def _error_code(self, operation, error):
1521+ def _error_code(self, operation, error): # pylint: disable=unused-argument
1522 from pydrive.files import ApiRequestError, FileNotUploadedError # pylint: disable=import-error
1523 if isinstance(error, FileNotUploadedError):
1524 return log.ErrorCode.backend_not_found
1525
1526=== modified file 'duplicity/backends/pyrax_identity/__init__.py'
1527--- duplicity/backends/pyrax_identity/__init__.py 2014-12-12 14:39:54 +0000
1528+++ duplicity/backends/pyrax_identity/__init__.py 2020-03-22 12:35:54 +0000
1529@@ -1,4 +1,4 @@
1530-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1531+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1532 #
1533 # Copyright 2002 Ben Escoto <ben@emerose.org>
1534 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
1535
1536=== modified file 'duplicity/backends/pyrax_identity/hubic.py'
1537--- duplicity/backends/pyrax_identity/hubic.py 2019-12-28 21:26:47 +0000
1538+++ duplicity/backends/pyrax_identity/hubic.py 2020-03-22 12:35:54 +0000
1539@@ -1,4 +1,5 @@
1540-# -*- coding: utf-8 -*-
1541+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1542+#
1543 # Copyright (c) 2014 Gu1
1544 # Licensed under the MIT license
1545
1546
1547=== modified file 'duplicity/backends/rclonebackend.py'
1548--- duplicity/backends/rclonebackend.py 2020-01-02 12:05:22 +0000
1549+++ duplicity/backends/rclonebackend.py 2020-03-22 12:35:54 +0000
1550@@ -1,4 +1,4 @@
1551-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1552+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1553 #
1554 # Copyright 2019 Francesco Magno
1555 # Copyright 2019 Kenneth Loafman <kenneth@loafman.com>
1556@@ -21,18 +21,14 @@
1557
1558 from future import standard_library
1559 standard_library.install_aliases()
1560-from builtins import str
1561-from builtins import range
1562-from builtins import object
1563
1564 import os
1565 import os.path
1566
1567-import duplicity.backend
1568-from duplicity import path
1569 from duplicity import log
1570+from duplicity import util
1571 from duplicity.errors import BackendException
1572-from duplicity import util
1573+import duplicity.backend
1574
1575
1576 class RcloneBackend(duplicity.backend.Backend):
1577
1578=== modified file 'duplicity/backends/rsyncbackend.py'
1579--- duplicity/backends/rsyncbackend.py 2020-01-02 12:05:22 +0000
1580+++ duplicity/backends/rsyncbackend.py 2020-03-22 12:35:54 +0000
1581@@ -1,4 +1,4 @@
1582-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1583+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1584 #
1585 # Copyright 2002 Ben Escoto <ben@emerose.org>
1586 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
1587@@ -27,7 +27,7 @@
1588
1589 import duplicity.backend
1590 from duplicity.errors import InvalidBackendURL
1591-from duplicity import globals, tempdir, util
1592+from duplicity import config, tempdir, util
1593
1594
1595 class RsyncBackend(duplicity.backend.Backend):
1596@@ -88,8 +88,8 @@
1597 if self.over_rsyncd():
1598 portOption = port
1599 else:
1600- portOption = u"-e 'ssh %s -oBatchMode=yes %s'" % (port, globals.ssh_options)
1601- rsyncOptions = globals.rsync_options
1602+ portOption = u"-e 'ssh %s -oBatchMode=yes %s'" % (port, config.ssh_options)
1603+ rsyncOptions = config.rsync_options
1604 # build cmd
1605 self.cmd = u"rsync %s %s" % (portOption, rsyncOptions)
1606
1607@@ -121,7 +121,7 @@
1608 self.subprocess_popen(commandline)
1609
1610 def _list(self):
1611- def split(str):
1612+ def split(str): # pylint: disable=redefined-builtin
1613 line = str.split()
1614 if len(line) > 4 and line[4] != u'.':
1615 return line[4]
1616@@ -140,7 +140,7 @@
1617 else:
1618 dont_delete_list.append(file)
1619
1620- dir = tempfile.mkdtemp()
1621+ dir = tempfile.mkdtemp() # pylint: disable=redefined-builtin
1622 exclude, exclude_name = tempdir.default().mkstemp_file()
1623 to_delete = [exclude_name]
1624 for file in dont_delete_list:
1625
1626=== modified file 'duplicity/backends/s3_boto3_backend.py'
1627--- duplicity/backends/s3_boto3_backend.py 2019-12-06 16:53:41 +0000
1628+++ duplicity/backends/s3_boto3_backend.py 2020-03-22 12:35:54 +0000
1629@@ -1,4 +1,4 @@
1630-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1631+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1632 #
1633 # Copyright 2002 Ben Escoto <ben@emerose.org>
1634 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
1635@@ -21,7 +21,7 @@
1636 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
1637
1638 import duplicity.backend
1639-from duplicity import globals
1640+from duplicity import config
1641 from duplicity import log
1642 from duplicity.errors import FatalBackendException, BackendException
1643 from duplicity import util
1644@@ -108,31 +108,31 @@
1645 remote_filename = util.fsdecode(remote_filename)
1646 key = self.key_prefix + remote_filename
1647
1648- if globals.s3_use_rrs:
1649+ if config.s3_use_rrs:
1650 storage_class = u'REDUCED_REDUNDANCY'
1651- elif globals.s3_use_ia:
1652+ elif config.s3_use_ia:
1653 storage_class = u'STANDARD_IA'
1654- elif globals.s3_use_onezone_ia:
1655+ elif config.s3_use_onezone_ia:
1656 storage_class = u'ONEZONE_IA'
1657- elif globals.s3_use_glacier and u"manifest" not in remote_filename:
1658+ elif config.s3_use_glacier and u"manifest" not in remote_filename:
1659 storage_class = u'GLACIER'
1660- elif globals.s3_use_deep_archive and u"manifest" not in remote_filename:
1661+ elif config.s3_use_deep_archive and u"manifest" not in remote_filename:
1662 storage_class = u'DEEP_ARCHIVE'
1663 else:
1664 storage_class = u'STANDARD'
1665 extra_args = {u'StorageClass': storage_class}
1666
1667- if globals.s3_use_sse:
1668+ if config.s3_use_sse:
1669 extra_args[u'ServerSideEncryption'] = u'AES256'
1670- elif globals.s3_use_sse_kms:
1671- if globals.s3_kms_key_id is None:
1672+ elif config.s3_use_sse_kms:
1673+ if config.s3_kms_key_id is None:
1674 raise FatalBackendException(u"S3 USE SSE KMS was requested, but key id not provided "
1675 u"require (--s3-kms-key-id)",
1676 code=log.ErrorCode.s3_kms_no_id)
1677 extra_args[u'ServerSideEncryption'] = u'aws:kms'
1678- extra_args[u'SSEKMSKeyId'] = globals.s3_kms_key_id
1679- if globals.s3_kms_grant:
1680- extra_args[u'GrantFullControl'] = globals.s3_kms_grant
1681+ extra_args[u'SSEKMSKeyId'] = config.s3_kms_key_id
1682+ if config.s3_kms_grant:
1683+ extra_args[u'GrantFullControl'] = config.s3_kms_grant
1684
1685 # Should the tracker be scoped to the put or the backend?
1686 # The put seems right to me, but the results look a little more correct
1687@@ -169,7 +169,6 @@
1688
1689 def _query(self, remote_filename):
1690 import botocore # pylint: disable=import-error
1691- from botocore.exceptions import ClientError # pylint: disable=import-error
1692
1693 remote_filename = util.fsdecode(remote_filename)
1694 key = self.key_prefix + remote_filename
1695
1696=== modified file 'duplicity/backends/s3_boto_backend.py'
1697--- duplicity/backends/s3_boto_backend.py 2020-02-06 15:27:43 +0000
1698+++ duplicity/backends/s3_boto_backend.py 2020-03-22 12:35:54 +0000
1699@@ -1,4 +1,4 @@
1700-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1701+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1702 #
1703 # Copyright 2002 Ben Escoto <ben@emerose.org>
1704 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
1705@@ -21,9 +21,9 @@
1706 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
1707
1708 import duplicity.backend
1709-from duplicity import globals
1710+from duplicity import config
1711
1712-if globals.s3_use_multiprocessing:
1713+if config.s3_use_multiprocessing:
1714 from ._boto_multi import BotoBackend
1715 else:
1716 from ._boto_single import BotoBackend
1717
1718=== modified file 'duplicity/backends/ssh_paramiko_backend.py'
1719--- duplicity/backends/ssh_paramiko_backend.py 2020-03-15 11:52:38 +0000
1720+++ duplicity/backends/ssh_paramiko_backend.py 2020-03-22 12:35:54 +0000
1721@@ -1,4 +1,4 @@
1722-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1723+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1724 #
1725 # Copyright 2002 Ben Escoto <ben@emerose.org>
1726 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
1727@@ -39,8 +39,7 @@
1728 from binascii import hexlify
1729
1730 import duplicity.backend
1731-from duplicity import globals
1732-from duplicity import util
1733+from duplicity import config
1734 from duplicity.errors import BackendException
1735
1736 global paramiko
1737@@ -204,7 +203,7 @@
1738 self.config.update({u'port': 22})
1739 # parse ssh options for alternative ssh private key, identity file
1740 m = re.search(r"^(?:.+\s+)?(?:-oIdentityFile=|-i\s+)(([\"'])([^\\2]+)\\2|[\S]+).*",
1741- globals.ssh_options)
1742+ config.ssh_options)
1743 if (m is not None):
1744 keyfilename = m.group(3) if m.group(3) else m.group(1)
1745 self.config[u'identityfile'] = keyfilename
1746@@ -227,7 +226,7 @@
1747 self.config[u'identityfile'] = None
1748
1749 # get password, enable prompt if askpass is set
1750- self.use_getpass = globals.ssh_askpass
1751+ self.use_getpass = config.ssh_askpass
1752 # set url values for beautiful login prompt
1753 parsed_url.username = self.config[u'user']
1754 parsed_url.hostname = self.config[u'hostname']
1755@@ -246,7 +245,7 @@
1756 self.config[u'user'],
1757 self.config[u'hostname'],
1758 self.config[u'port'], e))
1759- self.client.get_transport().set_keepalive((int)(globals.timeout / 2))
1760+ self.client.get_transport().set_keepalive((int)(config.timeout / 2))
1761
1762 self.scheme = duplicity.backend.strip_prefix(parsed_url.scheme,
1763 u'paramiko')
1764@@ -300,7 +299,7 @@
1765 f = open(source_path.name, u'rb')
1766 try:
1767 chan = self.client.get_transport().open_session()
1768- chan.settimeout(globals.timeout)
1769+ chan.settimeout(config.timeout)
1770 # scp in sink mode uses the arg as base directory
1771 chan.exec_command(u"scp -t '%s'" % self.remote_dir)
1772 except Exception as e:
1773@@ -332,7 +331,7 @@
1774 if self.use_scp:
1775 try:
1776 chan = self.client.get_transport().open_session()
1777- chan.settimeout(globals.timeout)
1778+ chan.settimeout(config.timeout)
1779 chan.exec_command(u"scp -f '%s/%s'" % (self.remote_dir,
1780 remote_filename))
1781 except Exception as e:
1782@@ -398,7 +397,7 @@
1783 command and returns stdout of command. throws an exception if exit
1784 code!=0 and not ignored"""
1785 try:
1786- ch_in, ch_out, ch_err = self.client.exec_command(cmd, -1, globals.timeout)
1787+ ch_in, ch_out, ch_err = self.client.exec_command(cmd, -1, config.timeout)
1788 output = ch_out.read(-1)
1789 return output
1790 except Exception as e:
1791
1792=== modified file 'duplicity/backends/ssh_pexpect_backend.py'
1793--- duplicity/backends/ssh_pexpect_backend.py 2020-01-29 19:09:08 +0000
1794+++ duplicity/backends/ssh_pexpect_backend.py 2020-03-22 12:35:54 +0000
1795@@ -1,4 +1,4 @@
1796-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1797+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1798 #
1799 # Copyright 2002 Ben Escoto <ben@emerose.org>
1800 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
1801@@ -29,15 +29,14 @@
1802 standard_library.install_aliases()
1803 from builtins import map
1804
1805+import os
1806 import re
1807-import string
1808-import os
1809
1810-import duplicity.backend
1811-from duplicity import globals
1812+from duplicity import config
1813 from duplicity import log
1814 from duplicity import util
1815 from duplicity.errors import BackendException
1816+import duplicity.backend
1817
1818
1819 class SSHPExpectBackend(duplicity.backend.Backend):
1820@@ -57,12 +56,12 @@
1821 self.retry_delay = 10
1822
1823 self.scp_command = u"scp"
1824- if globals.scp_command:
1825- self.scp_command = globals.scp_command
1826+ if config.scp_command:
1827+ self.scp_command = config.scp_command
1828
1829 self.sftp_command = u"sftp"
1830- if globals.sftp_command:
1831- self.sftp_command = globals.sftp_command
1832+ if config.sftp_command:
1833+ self.sftp_command = config.sftp_command
1834
1835 self.scheme = duplicity.backend.strip_prefix(parsed_url.scheme, u'pexpect')
1836 self.use_scp = (self.scheme == u'scp')
1837@@ -81,22 +80,22 @@
1838 self.remote_prefix = self.remote_dir + u'/'
1839 # maybe use different ssh port
1840 if parsed_url.port:
1841- globals.ssh_options = globals.ssh_options + u" -oPort=%s" % parsed_url.port
1842+ config.ssh_options = config.ssh_options + u" -oPort=%s" % parsed_url.port
1843 # set some defaults if user has not specified already.
1844- if u"ServerAliveInterval" not in globals.ssh_options:
1845- globals.ssh_options += u" -oServerAliveInterval=%d" % ((int)(globals.timeout / 2))
1846- if u"ServerAliveCountMax" not in globals.ssh_options:
1847- globals.ssh_options += u" -oServerAliveCountMax=2"
1848+ if u"ServerAliveInterval" not in config.ssh_options:
1849+ config.ssh_options += u" -oServerAliveInterval=%d" % ((int)(config.timeout / 2))
1850+ if u"ServerAliveCountMax" not in config.ssh_options:
1851+ config.ssh_options += u" -oServerAliveCountMax=2"
1852
1853 # set up password
1854- self.use_getpass = globals.ssh_askpass
1855+ self.use_getpass = config.ssh_askpass
1856 self.password = self.get_password()
1857
1858 def run_scp_command(self, commandline):
1859 u""" Run an scp command, responding to password prompts """
1860 log.Info(u"Running '%s'" % commandline)
1861 child = pexpect.spawn(commandline, timeout=None)
1862- if globals.ssh_askpass:
1863+ if config.ssh_askpass:
1864 state = u"authorizing"
1865 else:
1866 state = u"copying"
1867@@ -171,7 +170,7 @@
1868 u"open(.*): Failure"]
1869 max_response_len = max([len(p) for p in responses[1:]])
1870 log.Info(u"Running '%s'" % (commandline))
1871- child = pexpect.spawn(commandline, timeout=None, maxread=maxread, encoding=globals.fsencoding)
1872+ child = pexpect.spawn(commandline, timeout=None, maxread=maxread, encoding=config.fsencoding)
1873 cmdloc = 0
1874 passprompt = 0
1875 while 1:
1876@@ -239,13 +238,13 @@
1877 u"rename \"%s.%s.part\" \"%s%s\"" %
1878 (self.remote_prefix, remote_filename, self.remote_prefix, remote_filename)]
1879 commandline = (u"%s %s %s" % (self.sftp_command,
1880- globals.ssh_options,
1881+ config.ssh_options,
1882 self.host_string))
1883 self.run_sftp_command(commandline, commands)
1884
1885 def put_scp(self, source_path, remote_filename):
1886 commandline = u"%s %s %s %s:%s%s" % \
1887- (self.scp_command, globals.ssh_options, source_path.uc_name, self.host_string,
1888+ (self.scp_command, config.ssh_options, source_path.uc_name, self.host_string,
1889 self.remote_prefix, remote_filename)
1890 self.run_scp_command(commandline)
1891
1892@@ -260,13 +259,13 @@
1893 commands = [u"get \"%s%s\" \"%s\"" %
1894 (self.remote_prefix, remote_filename, local_path.uc_name)]
1895 commandline = (u"%s %s %s" % (self.sftp_command,
1896- globals.ssh_options,
1897+ config.ssh_options,
1898 self.host_string))
1899 self.run_sftp_command(commandline, commands)
1900
1901 def get_scp(self, remote_filename, local_path):
1902 commandline = u"%s %s %s:%s%s %s" % \
1903- (self.scp_command, globals.ssh_options, self.host_string, self.remote_prefix,
1904+ (self.scp_command, config.ssh_options, self.host_string, self.remote_prefix,
1905 remote_filename, local_path.uc_name)
1906 self.run_scp_command(commandline)
1907
1908@@ -285,7 +284,7 @@
1909
1910 commands = mkdir_commands + [u"ls -1"]
1911 commandline = (u"%s %s %s" % (self.sftp_command,
1912- globals.ssh_options,
1913+ config.ssh_options,
1914 self.host_string))
1915
1916 l = self.run_sftp_command(commandline, commands).split(u'\n')[1:]
1917@@ -295,7 +294,7 @@
1918 def _delete(self, filename):
1919 commands = [u"cd \"%s\"" % (self.remote_dir,)]
1920 commands.append(u"rm \"%s\"" % util.fsdecode(filename))
1921- commandline = (u"%s %s %s" % (self.sftp_command, globals.ssh_options, self.host_string))
1922+ commandline = (u"%s %s %s" % (self.sftp_command, config.ssh_options, self.host_string))
1923 self.run_sftp_command(commandline, commands)
1924
1925
1926
1927=== modified file 'duplicity/backends/swiftbackend.py'
1928--- duplicity/backends/swiftbackend.py 2019-10-28 15:43:01 +0000
1929+++ duplicity/backends/swiftbackend.py 2020-03-22 12:35:54 +0000
1930@@ -1,4 +1,4 @@
1931-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1932+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1933 #
1934 # Copyright 2013 Matthieu Huin <mhu@enovance.com>
1935 #
1936@@ -22,7 +22,7 @@
1937 import os
1938
1939 import duplicity.backend
1940-from duplicity import globals
1941+from duplicity import config
1942 from duplicity import log
1943 from duplicity import util
1944 from duplicity.errors import BackendException
1945@@ -112,7 +112,7 @@
1946 else:
1947 self.prefix = u''
1948
1949- policy = globals.swift_storage_policy
1950+ policy = config.swift_storage_policy
1951 policy_header = u'X-Storage-Policy'
1952
1953 container_metadata = None
1954@@ -139,7 +139,7 @@
1955 log.FatalError(u"Container '%s' exists but its storage policy is '%s' not '%s'."
1956 % (self.container, container_metadata[policy_header.lower()], policy))
1957
1958- def _error_code(self, operation, e):
1959+ def _error_code(self, operation, e): # pylint: disable=unused-argument
1960 if isinstance(e, self.resp_exc):
1961 if e.http_status == 404:
1962 return log.ErrorCode.backend_not_found
1963
1964=== modified file 'duplicity/backends/sxbackend.py'
1965--- duplicity/backends/sxbackend.py 2020-01-02 12:05:22 +0000
1966+++ duplicity/backends/sxbackend.py 2020-03-22 12:35:54 +0000
1967@@ -1,4 +1,4 @@
1968-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1969+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1970 #
1971 # Copyright 2014 Andrea Grandi <a.grandi@gmail.com>
1972 #
1973
1974=== modified file 'duplicity/backends/tahoebackend.py'
1975--- duplicity/backends/tahoebackend.py 2020-01-02 12:05:22 +0000
1976+++ duplicity/backends/tahoebackend.py 2020-03-22 12:35:54 +0000
1977@@ -1,4 +1,4 @@
1978-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
1979+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
1980 #
1981 # Copyright 2008 Francois Deppierraz
1982 #
1983@@ -18,10 +18,9 @@
1984 # along with duplicity; if not, write to the Free Software Foundation,
1985 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
1986
1987-import duplicity.backend
1988 from duplicity import log
1989 from duplicity import util
1990-from duplicity.errors import BackendException
1991+import duplicity.backend
1992
1993
1994 class TAHOEBackend(duplicity.backend.Backend):
1995
1996=== modified file 'duplicity/backends/webdavbackend.py'
1997--- duplicity/backends/webdavbackend.py 2019-10-05 18:31:58 +0000
1998+++ duplicity/backends/webdavbackend.py 2020-03-22 12:35:54 +0000
1999@@ -1,4 +1,4 @@
2000-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
2001+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
2002 #
2003 # Copyright 2002 Ben Escoto <ben@emerose.org>
2004 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
2005@@ -36,7 +36,7 @@
2006 import xml.dom.minidom
2007
2008 import duplicity.backend
2009-from duplicity import globals
2010+from duplicity import config
2011 from duplicity import log
2012 from duplicity import util
2013 from duplicity.errors import BackendException, FatalBackendException
2014@@ -67,7 +67,7 @@
2015
2016 http.client.HTTPSConnection.__init__(self, *args, **kwargs)
2017
2018- self.cacert_file = globals.ssl_cacert_file
2019+ self.cacert_file = config.ssl_cacert_file
2020 self.cacert_candidates = [u"~/.duplicity/cacert.pem",
2021 u"~/duplicity_cacert.pem",
2022 u"/etc/duplicity/cacert.pem"]
2023@@ -96,11 +96,11 @@
2024 if u"create_default_context" in dir(ssl):
2025 context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH,
2026 cafile=self.cacert_file,
2027- capath=globals.ssl_cacert_path)
2028+ capath=config.ssl_cacert_path)
2029 self.sock = context.wrap_socket(sock, server_hostname=self.host)
2030 # the legacy way needing a cert file
2031 else:
2032- if globals.ssl_cacert_path:
2033+ if config.ssl_cacert_path:
2034 raise FatalBackendException(
2035 _(u"Option '--ssl-cacert-path' is not supported "
2036 u"with python 2.7.8 and below."))
2037@@ -152,7 +152,7 @@
2038 self.password = self.get_password()
2039 self.directory = self.sanitize_path(parsed_url.path)
2040
2041- log.Info(_(u"Using WebDAV protocol %s") % (globals.webdav_proto,))
2042+ log.Info(_(u"Using WebDAV protocol %s") % (config.webdav_proto,))
2043 log.Info(_(u"Using WebDAV host %s port %s") % (parsed_url.hostname,
2044 parsed_url.port))
2045 log.Info(_(u"Using WebDAV directory %s") % (self.directory,))
2046@@ -192,7 +192,7 @@
2047 if self.parsed_url.scheme in [u'webdav', u'http']:
2048 self.conn = http.client.HTTPConnection(self.parsed_url.hostname, self.parsed_url.port)
2049 elif self.parsed_url.scheme in [u'webdavs', u'https']:
2050- if globals.ssl_no_check_certificate:
2051+ if config.ssl_no_check_certificate:
2052 self.conn = http.client.HTTPSConnection(self.parsed_url.hostname, self.parsed_url.port)
2053 else:
2054 self.conn = VerifiedHTTPSConnection(self.parsed_url.hostname, self.parsed_url.port)
2055
2056=== modified file 'duplicity/cached_ops.py'
2057--- duplicity/cached_ops.py 2018-11-29 19:00:15 +0000
2058+++ duplicity/cached_ops.py 2020-03-22 12:35:54 +0000
2059@@ -1,4 +1,4 @@
2060-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
2061+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
2062 #
2063 # Copyright 2012 Google Inc.
2064 #
2065
2066=== modified file 'duplicity/commandline.py'
2067--- duplicity/commandline.py 2020-03-06 21:25:13 +0000
2068+++ duplicity/commandline.py 2020-03-22 12:35:54 +0000
2069@@ -1,4 +1,4 @@
2070-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
2071+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
2072 #
2073 # Copyright 2002 Ben Escoto <ben@emerose.org>
2074 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
2075@@ -19,12 +19,11 @@
2076 # along with duplicity; if not, write to the Free Software Foundation,
2077 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
2078
2079-u"""Parse command line, check for consistency, and set globals"""
2080+u"""Parse command line, check for consistency, and set config"""
2081
2082 from __future__ import print_function
2083 from future import standard_library
2084 standard_library.install_aliases()
2085-from builtins import filter
2086 from builtins import str
2087 from builtins import range
2088
2089@@ -43,7 +42,7 @@
2090
2091 from duplicity import backend
2092 from duplicity import dup_time
2093-from duplicity import globals
2094+from duplicity import config
2095 from duplicity import gpg
2096 from duplicity import log
2097 from duplicity import path
2098@@ -108,8 +107,8 @@
2099 u"""
2100 Return expanded version of archdir joined with backname.
2101 """
2102- assert globals.backup_name is not None, \
2103- u"expand_archive_dir() called prior to globals.backup_name being set"
2104+ assert config.backup_name is not None, \
2105+ u"expand_archive_dir() called prior to config.backup_name being set"
2106
2107 return expand_fn(os.path.join(archdir, backname))
2108
2109@@ -135,18 +134,18 @@
2110 return burlhash.hexdigest()
2111
2112
2113-def check_file(option, opt, value):
2114+def check_file(option, opt, value): # pylint: disable=unused-argument
2115 return expand_fn(value)
2116
2117
2118-def check_time(option, opt, value):
2119+def check_time(option, opt, value): # pylint: disable=unused-argument
2120 try:
2121 return dup_time.genstrtotime(value)
2122 except dup_time.TimeException as e:
2123 raise optparse.OptionValueError(str(e))
2124
2125
2126-def check_verbosity(option, opt, value):
2127+def check_verbosity(option, opt, value): # pylint: disable=unused-argument
2128 fail = False
2129
2130 value = value.lower()
2131@@ -218,17 +217,17 @@
2132 def set_time_sep(sep, opt):
2133 if sep == u'-':
2134 raise optparse.OptionValueError(u"Dash ('-') not valid for time-separator.")
2135- globals.time_separator = sep
2136+ config.time_separator = sep
2137 old_fn_deprecation(opt)
2138
2139- def add_selection(o, option, additional_arg, p):
2140+ def add_selection(o, option, additional_arg, p): # pylint: disable=unused-argument
2141 if o.type in (u"string", u"file"):
2142 addarg = util.fsdecode(additional_arg)
2143 else:
2144 addarg = additional_arg
2145 select_opts.append((util.fsdecode(option), addarg))
2146
2147- def add_filelist(o, s, filename, p):
2148+ def add_filelist(o, s, filename, p): # pylint: disable=unused-argument
2149 select_opts.append((util.fsdecode(s), util.fsdecode(filename)))
2150 try:
2151 select_files.append(io.open(filename, u"rt", encoding=u"UTF-8"))
2152@@ -236,12 +235,12 @@
2153 log.FatalError(_(u"Error opening file %s") % filename,
2154 log.ErrorCode.cant_open_filelist)
2155
2156- def print_ver(o, s, v, p):
2157- print(u"duplicity %s" % (globals.version))
2158+ def print_ver(o, s, v, p): # pylint: disable=unused-argument
2159+ print(u"duplicity %s" % (config.version))
2160 sys.exit(0)
2161
2162- def add_rename(o, s, v, p):
2163- globals.rename[os.path.normcase(os.path.normpath(v[0]))] = v[1]
2164+ def add_rename(o, s, v, p): # pylint: disable=unused-argument
2165+ config.rename[os.path.normcase(os.path.normpath(v[0]))] = v[1]
2166
2167 parser = optparse.OptionParser(option_class=DupOption, usage=usage())
2168
2169@@ -282,14 +281,14 @@
2170 # --encrypt-key <gpg_key_id>
2171 parser.add_option(u"--encrypt-key", type=u"string", metavar=_(u"gpg-key-id"),
2172 dest=u"", action=u"callback",
2173- callback=lambda o, s, v, p: globals.gpg_profile.recipients.append(v)) # @UndefinedVariable
2174+ callback=lambda o, s, v, p: config.gpg_profile.recipients.append(v))
2175
2176 # secret keyring in which the private encrypt key can be found
2177 parser.add_option(u"--encrypt-secret-keyring", type=u"string", metavar=_(u"path"))
2178
2179 parser.add_option(u"--encrypt-sign-key", type=u"string", metavar=_(u"gpg-key-id"),
2180 dest=u"", action=u"callback",
2181- callback=lambda o, s, v, p: (globals.gpg_profile.recipients.append(v), set_sign_key(v)))
2182+ callback=lambda o, s, v, p: (config.gpg_profile.recipients.append(v), set_sign_key(v)))
2183
2184 # TRANSL: Used in usage help to represent a "glob" style pattern for
2185 # matching one or more files, as described in the documentation.
2186@@ -382,7 +381,7 @@
2187 # --hidden-encrypt-key <gpg_key_id>
2188 parser.add_option(u"--hidden-encrypt-key", type=u"string", metavar=_(u"gpg-key-id"),
2189 dest=u"", action=u"callback",
2190- callback=lambda o, s, v, p: globals.gpg_profile.hidden_recipients.append(v)) # @UndefinedVariable
2191+ callback=lambda o, s, v, p: config.gpg_profile.hidden_recipients.append(v))
2192
2193 # ignore (some) errors during operations; supposed to make it more
2194 # likely that you are able to restore data under problematic
2195@@ -650,30 +649,30 @@
2196 # parse the options
2197 (options, args) = parser.parse_args(arglist)
2198
2199- # Copy all arguments and their values to the globals module. Don't copy
2200+ # Copy all arguments and their values to the config module. Don't copy
2201 # attributes that are 'hidden' (start with an underscore) or whose name is
2202 # the empty string (used for arguments that don't directly store a value
2203 # by using dest="")
2204 for f in [x for x in dir(options) if x and not x.startswith(u"_")]:
2205 v = getattr(options, f)
2206 # Only set if v is not None because None is the default for all the
2207- # variables. If user didn't set it, we'll use defaults in globals.py
2208+ # variables. If user didn't set it, we'll use defaults in config.py
2209 if v is not None:
2210- setattr(globals, f, v)
2211+ setattr(config, f, v)
2212
2213 # convert file_prefix* string
2214 if sys.version_info.major >= 3:
2215- if isinstance(globals.file_prefix, str):
2216- globals.file_prefix = bytes(globals.file_prefix, u'utf-8')
2217- if isinstance(globals.file_prefix_manifest, str):
2218- globals.file_prefix_manifest = bytes(globals.file_prefix_manifest, u'utf-8')
2219- if isinstance(globals.file_prefix_archive, str):
2220- globals.file_prefix_archive = bytes(globals.file_prefix_archive, u'utf-8')
2221- if isinstance(globals.file_prefix_signature, str):
2222- globals.file_prefix_signature = bytes(globals.file_prefix_signature, u'utf-8')
2223+ if isinstance(config.file_prefix, str):
2224+ config.file_prefix = bytes(config.file_prefix, u'utf-8')
2225+ if isinstance(config.file_prefix_manifest, str):
2226+ config.file_prefix_manifest = bytes(config.file_prefix_manifest, u'utf-8')
2227+ if isinstance(config.file_prefix_archive, str):
2228+ config.file_prefix_archive = bytes(config.file_prefix_archive, u'utf-8')
2229+ if isinstance(config.file_prefix_signature, str):
2230+ config.file_prefix_signature = bytes(config.file_prefix_signature, u'utf-8')
2231
2232 # todo: this should really NOT be done here
2233- socket.setdefaulttimeout(globals.timeout)
2234+ socket.setdefaulttimeout(config.timeout)
2235
2236 # expect no cmd and two positional args
2237 cmd = u""
2238@@ -703,7 +702,7 @@
2239 full_backup = True
2240 num_expect = 2
2241 elif cmd == u"incremental":
2242- globals.incremental = True
2243+ config.incremental = True
2244 num_expect = 2
2245 elif cmd == u"list-current-files":
2246 list_current = True
2247@@ -713,19 +712,19 @@
2248 arg = args.pop(0)
2249 except Exception:
2250 command_line_error(u"Missing time string for remove-older-than")
2251- globals.remove_time = dup_time.genstrtotime(arg)
2252+ config.remove_time = dup_time.genstrtotime(arg)
2253 num_expect = 1
2254 elif cmd == u"remove-all-but-n-full" or cmd == u"remove-all-inc-of-but-n-full":
2255 if cmd == u"remove-all-but-n-full":
2256- globals.remove_all_but_n_full_mode = True
2257+ config.remove_all_but_n_full_mode = True
2258 if cmd == u"remove-all-inc-of-but-n-full":
2259- globals.remove_all_inc_of_but_n_full_mode = True
2260+ config.remove_all_inc_of_but_n_full_mode = True
2261 try:
2262 arg = args.pop(0)
2263 except Exception:
2264 command_line_error(u"Missing count for " + cmd)
2265- globals.keep_chains = int(arg)
2266- if not globals.keep_chains > 0:
2267+ config.keep_chains = int(arg)
2268+ if not config.keep_chains > 0:
2269 command_line_error(cmd + u" count must be > 0")
2270 num_expect = 1
2271 elif cmd == u"verify":
2272@@ -758,19 +757,19 @@
2273 command_line_error(u"Two URLs expected for replicate.")
2274 src_backend_url, backend_url = args[0], args[1]
2275 else:
2276- lpath, backend_url = args_to_path_backend(args[0], args[1]) # @UnusedVariable
2277+ lpath, backend_url = args_to_path_backend(args[0], args[1])
2278 else:
2279 command_line_error(u"Too many arguments")
2280
2281- if globals.backup_name is None:
2282- globals.backup_name = generate_default_backup_name(backend_url)
2283+ if config.backup_name is None:
2284+ config.backup_name = generate_default_backup_name(backend_url)
2285
2286 # set and expand archive dir
2287- set_archive_dir(expand_archive_dir(globals.archive_dir,
2288- globals.backup_name))
2289+ set_archive_dir(expand_archive_dir(config.archive_dir,
2290+ config.backup_name))
2291
2292- log.Info(_(u"Using archive dir: %s") % (globals.archive_dir_path.uc_name,))
2293- log.Info(_(u"Using backup name: %s") % (globals.backup_name,))
2294+ log.Info(_(u"Using archive dir: %s") % (config.archive_dir_path.uc_name,))
2295+ log.Info(_(u"Using backup name: %s") % (config.backup_name,))
2296
2297 return args
2298
2299@@ -788,7 +787,7 @@
2300 be assumed to be for the benefit of translators, since they can get each string
2301 (paired with its preceding comment, if any) independently of the others."""
2302
2303- dict = {
2304+ trans = {
2305 # TRANSL: Used in usage help to represent a Unix-style path name. Example:
2306 # rsync://user[:password]@other_host[:port]//absolute_path
2307 u'absolute_path': _(u"absolute_path"),
2308@@ -939,7 +938,7 @@
2309 duplicity remove-all-inc-of-but-n-full %(count)s [%(options)s] %(target_url)s
2310 duplicity replicate %(source_url)s %(target_url)s
2311
2312-""" % dict
2313+""" % trans
2314
2315 # TRANSL: Header in usage help
2316 msg = msg + _(u"Backends and their URL formats:") + u"""
2317@@ -972,7 +971,7 @@
2318 webdav://%(user)s[:%(password)s]@%(other_host)s/%(some_dir)s
2319 webdavs://%(user)s[:%(password)s]@%(other_host)s/%(some_dir)s
2320
2321-""" % dict
2322+""" % trans
2323
2324 # TRANSL: Header in usage help
2325 msg = msg + _(u"Commands:") + u"""
2326@@ -986,7 +985,7 @@
2327 remove-all-but-n-full <%(count)s> <%(target_url)s>
2328 remove-all-inc-of-but-n-full <%(count)s> <%(target_url)s>
2329 verify <%(target_url)s> <%(source_dir)s>
2330- replicate <%(source_url)s> <%(target_url)s>""" % dict
2331+ replicate <%(source_url)s> <%(target_url)s>""" % trans
2332
2333 return msg
2334
2335@@ -1003,24 +1002,24 @@
2336 log.FatalError(_(u"Specified archive directory '%s' does not exist, "
2337 u"or is not a directory") % (archive_dir_path.uc_name,),
2338 log.ErrorCode.bad_archive_dir)
2339- globals.archive_dir_path = archive_dir_path
2340+ config.archive_dir_path = archive_dir_path
2341
2342
2343 def set_sign_key(sign_key):
2344- u"""Set globals.sign_key assuming proper key given"""
2345+ u"""Set config.sign_key assuming proper key given"""
2346 if not re.search(u"^(0x)?([0-9A-Fa-f]{8}|[0-9A-Fa-f]{16}|[0-9A-Fa-f]{40})$", sign_key):
2347 log.FatalError(_(u"Sign key should be an 8, 16 alt. 40 character hex string, like "
2348 u"'AA0E73D2'.\nReceived '%s' instead.") % (sign_key,),
2349 log.ErrorCode.bad_sign_key)
2350- globals.gpg_profile.sign_key = sign_key
2351+ config.gpg_profile.sign_key = sign_key
2352
2353
2354 def set_selection():
2355 u"""Return selection iter starting at filename with arguments applied"""
2356 global select_opts, select_files
2357- sel = selection.Select(globals.local_path)
2358+ sel = selection.Select(config.local_path)
2359 sel.ParseArgs(select_opts, select_files)
2360- globals.select = sel.set_iter()
2361+ config.select = sel.set_iter()
2362
2363
2364 def args_to_path_backend(arg1, arg2):
2365@@ -1056,7 +1055,7 @@
2366 """
2367 path, bend = args_to_path_backend(arg1, arg2)
2368
2369- globals.backend = backend.get_backend(bend)
2370+ config.backend = backend.get_backend(bend)
2371
2372 if path == arg2:
2373 return (None, arg2) # False?
2374@@ -1065,10 +1064,10 @@
2375
2376
2377 def process_local_dir(action, local_pathname):
2378- u"""Check local directory, set globals.local_path"""
2379+ u"""Check local directory, set config.local_path"""
2380 local_path = path.Path(path.Path(local_pathname).get_canonical())
2381 if action == u"restore":
2382- if (local_path.exists() and not local_path.isemptydir()) and not globals.force:
2383+ if (local_path.exists() and not local_path.isemptydir()) and not config.force:
2384 log.FatalError(_(u"Restore destination directory %s already "
2385 u"exists.\nWill not overwrite.") % (local_path.uc_name,),
2386 log.ErrorCode.restore_dir_exists)
2387@@ -1084,7 +1083,7 @@
2388 % (local_path.uc_name,),
2389 log.ErrorCode.backup_dir_doesnt_exist)
2390
2391- globals.local_path = local_path
2392+ config.local_path = local_path
2393
2394
2395 def check_consistency(action):
2396@@ -1102,12 +1101,12 @@
2397 if action in [u"list-current", u"collection-status",
2398 u"cleanup", u"remove-old", u"remove-all-but-n-full", u"remove-all-inc-of-but-n-full", u"replicate"]:
2399 assert_only_one([list_current, collection_status, cleanup, replicate,
2400- globals.remove_time is not None])
2401+ config.remove_time is not None])
2402 elif action == u"restore" or action == u"verify":
2403 if full_backup:
2404 command_line_error(u"--full option cannot be used when "
2405 u"restoring or verifying")
2406- elif globals.incremental:
2407+ elif config.incremental:
2408 command_line_error(u"--incremental option cannot be used when "
2409 u"restoring or verifying")
2410 if select_opts and action == u"restore":
2411@@ -1119,36 +1118,36 @@
2412 if verify:
2413 command_line_error(u"--verify option cannot be used "
2414 u"when backing up")
2415- if globals.restore_dir:
2416+ if config.restore_dir:
2417 command_line_error(u"restore option incompatible with %s backup"
2418 % (action,))
2419- if sum([globals.s3_use_rrs, globals.s3_use_ia, globals.s3_use_onezone_ia]) >= 2:
2420+ if sum([config.s3_use_rrs, config.s3_use_ia, config.s3_use_onezone_ia]) >= 2:
2421 command_line_error(u"only one of --s3-use-rrs, --s3-use-ia, and --s3-use-onezone-ia may be used")
2422
2423
2424 def ProcessCommandLine(cmdline_list):
2425- u"""Process command line, set globals, return action
2426+ u"""Process command line, set config, return action
2427
2428 action will be "list-current", "collection-status", "cleanup",
2429 "remove-old", "restore", "verify", "full", or "inc".
2430
2431 """
2432 # build initial gpg_profile
2433- globals.gpg_profile = gpg.GPGProfile()
2434+ config.gpg_profile = gpg.GPGProfile()
2435
2436 # parse command line
2437 args = parse_cmdline_options(cmdline_list)
2438
2439 # if we get a different gpg-binary from the commandline then redo gpg_profile
2440- if globals.gpg_binary is not None:
2441- src = globals.gpg_profile
2442- globals.gpg_profile = gpg.GPGProfile(
2443+ if config.gpg_binary is not None:
2444+ src = config.gpg_profile
2445+ config.gpg_profile = gpg.GPGProfile(
2446 passphrase=src.passphrase,
2447 sign_key=src.sign_key,
2448 recipients=src.recipients,
2449 hidden_recipients=src.hidden_recipients)
2450 log.Debug(_(u"GPG binary is %s, version %s") %
2451- ((globals.gpg_binary or u'gpg'), globals.gpg_profile.gpg_version))
2452+ ((config.gpg_binary or u'gpg'), config.gpg_profile.gpg_version))
2453
2454 # we can now try to import all the backends
2455 backend.import_backends()
2456@@ -1164,24 +1163,24 @@
2457 action = u"collection-status"
2458 elif cleanup:
2459 action = u"cleanup"
2460- elif globals.remove_time is not None:
2461+ elif config.remove_time is not None:
2462 action = u"remove-old"
2463- elif globals.remove_all_but_n_full_mode:
2464+ elif config.remove_all_but_n_full_mode:
2465 action = u"remove-all-but-n-full"
2466- elif globals.remove_all_inc_of_but_n_full_mode:
2467+ elif config.remove_all_inc_of_but_n_full_mode:
2468 action = u"remove-all-inc-of-but-n-full"
2469 else:
2470 command_line_error(u"Too few arguments")
2471- globals.backend = backend.get_backend(args[0])
2472- if not globals.backend:
2473+ config.backend = backend.get_backend(args[0])
2474+ if not config.backend:
2475 log.FatalError(_(u"""Bad URL '%s'.
2476 Examples of URL strings are "scp://user@host.net:1234/path" and
2477 "file:///usr/local". See the man page for more information.""") % (args[0],),
2478 log.ErrorCode.bad_url)
2479 elif len(args) == 2:
2480 if replicate:
2481- globals.src_backend = backend.get_backend(args[0])
2482- globals.backend = backend.get_backend(args[1])
2483+ config.src_backend = backend.get_backend(args[0])
2484+ config.backend = backend.get_backend(args[1])
2485 action = u"replicate"
2486 else:
2487 # Figure out whether backup or restore
2488
2489=== renamed file 'duplicity/globals.py' => 'duplicity/config.py'
2490--- duplicity/globals.py 2020-02-12 19:33:59 +0000
2491+++ duplicity/config.py 2020-03-22 12:35:54 +0000
2492@@ -1,4 +1,4 @@
2493-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
2494+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
2495 #
2496 # Copyright 2002 Ben Escoto <ben@emerose.org>
2497 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
2498
2499=== modified file 'duplicity/diffdir.py'
2500--- duplicity/diffdir.py 2019-12-09 15:47:05 +0000
2501+++ duplicity/diffdir.py 2020-03-22 12:35:54 +0000
2502@@ -1,4 +1,4 @@
2503-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
2504+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
2505 #
2506 # Copyright 2002 Ben Escoto <ben@emerose.org>
2507 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
2508@@ -37,13 +37,13 @@
2509 from builtins import object
2510
2511 import io
2512-import types
2513-import math
2514+import sys
2515+
2516 from duplicity import statistics
2517 from duplicity import util
2518-from duplicity import globals
2519-from duplicity.path import * # @UnusedWildImport
2520-from duplicity.lazy import * # @UnusedWildImport
2521+from duplicity import config
2522+from duplicity.path import * # pylint: disable=unused-wildcard-import,redefined-builtin
2523+from duplicity.lazy import * # pylint: disable=unused-wildcard-import,redefined-builtin
2524 from duplicity import progress
2525
2526 # A StatsObj will be written to this from DirDelta and DirDelta_WriteSig.
2527@@ -96,13 +96,13 @@
2528 else:
2529 sig_iter = sigtar2path_iter(dirsig_fileobj_list)
2530 delta_iter = get_delta_iter(path_iter, sig_iter)
2531- if globals.dry_run or (globals.progress and not progress.tracker.has_collected_evidence()):
2532+ if config.dry_run or (config.progress and not progress.tracker.has_collected_evidence()):
2533 return DummyBlockIter(delta_iter)
2534 else:
2535 return DeltaTarBlockIter(delta_iter)
2536
2537
2538-def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None):
2539+def delta_iter_error_handler(exc, new_path, sig_path, sig_tar=None): # pylint: disable=unused-argument
2540 u"""
2541 Called by get_delta_iter, report error in getting delta
2542 """
2543@@ -389,7 +389,7 @@
2544 else:
2545 sig_path_iter = sigtar2path_iter(sig_infp_list)
2546 delta_iter = get_delta_iter(path_iter, sig_path_iter, newsig_outfp)
2547- if globals.dry_run or (globals.progress and not progress.tracker.has_collected_evidence()):
2548+ if config.dry_run or (config.progress and not progress.tracker.has_collected_evidence()):
2549 return DummyBlockIter(delta_iter)
2550 else:
2551 return DeltaTarBlockIter(delta_iter)
2552@@ -502,15 +502,15 @@
2553 Make tarblock out of tarinfo and file data
2554 """
2555 tarinfo.size = len(file_data)
2556- headers = tarinfo.tobuf(errors=u'replace', encoding=globals.fsencoding)
2557- blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE) # @UnusedVariable
2558+ headers = tarinfo.tobuf(errors=u'replace', encoding=config.fsencoding)
2559+ blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE)
2560 if remainder > 0:
2561 filler_data = b"\0" * (tarfile.BLOCKSIZE - remainder)
2562 else:
2563 filler_data = b""
2564 return TarBlock(index, b"%s%s%s" % (headers, file_data, filler_data))
2565
2566- def process(self, val):
2567+ def process(self, val): # pylint: disable=unused-argument
2568 u"""
2569 Turn next value of input_iter into a TarBlock
2570 """
2571@@ -589,7 +589,7 @@
2572 u"""
2573 Return closing string for tarfile, reset offset
2574 """
2575- blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE) # @UnusedVariable
2576+ blocks, remainder = divmod(self.offset, tarfile.RECORDSIZE)
2577 self.offset = 0
2578 return b'\0' * (tarfile.RECORDSIZE - remainder) # remainder can be 0
2579
2580@@ -767,4 +767,4 @@
2581 else:
2582 # Split file into about 2000 pieces, rounding to 512
2583 file_blocksize = int((file_len / (2000 * 512))) * 512
2584- return min(file_blocksize, globals.max_blocksize)
2585+ return min(file_blocksize, config.max_blocksize)
2586
2587=== modified file 'duplicity/dup_collections.py'
2588--- duplicity/dup_collections.py 2020-03-03 16:35:31 +0000
2589+++ duplicity/dup_collections.py 2020-03-22 12:35:54 +0000
2590@@ -1,4 +1,4 @@
2591-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
2592+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
2593 #
2594 # Copyright 2002 Ben Escoto <ben@emerose.org>
2595 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
2596@@ -21,15 +21,12 @@
2597
2598 u"""Classes and functions on collections of backup volumes"""
2599
2600-from past.builtins import cmp
2601-from builtins import filter
2602 from builtins import str
2603 from builtins import zip
2604 from builtins import map
2605 from builtins import range
2606 from builtins import object
2607
2608-import types
2609 import sys
2610
2611 from duplicity import log
2612@@ -37,7 +34,7 @@
2613 from duplicity import path
2614 from duplicity import util
2615 from duplicity import dup_time
2616-from duplicity import globals
2617+from duplicity import config
2618 from duplicity import manifest
2619 from duplicity import util
2620 from duplicity.gpg import GPGError
2621@@ -153,7 +150,7 @@
2622 self.remote_manifest_name = remote_filename
2623
2624 if self.action != u"replicate":
2625- local_filename_list = globals.archive_dir_path.listdir()
2626+ local_filename_list = config.archive_dir_path.listdir()
2627 else:
2628 local_filename_list = []
2629 for local_filename in local_filename_list:
2630@@ -163,7 +160,7 @@
2631 pr.start_time == self.start_time and
2632 pr.end_time == self.end_time):
2633 self.local_manifest_path = \
2634- globals.archive_dir_path.append(local_filename)
2635+ config.archive_dir_path.append(local_filename)
2636
2637 self.set_files_changed()
2638 break
2639@@ -180,7 +177,7 @@
2640 log.Debug(_(u"BackupSet.delete: missing %s") % [util.fsdecode(f) for f in rfn])
2641 pass
2642 if self.action != u"replicate":
2643- local_filename_list = globals.archive_dir_path.listdir()
2644+ local_filename_list = config.archive_dir_path.listdir()
2645 else:
2646 local_filename_list = []
2647 for lfn in local_filename_list:
2648@@ -189,7 +186,7 @@
2649 pr.start_time == self.start_time and
2650 pr.end_time == self.end_time):
2651 try:
2652- globals.archive_dir_path.append(lfn).delete()
2653+ config.archive_dir_path.append(lfn).delete()
2654 except Exception:
2655 log.Debug(_(u"BackupSet.delete: missing %s") % [util.fsdecode(f) for f in lfn])
2656 pass
2657@@ -415,16 +412,16 @@
2658 l = []
2659 for s in self.get_all_sets():
2660 if s.time:
2661- type = u"full"
2662+ btype = u"full"
2663 time = s.time
2664 else:
2665- type = u"inc"
2666+ btype = u"inc"
2667 time = s.end_time
2668 if s.encrypted:
2669 enc = u"enc"
2670 else:
2671 enc = u"noenc"
2672- l.append(u"%s%s %s %d %s" % (prefix, type, dup_time.timetostring(time), (len(s)), enc))
2673+ l.append(u"%s%s %s %d %s" % (prefix, btype, dup_time.timetostring(time), (len(s)), enc))
2674 return l
2675
2676 def __str__(self):
2677@@ -443,12 +440,12 @@
2678
2679 for s in self.get_all_sets():
2680 if s.time:
2681- type = _(u"Full")
2682+ btype = _(u"Full")
2683 time = s.time
2684 else:
2685- type = _(u"Incremental")
2686+ btype = _(u"Incremental")
2687 time = s.end_time
2688- l.append(set_schema % (type, dup_time.timetopretty(time), len(s)))
2689+ l.append(set_schema % (btype, dup_time.timetopretty(time), len(s)))
2690
2691 l.append(u"-------------------------")
2692 return u"\n".join(l)
2693@@ -484,14 +481,14 @@
2694 Return new SignatureChain.
2695
2696 local should be true iff the signature chain resides in
2697- globals.archive_dir_path and false if the chain is in
2698- globals.backend.
2699+ config.archive_dir_path and false if the chain is in
2700+ config.backend.
2701
2702- @param local: True if sig chain in globals.archive_dir_path
2703+ @param local: True if sig chain in config.archive_dir_path
2704 @type local: Boolean
2705
2706 @param location: Where the sig chain is located
2707- @type location: globals.archive_dir_path or globals.backend
2708+ @type location: config.archive_dir_path or config.backend
2709 """
2710 if local:
2711 self.archive_dir_path, self.backend = location, None
2712@@ -855,7 +852,7 @@
2713 Try adding filename to existing sets, or make new one
2714 """
2715 pr = file_naming.parse(filename)
2716- for set in sets:
2717+ for set in sets: # pylint: disable=redefined-builtin
2718 if set.add_filename(filename, pr):
2719 log.Debug(_(u"File %s is part of known set") % (util.fsdecode(filename),))
2720 break
2721@@ -873,7 +870,7 @@
2722
2723 chains, orphaned_sets = [], []
2724
2725- def add_to_chains(set):
2726+ def add_to_chains(set): # pylint: disable=redefined-builtin
2727 u"""
2728 Try adding set to existing chains, or make new one
2729 """
2730@@ -901,7 +898,7 @@
2731 Sort set list by end time, return (sorted list, incomplete)
2732 """
2733 time_set_pairs, incomplete_sets = [], []
2734- for set in set_list:
2735+ for set in set_list: # pylint: disable=redefined-builtin
2736 if not set.is_complete():
2737 incomplete_sets.append(set)
2738 elif set.type == u"full":
2739@@ -1154,7 +1151,7 @@
2740 if len(self.all_backup_chains) < n:
2741 return None
2742
2743- sorted = self.all_backup_chains[:]
2744+ sorted = self.all_backup_chains[:] # pylint: disable=redefined-builtin
2745 sorted.sort(key=lambda x: x.get_first().time)
2746
2747 sorted.reverse()
2748@@ -1234,7 +1231,7 @@
2749 backup_type = s[0]
2750 backup_set = s[1]
2751 if backup_set.time:
2752- type = _(u"Full")
2753+ type = _(u"Full") # pylint: disable=redefined-builtin
2754 else:
2755 type = _(u"Incremental")
2756 l.append(set_schema % (type, dup_time.timetopretty(backup_set.get_time()), backup_type.title()))
2757
2758=== modified file 'duplicity/dup_main.py'
2759--- duplicity/dup_main.py 2020-02-12 19:33:59 +0000
2760+++ duplicity/dup_main.py 2020-03-22 12:35:54 +0000
2761@@ -1,4 +1,4 @@
2762-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
2763+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
2764 #
2765 # duplicity -- Encrypted bandwidth efficient backup
2766 #
2767@@ -28,7 +28,6 @@
2768 from __future__ import print_function
2769 from future import standard_library
2770 standard_library.install_aliases()
2771-from builtins import filter
2772 from builtins import map
2773 from builtins import next
2774 from builtins import object
2775@@ -36,18 +35,11 @@
2776
2777 import copy
2778 import fasteners
2779-import gzip
2780 import os
2781 import platform
2782-import re
2783 import resource
2784 import sys
2785 import time
2786-import traceback
2787-import types
2788-
2789-from datetime import datetime
2790-from os import statvfs
2791
2792 from duplicity import __version__
2793 from duplicity import asyncscheduler
2794@@ -56,16 +48,14 @@
2795 from duplicity import dup_collections
2796 from duplicity import dup_temp
2797 from duplicity import dup_time
2798-from duplicity import errors
2799 from duplicity import file_naming
2800-from duplicity import globals
2801+from duplicity import config
2802 from duplicity import gpg
2803 from duplicity import log
2804 from duplicity import manifest
2805 from duplicity import patchdir
2806 from duplicity import path
2807 from duplicity import progress
2808-from duplicity import robust
2809 from duplicity import tempdir
2810 from duplicity import util
2811
2812@@ -115,15 +105,15 @@
2813 # check if we can reuse an already set (signing_)passphrase
2814 # if signing key is also an encryption key assume that the passphrase is identical
2815 if (for_signing and
2816- (globals.gpg_profile.sign_key in globals.gpg_profile.recipients or
2817- globals.gpg_profile.sign_key in globals.gpg_profile.hidden_recipients) and
2818+ (config.gpg_profile.sign_key in config.gpg_profile.recipients or
2819+ config.gpg_profile.sign_key in config.gpg_profile.hidden_recipients) and
2820 u'PASSPHRASE' in os.environ): # noqa
2821 log.Notice(_(u"Reuse configured PASSPHRASE as SIGN_PASSPHRASE"))
2822 return os.environ[u'PASSPHRASE']
2823 # if one encryption key is also the signing key assume that the passphrase is identical
2824 if (not for_signing and
2825- (globals.gpg_profile.sign_key in globals.gpg_profile.recipients or
2826- globals.gpg_profile.sign_key in globals.gpg_profile.hidden_recipients) and
2827+ (config.gpg_profile.sign_key in config.gpg_profile.recipients or
2828+ config.gpg_profile.sign_key in config.gpg_profile.hidden_recipients) and
2829 u'SIGN_PASSPHRASE' in os.environ): # noqa
2830 log.Notice(_(u"Reuse configured SIGN_PASSPHRASE as PASSPHRASE"))
2831 return os.environ[u'SIGN_PASSPHRASE']
2832@@ -136,7 +126,7 @@
2833 # - gpg-agent supplies all, no user interaction
2834
2835 # no passphrase if --no-encryption or --use-agent
2836- if not globals.encryption or globals.use_agent:
2837+ if not config.encryption or config.use_agent:
2838 return u""
2839
2840 # these commands don't need a password
2841@@ -151,15 +141,15 @@
2842 # for a full backup, we don't need a password if
2843 # there is no sign_key and there are recipients
2844 elif (action == u"full" and
2845- (globals.gpg_profile.recipients or globals.gpg_profile.hidden_recipients) and not
2846- globals.gpg_profile.sign_key):
2847+ (config.gpg_profile.recipients or config.gpg_profile.hidden_recipients) and not
2848+ config.gpg_profile.sign_key):
2849 return u""
2850
2851 # for an inc backup, we don't need a password if
2852 # there is no sign_key and there are recipients
2853 elif (action == u"inc" and
2854- (globals.gpg_profile.recipients or globals.gpg_profile.hidden_recipients) and not
2855- globals.gpg_profile.sign_key):
2856+ (config.gpg_profile.recipients or config.gpg_profile.hidden_recipients) and not
2857+ config.gpg_profile.sign_key):
2858 return u""
2859
2860 # Finally, ask the user for the passphrase
2861@@ -171,18 +161,18 @@
2862 # if the user made a typo in the first passphrase
2863 if use_cache and n == 2:
2864 if for_signing:
2865- pass1 = globals.gpg_profile.signing_passphrase
2866+ pass1 = config.gpg_profile.signing_passphrase
2867 else:
2868- pass1 = globals.gpg_profile.passphrase
2869+ pass1 = config.gpg_profile.passphrase
2870 else:
2871 if for_signing:
2872- if use_cache and globals.gpg_profile.signing_passphrase:
2873- pass1 = globals.gpg_profile.signing_passphrase
2874+ if use_cache and config.gpg_profile.signing_passphrase:
2875+ pass1 = config.gpg_profile.signing_passphrase
2876 else:
2877 pass1 = getpass_safe(_(u"GnuPG passphrase for signing key:") + u" ")
2878 else:
2879- if use_cache and globals.gpg_profile.passphrase:
2880- pass1 = globals.gpg_profile.passphrase
2881+ if use_cache and config.gpg_profile.passphrase:
2882+ pass1 = config.gpg_profile.passphrase
2883 else:
2884 pass1 = getpass_safe(_(u"GnuPG passphrase:") + u" ")
2885
2886@@ -199,8 +189,8 @@
2887 use_cache = False
2888 continue
2889
2890- if not pass1 and not (globals.gpg_profile.recipients or
2891- globals.gpg_profile.hidden_recipients) and not for_signing:
2892+ if not pass1 and not (config.gpg_profile.recipients or
2893+ config.gpg_profile.hidden_recipients) and not for_signing:
2894 log.Log(_(u"Cannot use empty passphrase with symmetric encryption! Please try again."),
2895 log.WARNING, force_print=True)
2896 use_cache = False
2897@@ -242,8 +232,8 @@
2898 @rtype: int
2899 @return: constant 0 (zero)
2900 """
2901- last_index = globals.restart.last_index
2902- last_block = globals.restart.last_block
2903+ last_index = config.restart.last_index
2904+ last_block = config.restart.last_block
2905 try:
2906 # Just spin our wheels
2907 iter_result = next(tarblock_iter)
2908@@ -326,7 +316,7 @@
2909 instead of copying.
2910 """
2911 putsize = tdp.getsize()
2912- if globals.skip_volume != vol_num: # for testing purposes only
2913+ if config.skip_volume != vol_num: # for testing purposes only
2914 backend.put(tdp, dest_filename)
2915 validate_block(putsize, dest_filename)
2916 if tdp.stat:
2917@@ -345,45 +335,45 @@
2918 from encrypted to non in the middle of a backup chain), so we check
2919 that the vol1 filename on the server matches the settings of this run.
2920 """
2921- if ((globals.gpg_profile.recipients or globals.gpg_profile.hidden_recipients) and
2922- not globals.gpg_profile.sign_key):
2923+ if ((config.gpg_profile.recipients or config.gpg_profile.hidden_recipients) and
2924+ not config.gpg_profile.sign_key):
2925 # When using gpg encryption without a signing key, we skip this validation
2926 # step to ensure that we can still backup without needing the secret key
2927 # on the machine.
2928 return
2929
2930 vol1_filename = file_naming.get(backup_type, 1,
2931- encrypted=globals.encryption,
2932- gzipped=globals.compression)
2933+ encrypted=config.encryption,
2934+ gzipped=config.compression)
2935 if vol1_filename != backup_set.volume_name_dict[1]:
2936 log.FatalError(_(u"Restarting backup, but current encryption "
2937 u"settings do not match original settings"),
2938 log.ErrorCode.enryption_mismatch)
2939
2940 # Settings are same, let's check passphrase itself if we are encrypted
2941- if globals.encryption:
2942- fileobj = restore_get_enc_fileobj(globals.backend, vol1_filename,
2943+ if config.encryption:
2944+ fileobj = restore_get_enc_fileobj(config.backend, vol1_filename,
2945 manifest.volume_info_dict[1])
2946 fileobj.close()
2947
2948- if not globals.restart:
2949+ if not config.restart:
2950 # normal backup start
2951 vol_num = 0
2952 mf = manifest.Manifest(fh=man_outfp)
2953 mf.set_dirinfo()
2954 else:
2955 # restart from last known position
2956- mf = globals.restart.last_backup.get_local_manifest()
2957- globals.restart.checkManifest(mf)
2958- globals.restart.setLastSaved(mf)
2959- validate_encryption_settings(globals.restart.last_backup, mf)
2960+ mf = config.restart.last_backup.get_local_manifest()
2961+ config.restart.checkManifest(mf)
2962+ config.restart.setLastSaved(mf)
2963+ validate_encryption_settings(config.restart.last_backup, mf)
2964 mf.fh = man_outfp
2965- last_block = globals.restart.last_block
2966+ last_block = config.restart.last_block
2967 log.Notice(_(u"Restarting after volume %s, file %s, block %s") %
2968- (globals.restart.start_vol,
2969- util.uindex(globals.restart.last_index),
2970- globals.restart.last_block))
2971- vol_num = globals.restart.start_vol
2972+ (config.restart.start_vol,
2973+ util.uindex(config.restart.last_index),
2974+ config.restart.last_block))
2975+ vol_num = config.restart.start_vol
2976 restart_position_iterator(tarblock_iter)
2977
2978 at_end = 0
2979@@ -391,7 +381,7 @@
2980
2981 # If --progress option is given, initiate a background thread that will
2982 # periodically report progress to the Log.
2983- if globals.progress:
2984+ if config.progress:
2985 progress.tracker.set_start_volume(vol_num + 1)
2986 progress.progress_thread.start()
2987
2988@@ -403,9 +393,9 @@
2989 # is an assert put in place to avoid someone accidentally
2990 # enabling concurrency above 1, before adequate work has been
2991 # done on the backends to make them support concurrency.
2992- assert globals.async_concurrency <= 1
2993+ assert config.async_concurrency <= 1
2994
2995- io_scheduler = asyncscheduler.AsyncScheduler(globals.async_concurrency)
2996+ io_scheduler = asyncscheduler.AsyncScheduler(config.async_concurrency)
2997 async_waiters = []
2998
2999 while not at_end:
3000@@ -415,18 +405,18 @@
3001 # Create volume
3002 vol_num += 1
3003 dest_filename = file_naming.get(backup_type, vol_num,
3004- encrypted=globals.encryption,
3005- gzipped=globals.compression)
3006+ encrypted=config.encryption,
3007+ gzipped=config.compression)
3008 tdp = dup_temp.new_tempduppath(file_naming.parse(dest_filename))
3009
3010 # write volume
3011- if globals.encryption:
3012- at_end = gpg.GPGWriteFile(tarblock_iter, tdp.name, globals.gpg_profile,
3013- globals.volsize)
3014- elif globals.compression:
3015- at_end = gpg.GzipWriteFile(tarblock_iter, tdp.name, globals.volsize)
3016+ if config.encryption:
3017+ at_end = gpg.GPGWriteFile(tarblock_iter, tdp.name, config.gpg_profile,
3018+ config.volsize)
3019+ elif config.compression:
3020+ at_end = gpg.GzipWriteFile(tarblock_iter, tdp.name, config.volsize)
3021 else:
3022- at_end = gpg.PlainWriteFile(tarblock_iter, tdp.name, globals.volsize)
3023+ at_end = gpg.PlainWriteFile(tarblock_iter, tdp.name, config.volsize)
3024 tdp.setdata()
3025
3026 # Add volume information to manifest
3027@@ -452,11 +442,11 @@
3028 log.Progress(_(u'Processed volume %d') % vol_num, diffdir.stats.SourceFileSize)
3029 # Snapshot (serialize) progress now as a Volume has been completed.
3030 # This is always the last restore point when it comes to restart a failed backup
3031- if globals.progress:
3032+ if config.progress:
3033 progress.tracker.snapshot_progress(vol_num)
3034
3035 # for testing purposes only - assert on inc or full
3036- assert globals.fail_on_volume != vol_num, u"Forced assertion for testing at volume %d" % vol_num
3037+ assert config.fail_on_volume != vol_num, u"Forced assertion for testing at volume %d" % vol_num
3038
3039 # Collect byte count from all asynchronous jobs; also implicitly waits
3040 # for them all to complete.
3041@@ -474,7 +464,7 @@
3042 u"""
3043 Return a fileobj opened for writing, save results as manifest
3044
3045- Save manifest in globals.archive_dir_path gzipped.
3046+ Save manifest in config.archive_dir_path gzipped.
3047 Save them on the backend encrypted as needed.
3048
3049 @type man_type: string
3050@@ -492,9 +482,9 @@
3051 manifest=True)
3052 remote_man_filename = file_naming.get(backup_type,
3053 manifest=True,
3054- encrypted=globals.encryption)
3055+ encrypted=config.encryption)
3056
3057- fh = dup_temp.get_fileobj_duppath(globals.archive_dir_path,
3058+ fh = dup_temp.get_fileobj_duppath(config.archive_dir_path,
3059 part_man_filename,
3060 perm_man_filename,
3061 remote_man_filename)
3062@@ -505,7 +495,7 @@
3063 u"""
3064 Return a fileobj opened for writing, save results as signature
3065
3066- Save signatures in globals.archive_dir gzipped.
3067+ Save signatures in config.archive_dir gzipped.
3068 Save them on the backend encrypted as needed.
3069
3070 @type sig_type: string
3071@@ -521,10 +511,10 @@
3072 partial=True)
3073 perm_sig_filename = file_naming.get(sig_type,
3074 gzipped=True)
3075- remote_sig_filename = file_naming.get(sig_type, encrypted=globals.encryption,
3076- gzipped=globals.compression)
3077+ remote_sig_filename = file_naming.get(sig_type, encrypted=config.encryption,
3078+ gzipped=config.compression)
3079
3080- fh = dup_temp.get_fileobj_duppath(globals.archive_dir_path,
3081+ fh = dup_temp.get_fileobj_duppath(config.archive_dir_path,
3082 part_sig_filename,
3083 perm_sig_filename,
3084 remote_sig_filename,
3085@@ -542,30 +532,30 @@
3086 @rtype: void
3087 @return: void
3088 """
3089- if globals.progress:
3090+ if config.progress:
3091 progress.tracker = progress.ProgressTracker()
3092 # Fake a backup to compute total of moving bytes
3093- tarblock_iter = diffdir.DirFull(globals.select)
3094+ tarblock_iter = diffdir.DirFull(config.select)
3095 dummy_backup(tarblock_iter)
3096 # Store computed stats to compute progress later
3097 progress.tracker.set_evidence(diffdir.stats, True)
3098- # Reinit the globals.select iterator, so
3099+ # Reinit the config.select iterator, so
3100 # the core of duplicity can rescan the paths
3101 commandline.set_selection()
3102 progress.progress_thread = progress.LogProgressThread()
3103
3104- if globals.dry_run:
3105- tarblock_iter = diffdir.DirFull(globals.select)
3106+ if config.dry_run:
3107+ tarblock_iter = diffdir.DirFull(config.select)
3108 bytes_written = dummy_backup(tarblock_iter)
3109 col_stats.set_values(sig_chain_warning=None)
3110 else:
3111 sig_outfp = get_sig_fileobj(u"full-sig")
3112 man_outfp = get_man_fileobj(u"full")
3113- tarblock_iter = diffdir.DirFull_WriteSig(globals.select,
3114+ tarblock_iter = diffdir.DirFull_WriteSig(config.select,
3115 sig_outfp)
3116 bytes_written = write_multivol(u"full", tarblock_iter,
3117 man_outfp, sig_outfp,
3118- globals.backend)
3119+ config.backend)
3120
3121 # close sig file, send to remote, and rename to final
3122 sig_outfp.close()
3123@@ -577,7 +567,7 @@
3124 man_outfp.to_remote()
3125 man_outfp.to_final()
3126
3127- if globals.progress:
3128+ if config.progress:
3129 # Terminate the background thread now, if any
3130 progress.progress_thread.finished = True
3131 progress.progress_thread.join()
3132@@ -598,7 +588,7 @@
3133 @param col_stats: collection status
3134 """
3135 if not col_stats.matched_chain_pair:
3136- if globals.incremental:
3137+ if config.incremental:
3138 log.FatalError(_(u"Fatal Error: Unable to start incremental backup. "
3139 u"Old signatures not found and incremental specified"),
3140 log.ErrorCode.inc_without_sigs)
3141@@ -608,14 +598,14 @@
3142 return col_stats.matched_chain_pair[0]
3143
3144
3145-def print_statistics(stats, bytes_written):
3146+def print_statistics(stats, bytes_written): # pylint: disable=unused-argument
3147 u"""
3148- If globals.print_statistics, print stats after adding bytes_written
3149+ If config.print_statistics, print stats after adding bytes_written
3150
3151 @rtype: void
3152 @return: void
3153 """
3154- if globals.print_statistics:
3155+ if config.print_statistics:
3156 diffdir.stats.TotalDestinationSizeChange = bytes_written
3157 logstring = diffdir.stats.get_stats_logstring(_(u"Backup Statistics"))
3158 log.Log(logstring, log.NOTICE, force_print=True)
3159@@ -628,7 +618,7 @@
3160 @rtype: void
3161 @return: void
3162 """
3163- if not globals.restart:
3164+ if not config.restart:
3165 dup_time.setprevtime(sig_chain.end_time)
3166 if dup_time.curtime == dup_time.prevtime:
3167 time.sleep(2)
3168@@ -636,32 +626,32 @@
3169 assert dup_time.curtime != dup_time.prevtime, \
3170 u"time not moving forward at appropriate pace - system clock issues?"
3171
3172- if globals.progress:
3173+ if config.progress:
3174 progress.tracker = progress.ProgressTracker()
3175 # Fake a backup to compute total of moving bytes
3176- tarblock_iter = diffdir.DirDelta(globals.select,
3177+ tarblock_iter = diffdir.DirDelta(config.select,
3178 sig_chain.get_fileobjs())
3179 dummy_backup(tarblock_iter)
3180 # Store computed stats to compute progress later
3181 progress.tracker.set_evidence(diffdir.stats, False)
3182- # Reinit the globals.select iterator, so
3183+ # Reinit the config.select iterator, so
3184 # the core of duplicity can rescan the paths
3185 commandline.set_selection()
3186 progress.progress_thread = progress.LogProgressThread()
3187
3188- if globals.dry_run:
3189- tarblock_iter = diffdir.DirDelta(globals.select,
3190+ if config.dry_run:
3191+ tarblock_iter = diffdir.DirDelta(config.select,
3192 sig_chain.get_fileobjs())
3193 bytes_written = dummy_backup(tarblock_iter)
3194 else:
3195 new_sig_outfp = get_sig_fileobj(u"new-sig")
3196 new_man_outfp = get_man_fileobj(u"inc")
3197- tarblock_iter = diffdir.DirDelta_WriteSig(globals.select,
3198+ tarblock_iter = diffdir.DirDelta_WriteSig(config.select,
3199 sig_chain.get_fileobjs(),
3200 new_sig_outfp)
3201 bytes_written = write_multivol(u"inc", tarblock_iter,
3202 new_man_outfp, new_sig_outfp,
3203- globals.backend)
3204+ config.backend)
3205
3206 # close sig file and rename to final
3207 new_sig_outfp.close()
3208@@ -673,7 +663,7 @@
3209 new_man_outfp.to_remote()
3210 new_man_outfp.to_final()
3211
3212- if globals.progress:
3213+ if config.progress:
3214 # Terminate the background thread now, if any
3215 progress.progress_thread.finished = True
3216 progress.progress_thread.join()
3217@@ -694,7 +684,7 @@
3218 @rtype: void
3219 @return: void
3220 """
3221- time = globals.restore_time or dup_time.curtime
3222+ time = config.restore_time or dup_time.curtime
3223 sig_chain = col_stats.get_signature_chain_at_time(time)
3224 path_iter = diffdir.get_combined_path_iter(sig_chain.get_fileobjs(time))
3225 for path in path_iter:
3226@@ -710,7 +700,7 @@
3227
3228 def restore(col_stats):
3229 u"""
3230- Restore archive in globals.backend to globals.local_path
3231+ Restore archive in config.backend to config.local_path
3232
3233 @type col_stats: CollectionStatus object
3234 @param col_stats: collection status
3235@@ -718,13 +708,13 @@
3236 @rtype: void
3237 @return: void
3238 """
3239- if globals.dry_run:
3240+ if config.dry_run:
3241 return
3242- if not patchdir.Write_ROPaths(globals.local_path,
3243+ if not patchdir.Write_ROPaths(config.local_path,
3244 restore_get_patched_rop_iter(col_stats)):
3245- if globals.restore_dir:
3246+ if config.restore_dir:
3247 log.FatalError(_(u"%s not found in archive - no files restored.")
3248- % (util.fsdecode(globals.restore_dir)),
3249+ % (util.fsdecode(config.restore_dir)),
3250 log.ErrorCode.restore_dir_not_found)
3251 else:
3252 log.FatalError(_(u"No files found in archive - nothing restored."),
3253@@ -738,11 +728,11 @@
3254 @type col_stats: CollectionStatus object
3255 @param col_stats: collection status
3256 """
3257- if globals.restore_dir:
3258- index = tuple(globals.restore_dir.split(b"/"))
3259+ if config.restore_dir:
3260+ index = tuple(config.restore_dir.split(b"/"))
3261 else:
3262 index = ()
3263- time = globals.restore_time or dup_time.curtime
3264+ time = config.restore_time or dup_time.curtime
3265 backup_chain = col_stats.get_backup_chain_at_time(time)
3266 assert backup_chain, col_stats.all_backup_chains
3267 backup_setlist = backup_chain.get_sets_at_time(time)
3268@@ -763,14 +753,14 @@
3269 log.Progress(_(u'Processed volume %d of %d') % (cur_vol[0], num_vols),
3270 cur_vol[0], num_vols)
3271
3272- if hasattr(globals.backend, u'pre_process_download'):
3273+ if hasattr(config.backend, u'pre_process_download'):
3274 file_names = []
3275 for backup_set in backup_setlist:
3276 manifest = backup_set.get_manifest()
3277 volumes = manifest.get_containing_volumes(index)
3278 for vol_num in volumes:
3279 file_names.append(backup_set.volume_name_dict[vol_num])
3280- globals.backend.pre_process_download(file_names)
3281+ config.backend.pre_process_download(file_names)
3282
3283 fileobj_iters = list(map(get_fileobj_iter, backup_setlist))
3284 tarfiles = list(map(patchdir.TarFile_FromFileobjs, fileobj_iters))
3285@@ -782,7 +772,7 @@
3286 Return plaintext fileobj from encrypted filename on backend
3287
3288 If volume_info is set, the hash of the file will be checked,
3289- assuming some hash is available. Also, if globals.sign_key is
3290+ assuming some hash is available. Also, if config.sign_key is
3291 set, a fatal error will be raised if file not signed by sign_key.
3292
3293 """
3294@@ -802,7 +792,7 @@
3295 log.ErrorCode.mismatched_hash)
3296
3297 fileobj = tdp.filtered_open_with_delete(u"rb")
3298- if parseresults.encrypted and globals.gpg_profile.sign_key:
3299+ if parseresults.encrypted and config.gpg_profile.sign_key:
3300 restore_add_sig_check(fileobj)
3301 return fileobj
3302
3303@@ -837,7 +827,7 @@
3304 u"""Thunk run when closing volume file"""
3305 actual_sig = fileobj.fileobj.get_signature()
3306 actual_sig = u"None" if actual_sig is None else actual_sig
3307- sign_key = globals.gpg_profile.sign_key
3308+ sign_key = config.gpg_profile.sign_key
3309 sign_key = u"None" if sign_key is None else sign_key
3310 ofs = -min(len(actual_sig), len(sign_key))
3311 if actual_sig[ofs:] != sign_key[ofs:]:
3312@@ -860,7 +850,7 @@
3313 """
3314 global exit_val
3315 collated = diffdir.collate2iters(restore_get_patched_rop_iter(col_stats),
3316- globals.select)
3317+ config.select)
3318 diff_count = 0
3319 total_count = 0
3320 for backup_ropath, current_path in collated:
3321@@ -868,7 +858,7 @@
3322 backup_ropath = path.ROPath(current_path.index)
3323 if not current_path:
3324 current_path = path.ROPath(backup_ropath.index)
3325- if not backup_ropath.compare_verbose(current_path, globals.compare_data):
3326+ if not backup_ropath.compare_verbose(current_path, config.compare_data):
3327 diff_count += 1
3328 total_count += 1
3329 # Unfortunately, ngettext doesn't handle multiple number variables, so we
3330@@ -899,15 +889,15 @@
3331 return
3332
3333 filestr = u"\n".join(map(util.fsdecode, extraneous))
3334- if globals.force:
3335+ if config.force:
3336 log.Notice(ngettext(u"Deleting this file from backend:",
3337 u"Deleting these files from backend:",
3338 len(extraneous)) + u"\n" + filestr)
3339- if not globals.dry_run:
3340+ if not config.dry_run:
3341 col_stats.backend.delete(ext_remote)
3342 for fn in ext_local:
3343 try:
3344- globals.archive_dir_path.append(fn).delete()
3345+ config.archive_dir_path.append(fn).delete()
3346 except Exception:
3347 pass
3348 else:
3349@@ -927,16 +917,16 @@
3350 @rtype: void
3351 @return: void
3352 """
3353- assert globals.keep_chains is not None
3354+ assert config.keep_chains is not None
3355
3356- globals.remove_time = col_stats.get_nth_last_full_backup_time(globals.keep_chains)
3357+ config.remove_time = col_stats.get_nth_last_full_backup_time(config.keep_chains)
3358
3359 remove_old(col_stats)
3360
3361
3362 def remove_old(col_stats):
3363 u"""
3364- Remove backup files older than globals.remove_time from backend
3365+ Remove backup files older than config.remove_time from backend
3366
3367 @type col_stats: CollectionStatus object
3368 @param col_stats: collection status
3369@@ -944,7 +934,7 @@
3370 @rtype: void
3371 @return: void
3372 """
3373- assert globals.remove_time is not None
3374+ assert config.remove_time is not None
3375
3376 def set_times_str(setlist):
3377 u"""Return string listing times of sets in setlist"""
3378@@ -954,7 +944,7 @@
3379 u"""Return string listing times of chains in chainlist"""
3380 return u"\n".join([dup_time.timetopretty(s.end_time) for s in chainlist])
3381
3382- req_list = col_stats.get_older_than_required(globals.remove_time)
3383+ req_list = col_stats.get_older_than_required(config.remove_time)
3384 if req_list:
3385 log.Warn(u"%s\n%s\n%s" %
3386 (_(u"There are backup set(s) at time(s):"),
3387@@ -962,14 +952,14 @@
3388 _(u"Which can't be deleted because newer sets depend on them.")))
3389
3390 if (col_stats.matched_chain_pair and
3391- col_stats.matched_chain_pair[1].end_time < globals.remove_time):
3392+ col_stats.matched_chain_pair[1].end_time < config.remove_time):
3393 log.Warn(_(u"Current active backup chain is older than specified time. "
3394 u"However, it will not be deleted. To remove all your backups, "
3395 u"manually purge the repository."))
3396
3397- chainlist = col_stats.get_chains_older_than(globals.remove_time)
3398+ chainlist = col_stats.get_chains_older_than(config.remove_time)
3399
3400- if globals.remove_all_inc_of_but_n_full_mode:
3401+ if config.remove_all_inc_of_but_n_full_mode:
3402 # ignore chains without incremental backups:
3403 chainlist = list(x for x in chainlist if
3404 (isinstance(x, dup_collections.SignatureChain) and x.inclist) or
3405@@ -978,18 +968,18 @@
3406 if not chainlist:
3407 log.Notice(_(u"No old backup sets found, nothing deleted."))
3408 return
3409- if globals.force:
3410+ if config.force:
3411 log.Notice(ngettext(u"Deleting backup chain at time:",
3412 u"Deleting backup chains at times:",
3413 len(chainlist)) +
3414 u"\n" + chain_times_str(chainlist))
3415 # Add signature files too, since they won't be needed anymore
3416- chainlist += col_stats.get_signature_chains_older_than(globals.remove_time)
3417+ chainlist += col_stats.get_signature_chains_older_than(config.remove_time)
3418 chainlist.reverse() # save oldest for last
3419 for chain in chainlist:
3420 # if remove_all_inc_of_but_n_full_mode mode, remove only
3421 # incrementals one and not full
3422- if globals.remove_all_inc_of_but_n_full_mode:
3423+ if config.remove_all_inc_of_but_n_full_mode:
3424 if isinstance(chain, dup_collections.SignatureChain):
3425 chain_desc = _(u"Deleting any incremental signature chain rooted at %s")
3426 else:
3427@@ -1000,8 +990,8 @@
3428 else:
3429 chain_desc = _(u"Deleting complete backup chain %s")
3430 log.Notice(chain_desc % dup_time.timetopretty(chain.end_time))
3431- if not globals.dry_run:
3432- chain.delete(keep_full=globals.remove_all_inc_of_but_n_full_mode)
3433+ if not config.dry_run:
3434+ chain.delete(keep_full=config.remove_all_inc_of_but_n_full_mode)
3435 col_stats.set_values(sig_chain_warning=None)
3436 else:
3437 log.Notice(ngettext(u"Found old backup chain at the following time:",
3438@@ -1019,12 +1009,12 @@
3439 @return: void
3440 """
3441 action = u"replicate"
3442- time = globals.restore_time or dup_time.curtime
3443- src_stats = dup_collections.CollectionsStatus(globals.src_backend, None, action).set_values(sig_chain_warning=None)
3444- tgt_stats = dup_collections.CollectionsStatus(globals.backend, None, action).set_values(sig_chain_warning=None)
3445+ time = config.restore_time or dup_time.curtime
3446+ src_stats = dup_collections.CollectionsStatus(config.src_backend, None, action).set_values(sig_chain_warning=None)
3447+ tgt_stats = dup_collections.CollectionsStatus(config.backend, None, action).set_values(sig_chain_warning=None)
3448
3449- src_list = globals.src_backend.list()
3450- tgt_list = globals.backend.list()
3451+ src_list = config.src_backend.list()
3452+ tgt_list = config.backend.list()
3453
3454 src_chainlist = src_stats.get_signature_chains(local=False, filelist=src_list)[0]
3455 tgt_chainlist = tgt_stats.get_signature_chains(local=False, filelist=tgt_list)[0]
3456@@ -1054,14 +1044,14 @@
3457 dup_time.setprevtime(src_sig.start_time)
3458 dup_time.setcurtime(src_sig.time or src_sig.end_time)
3459 log.Notice(_(u"Replicating %s.") % (src_sig_filename,))
3460- fileobj = globals.src_backend.get_fileobj_read(src_sig_filename)
3461- filename = file_naming.get(src_sig.type, encrypted=globals.encryption, gzipped=globals.compression)
3462+ fileobj = config.src_backend.get_fileobj_read(src_sig_filename)
3463+ filename = file_naming.get(src_sig.type, encrypted=config.encryption, gzipped=config.compression)
3464 tdp = dup_temp.new_tempduppath(file_naming.parse(filename))
3465 tmpobj = tdp.filtered_open(mode=u'wb')
3466 util.copyfileobj(fileobj, tmpobj) # decrypt, compress, (re)-encrypt
3467 fileobj.close()
3468 tmpobj.close()
3469- globals.backend.put(tdp, filename)
3470+ config.backend.put(tdp, filename)
3471 tdp.delete()
3472
3473 src_chainlist = src_stats.get_backup_chains(filename_list=src_list)[0]
3474@@ -1093,14 +1083,14 @@
3475 mf = manifest.Manifest(fh=mf_tdp.filtered_open(mode=u'wb'))
3476 for i, filename in list(src_set.volume_name_dict.items()):
3477 log.Notice(_(u"Replicating %s.") % (filename,))
3478- fileobj = restore_get_enc_fileobj(globals.src_backend, filename, rmf.volume_info_dict[i])
3479- filename = file_naming.get(src_set.type, i, encrypted=globals.encryption, gzipped=globals.compression)
3480+ fileobj = restore_get_enc_fileobj(config.src_backend, filename, rmf.volume_info_dict[i])
3481+ filename = file_naming.get(src_set.type, i, encrypted=config.encryption, gzipped=config.compression)
3482 tdp = dup_temp.new_tempduppath(file_naming.parse(filename))
3483 tmpobj = tdp.filtered_open(mode=u'wb')
3484 util.copyfileobj(fileobj, tmpobj) # decrypt, compress, (re)-encrypt
3485 fileobj.close()
3486 tmpobj.close()
3487- globals.backend.put(tdp, filename)
3488+ config.backend.put(tdp, filename)
3489
3490 vi = copy.copy(rmf.volume_info_dict[i])
3491 vi.set_hash(u"SHA1", gpg.get_hash(u"SHA1", tdp))
3492@@ -1113,18 +1103,18 @@
3493 mf_fileobj = mf_tdp.filtered_open_with_delete(mode=u'rb')
3494 mf_final_filename = file_naming.get(src_set.type,
3495 manifest=True,
3496- encrypted=globals.encryption,
3497- gzipped=globals.compression)
3498+ encrypted=config.encryption,
3499+ gzipped=config.compression)
3500 mf_final_tdp = dup_temp.new_tempduppath(file_naming.parse(mf_final_filename))
3501 mf_final_fileobj = mf_final_tdp.filtered_open(mode=u'wb')
3502 util.copyfileobj(mf_fileobj, mf_final_fileobj) # compress, encrypt
3503 mf_fileobj.close()
3504 mf_final_fileobj.close()
3505- globals.backend.put(mf_final_tdp, mf_final_filename)
3506+ config.backend.put(mf_final_tdp, mf_final_filename)
3507 mf_final_tdp.delete()
3508
3509- globals.src_backend.close()
3510- globals.backend.close()
3511+ config.src_backend.close()
3512+ config.backend.close()
3513
3514
3515 def sync_archive(col_stats):
3516@@ -1146,13 +1136,13 @@
3517
3518 Otherwise, only the metadata for the target chain needs sync.
3519 """
3520- if globals.metadata_sync_mode == u"full":
3521+ if config.metadata_sync_mode == u"full":
3522 return True
3523- assert globals.metadata_sync_mode == u"partial"
3524+ assert config.metadata_sync_mode == u"partial"
3525 parsed = file_naming.parse(filename)
3526 try:
3527 target_chain = col_stats.get_backup_chain_at_time(
3528- globals.restore_time or dup_time.curtime)
3529+ config.restore_time or dup_time.curtime)
3530 except dup_collections.CollectionsError:
3531 # With zero or multiple chains at this time, do a full sync
3532 return True
3533@@ -1226,7 +1216,7 @@
3534 return (pr, loc_name, fn)
3535
3536 def remove_local(fn):
3537- del_name = globals.archive_dir_path.append(fn).name
3538+ del_name = config.archive_dir_path.append(fn).name
3539
3540 log.Notice(_(u"Deleting local %s (not authoritative at backend).") %
3541 util.fsdecode(del_name))
3542@@ -1285,7 +1275,7 @@
3543
3544 pr, loc_name, rem_name = resolve_basename(fn)
3545
3546- fileobj = globals.backend.get_fileobj_read(fn)
3547+ fileobj = config.backend.get_fileobj_read(fn)
3548 src_iter = SrcIter(fileobj)
3549 tdp = dup_temp.new_tempduppath(file_naming.parse(loc_name))
3550 if pr.manifest:
3551@@ -1293,14 +1283,14 @@
3552 else:
3553 gpg.GzipWriteFile(src_iter, tdp.name, size=sys.maxsize)
3554 tdp.setdata()
3555- tdp.move(globals.archive_dir_path.append(loc_name))
3556+ tdp.move(config.archive_dir_path.append(loc_name))
3557
3558 # get remote metafile list
3559- remlist = globals.backend.list()
3560+ remlist = config.backend.list()
3561 remote_metafiles, ignored, rem_needpass = get_metafiles(remlist)
3562
3563 # get local metafile list
3564- loclist = globals.archive_dir_path.listdir()
3565+ loclist = config.archive_dir_path.listdir()
3566 local_metafiles, local_partials, loc_needpass = get_metafiles(loclist)
3567
3568 # we have the list of metafiles on both sides. remote is always
3569@@ -1333,15 +1323,15 @@
3570 else:
3571 local_missing.sort()
3572 local_spurious.sort()
3573- if not globals.dry_run:
3574+ if not config.dry_run:
3575 log.Notice(_(u"Synchronizing remote metadata to local cache..."))
3576 if local_missing and (rem_needpass or loc_needpass):
3577 # password for the --encrypt-key
3578- globals.gpg_profile.passphrase = get_passphrase(1, u"sync")
3579+ config.gpg_profile.passphrase = get_passphrase(1, u"sync")
3580 for fn in local_spurious:
3581 remove_local(fn)
3582- if hasattr(globals.backend, u'pre_process_download'):
3583- globals.backend.pre_process_download(local_missing)
3584+ if hasattr(config.backend, u'pre_process_download'):
3585+ config.backend.pre_process_download(local_missing)
3586 for fn in local_missing:
3587 copy_to_local(fn)
3588 col_stats.set_values()
3589@@ -1367,7 +1357,7 @@
3590 assert col_stats.all_backup_chains
3591 last_backup_set = col_stats.all_backup_chains[-1].get_last()
3592 # check remote manifest only if we can decrypt it (see #1729796)
3593- check_remote = not globals.encryption or globals.gpg_profile.passphrase
3594+ check_remote = not config.encryption or config.gpg_profile.passphrase
3595 last_backup_set.check_manifests(check_remote=check_remote)
3596
3597
3598@@ -1399,8 +1389,8 @@
3599 # Calculate space we need for at least 2 volumes of full or inc
3600 # plus about 30% of one volume for the signature files.
3601 freespace = stats.f_frsize * stats.f_bavail
3602- needspace = (((globals.async_concurrency + 1) * globals.volsize) +
3603- int(0.30 * globals.volsize))
3604+ needspace = (((config.async_concurrency + 1) * config.volsize) +
3605+ int(0.30 * config.volsize))
3606 if freespace < needspace:
3607 log.FatalError(_(u"Temp space has %d available, backup needs approx %d.") %
3608 (freespace, needspace), log.ErrorCode.not_enough_freespace)
3609@@ -1438,7 +1428,7 @@
3610 class Restart(object):
3611 u"""
3612 Class to aid in restart of inc or full backup.
3613- Instance in globals.restart if restart in progress.
3614+ Instance in config.restart if restart in progress.
3615 """
3616
3617 def __init__(self, last_backup):
3618@@ -1524,10 +1514,10 @@
3619 # determine what action we're performing and process command line
3620 action = commandline.ProcessCommandLine(sys.argv[1:])
3621
3622- globals.lockpath = os.path.join(globals.archive_dir_path.name, b"lockfile")
3623- globals.lockfile = fasteners.process_lock.InterProcessLock(globals.lockpath)
3624- log.Debug(_(u"Acquiring lockfile %s") % globals.lockpath)
3625- if not globals.lockfile.acquire(blocking=False):
3626+ config.lockpath = os.path.join(config.archive_dir_path.name, b"lockfile")
3627+ config.lockfile = fasteners.process_lock.InterProcessLock(config.lockpath)
3628+ log.Debug(_(u"Acquiring lockfile %s") % config.lockpath)
3629+ if not config.lockfile.acquire(blocking=False):
3630 log.FatalError(
3631 u"Another duplicity instance is already running with this archive directory\n",
3632 log.ErrorCode.user_error)
3633@@ -1543,8 +1533,8 @@
3634
3635 def do_backup(action):
3636 # set the current time strings again now that we have time separator
3637- if globals.current_time:
3638- dup_time.setcurtime(globals.current_time)
3639+ if config.current_time:
3640+ dup_time.setcurtime(config.current_time)
3641 else:
3642 dup_time.setcurtime()
3643
3644@@ -1555,8 +1545,8 @@
3645 check_resources(action)
3646
3647 # get current collection status
3648- col_stats = dup_collections.CollectionsStatus(globals.backend,
3649- globals.archive_dir_path,
3650+ col_stats = dup_collections.CollectionsStatus(config.backend,
3651+ config.archive_dir_path,
3652 action).set_values()
3653
3654 # check archive synch with remote, fix if needed
3655@@ -1574,15 +1564,15 @@
3656 if last_backup.partial:
3657 if action in [u"full", u"inc"]:
3658 # set restart parms from last_backup info
3659- globals.restart = Restart(last_backup)
3660+ config.restart = Restart(last_backup)
3661 # (possibly) reset action
3662- action = globals.restart.type
3663+ action = config.restart.type
3664 # reset the time strings
3665 if action == u"full":
3666- dup_time.setcurtime(globals.restart.time)
3667+ dup_time.setcurtime(config.restart.time)
3668 else:
3669- dup_time.setcurtime(globals.restart.end_time)
3670- dup_time.setprevtime(globals.restart.start_time)
3671+ dup_time.setcurtime(config.restart.end_time)
3672+ dup_time.setprevtime(config.restart.start_time)
3673 # log it -- main restart heavy lifting is done in write_multivol
3674 log.Notice(_(u"Last %s backup left a partial set, restarting." % action))
3675 break
3676@@ -1590,8 +1580,8 @@
3677 # remove last partial backup and get new collection status
3678 log.Notice(_(u"Cleaning up previous partial %s backup set, restarting." % action))
3679 last_backup.delete()
3680- col_stats = dup_collections.CollectionsStatus(globals.backend,
3681- globals.archive_dir_path,
3682+ col_stats = dup_collections.CollectionsStatus(config.backend,
3683+ config.archive_dir_path,
3684 action).set_values()
3685 continue
3686 break
3687@@ -1603,8 +1593,8 @@
3688 log.Notice(_(u"Last full backup date:") + u" " + dup_time.timetopretty(last_full_time))
3689 else:
3690 log.Notice(_(u"Last full backup date: none"))
3691- if not globals.restart and action == u"inc" and globals.full_force_time is not None and \
3692- last_full_time < globals.full_force_time:
3693+ if not config.restart and action == u"inc" and config.full_force_time is not None and \
3694+ last_full_time < config.full_force_time:
3695 log.Notice(_(u"Last full backup is too old, forcing full backup"))
3696 action = u"full"
3697 log.PrintCollectionStatus(col_stats)
3698@@ -1612,7 +1602,7 @@
3699 os.umask(0o77)
3700
3701 # get the passphrase if we need to based on action/options
3702- globals.gpg_profile.passphrase = get_passphrase(1, action)
3703+ config.gpg_profile.passphrase = get_passphrase(1, action)
3704
3705 if action == u"restore":
3706 restore(col_stats)
3707@@ -1621,10 +1611,10 @@
3708 elif action == u"list-current":
3709 list_current(col_stats)
3710 elif action == u"collection-status":
3711- if not globals.file_changed:
3712+ if not config.file_changed:
3713 log.PrintCollectionStatus(col_stats, True)
3714 else:
3715- log.PrintCollectionFileChangedStatus(col_stats, globals.file_changed, True)
3716+ log.PrintCollectionFileChangedStatus(col_stats, config.file_changed, True)
3717 elif action == u"cleanup":
3718 cleanup(col_stats)
3719 elif action == u"remove-old":
3720@@ -1640,19 +1630,19 @@
3721 # the passphrase for full and inc is used by --sign-key
3722 # the sign key can have a different passphrase than the encrypt
3723 # key, therefore request a passphrase
3724- if globals.gpg_profile.sign_key:
3725- globals.gpg_profile.signing_passphrase = get_passphrase(1, action, True)
3726+ if config.gpg_profile.sign_key:
3727+ config.gpg_profile.signing_passphrase = get_passphrase(1, action, True)
3728
3729 # if there are no recipients (no --encrypt-key), it must be a
3730 # symmetric key. Therefore, confirm the passphrase
3731- if not (globals.gpg_profile.recipients or globals.gpg_profile.hidden_recipients):
3732- globals.gpg_profile.passphrase = get_passphrase(2, action)
3733+ if not (config.gpg_profile.recipients or config.gpg_profile.hidden_recipients):
3734+ config.gpg_profile.passphrase = get_passphrase(2, action)
3735 # a limitation in the GPG implementation does not allow for
3736 # inputting different passphrases, this affects symmetric+sign.
3737 # Allow an empty passphrase for the key though to allow a non-empty
3738 # symmetric key
3739- if (globals.gpg_profile.signing_passphrase and
3740- globals.gpg_profile.passphrase != globals.gpg_profile.signing_passphrase):
3741+ if (config.gpg_profile.signing_passphrase and
3742+ config.gpg_profile.passphrase != config.gpg_profile.signing_passphrase):
3743 log.FatalError(_(
3744 u"When using symmetric encryption, the signing passphrase "
3745 u"must equal the encryption passphrase."),
3746@@ -1666,13 +1656,13 @@
3747 if not sig_chain:
3748 full_backup(col_stats)
3749 else:
3750- if not globals.restart:
3751+ if not config.restart:
3752 # only ask for a passphrase if there was a previous backup
3753 if col_stats.all_backup_chains:
3754- globals.gpg_profile.passphrase = get_passphrase(1, action)
3755+ config.gpg_profile.passphrase = get_passphrase(1, action)
3756 check_last_manifest(col_stats) # not needed for full backups
3757 incremental_backup(sig_chain)
3758- globals.backend.close()
3759+ config.backend.close()
3760 log.shutdown()
3761 if exit_val is not None:
3762 sys.exit(exit_val)
3763
3764=== modified file 'duplicity/dup_temp.py'
3765--- duplicity/dup_temp.py 2019-09-22 23:44:56 +0000
3766+++ duplicity/dup_temp.py 2020-03-22 12:35:54 +0000
3767@@ -1,4 +1,4 @@
3768-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
3769+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
3770 #
3771 # Copyright 2002 Ben Escoto <ben@emerose.org>
3772 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
3773@@ -31,11 +31,10 @@
3774 import shutil
3775
3776 from duplicity import log
3777-from duplicity import util
3778 from duplicity import path
3779 from duplicity import file_naming
3780 from duplicity import tempdir
3781-from duplicity import globals
3782+from duplicity import config
3783 from duplicity import gpg
3784
3785
3786@@ -75,7 +74,7 @@
3787 return fileobject is closed, rename to final position. filename
3788 must be a recognizable duplicity data file.
3789 """
3790- if not globals.restart:
3791+ if not config.restart:
3792 td = tempdir.TemporaryDirectory(dirpath.name)
3793 tdpname = td.mktemp()
3794 tdp = TempDupPath(tdpname, parseresults=file_naming.parse(partname))
3795@@ -93,7 +92,7 @@
3796 tdp.rename(dirpath.append(partname))
3797 td.forget(tdpname)
3798
3799- if not globals.restart:
3800+ if not config.restart:
3801 fh.addhook(rename_and_forget)
3802
3803 return fh
3804@@ -171,7 +170,7 @@
3805 u"""
3806 We have achieved the first checkpoint, make file visible and permanent.
3807 """
3808- assert not globals.restart
3809+ assert not config.restart
3810 self.tdp.rename(self.dirpath.append(self.partname))
3811 self.fileobj.flush()
3812 del self.hooklist[0]
3813@@ -188,10 +187,10 @@
3814 if pr.compressed:
3815 gpg.GzipWriteFile(src_iter, tgt.name, size=sys.maxsize)
3816 elif pr.encrypted:
3817- gpg.GPGWriteFile(src_iter, tgt.name, globals.gpg_profile, size=sys.maxsize)
3818+ gpg.GPGWriteFile(src_iter, tgt.name, config.gpg_profile, size=sys.maxsize)
3819 else:
3820 shutil.copyfile(src.name, tgt.name)
3821- globals.backend.move(tgt) # @UndefinedVariable
3822+ config.backend.move(tgt)
3823
3824 def to_final(self):
3825 u"""
3826
3827=== modified file 'duplicity/dup_threading.py'
3828--- duplicity/dup_threading.py 2018-11-29 19:00:15 +0000
3829+++ duplicity/dup_threading.py 2020-03-22 12:35:54 +0000
3830@@ -1,4 +1,4 @@
3831-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
3832+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
3833 #
3834 # Copyright 2002 Ben Escoto <ben@emerose.org>
3835 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
3836@@ -186,7 +186,7 @@
3837 # not care about hash lookup overhead since this is intended to be
3838 # used for significant amounts of work.
3839
3840- cv = threading.Condition() # @UndefinedVariable
3841+ cv = threading.Condition()
3842 state = {u'done': False,
3843 u'error': None,
3844 u'trace': None,
3845@@ -259,7 +259,7 @@
3846 """
3847 self.__value = value
3848
3849- self.__cv = threading.Condition() # @UndefinedVariable
3850+ self.__cv = threading.Condition()
3851
3852 def get(self):
3853 u"""
3854
3855=== modified file 'duplicity/dup_time.py'
3856--- duplicity/dup_time.py 2019-08-08 19:31:58 +0000
3857+++ duplicity/dup_time.py 2020-03-22 12:35:54 +0000
3858@@ -1,4 +1,4 @@
3859-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
3860+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
3861 #
3862 # Copyright 2002 Ben Escoto <ben@emerose.org>
3863 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
3864@@ -30,7 +30,7 @@
3865 import re
3866 import calendar
3867 import sys
3868-from duplicity import globals
3869+from duplicity import config
3870 from duplicity import util
3871
3872 # For type testing against both int and long types that works in python 2/3
3873@@ -94,13 +94,13 @@
3874 def timetostring(timeinseconds):
3875 u"""Return w3 or duplicity datetime compliant listing of timeinseconds"""
3876
3877- if globals.old_filenames:
3878+ if config.old_filenames:
3879 # We need to know if DST applies to append the correct offset. So
3880 # 1. Save the tuple returned by localtime.
3881 # 2. Pass the DST flag into gettzd
3882 lcltime = time.localtime(timeinseconds)
3883- return time.strftime(u"%Y-%m-%dT%H" + globals.time_separator +
3884- u"%M" + globals.time_separator + u"%S",
3885+ return time.strftime(u"%Y-%m-%dT%H" + config.time_separator +
3886+ u"%M" + config.time_separator + u"%S",
3887 lcltime) + gettzd(lcltime[-1])
3888 else:
3889 # DST never applies to UTC
3890@@ -126,7 +126,7 @@
3891 # old format for filename time
3892 year, month, day = list(map(int, date.split(u"-")))
3893 hour, minute, second = list(map(int,
3894- daytime.split(globals.time_separator)))
3895+ daytime.split(config.time_separator)))
3896 assert 1900 < year < 2100, year
3897 assert 1 <= month <= 12
3898 assert 1 <= day <= 31
3899@@ -247,7 +247,7 @@
3900 hours, minutes = list(map(abs, divmod(offset, 60)))
3901 assert 0 <= hours <= 23
3902 assert 0 <= minutes <= 59
3903- return u"%s%02d%s%02d" % (prefix, hours, globals.time_separator, minutes)
3904+ return u"%s%02d%s%02d" % (prefix, hours, config.time_separator, minutes)
3905
3906
3907 def tzdtoseconds(tzd):
3908@@ -256,7 +256,7 @@
3909 return 0
3910 assert len(tzd) == 6 # only accept forms like +08:00 for now
3911 assert (tzd[0] == u"-" or tzd[0] == u"+") and \
3912- tzd[3] == globals.time_separator
3913+ tzd[3] == config.time_separator
3914 return -60 * (60 * int(tzd[:3]) + int(tzd[4:]))
3915
3916
3917
3918=== modified file 'duplicity/errors.py'
3919--- duplicity/errors.py 2018-09-11 21:35:37 +0000
3920+++ duplicity/errors.py 2020-03-22 12:35:54 +0000
3921@@ -1,4 +1,4 @@
3922-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
3923+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
3924 #
3925 # Copyright 2002 Ben Escoto <ben@emerose.org>
3926 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
3927
3928=== modified file 'duplicity/file_naming.py'
3929--- duplicity/file_naming.py 2018-11-29 19:00:15 +0000
3930+++ duplicity/file_naming.py 2020-03-22 12:35:54 +0000
3931@@ -1,4 +1,4 @@
3932-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
3933+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
3934 #
3935 # Copyright 2002 Ben Escoto <ben@emerose.org>
3936 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
3937@@ -26,7 +26,7 @@
3938 from builtins import object
3939 import re
3940 from duplicity import dup_time
3941-from duplicity import globals
3942+from duplicity import config
3943 import sys
3944
3945 full_vol_re = None
3946@@ -61,47 +61,47 @@
3947 if full_vol_re and not force:
3948 return
3949
3950- full_vol_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"duplicity-full"
3951+ full_vol_re = re.compile(b"^" + config.file_prefix + config.file_prefix_archive + b"duplicity-full"
3952 b"\\.(?P<time>.*?)"
3953 b"\\.vol(?P<num>[0-9]+)"
3954 b"\\.difftar"
3955 b"(?P<partial>(\\.part))?"
3956 b"($|\\.)")
3957
3958- full_vol_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"df"
3959+ full_vol_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_archive + b"df"
3960 b"\\.(?P<time>[0-9a-z]+?)"
3961 b"\\.(?P<num>[0-9a-z]+)"
3962 b"\\.dt"
3963 b"(?P<partial>(\\.p))?"
3964 b"($|\\.)")
3965
3966- full_manifest_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"duplicity-full"
3967+ full_manifest_re = re.compile(b"^" + config.file_prefix + config.file_prefix_manifest + b"duplicity-full"
3968 b"\\.(?P<time>.*?)"
3969 b"\\.manifest"
3970 b"(?P<partial>(\\.part))?"
3971 b"($|\\.)")
3972
3973- full_manifest_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"df"
3974+ full_manifest_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_manifest + b"df"
3975 b"\\.(?P<time>[0-9a-z]+?)"
3976 b"\\.m"
3977 b"(?P<partial>(\\.p))?"
3978 b"($|\\.)")
3979
3980- inc_vol_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"duplicity-inc"
3981+ inc_vol_re = re.compile(b"^" + config.file_prefix + config.file_prefix_archive + b"duplicity-inc"
3982 b"\\.(?P<start_time>.*?)"
3983 b"\\.to\\.(?P<end_time>.*?)"
3984 b"\\.vol(?P<num>[0-9]+)"
3985 b"\\.difftar"
3986 b"($|\\.)")
3987
3988- inc_vol_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_archive + b"di"
3989+ inc_vol_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_archive + b"di"
3990 b"\\.(?P<start_time>[0-9a-z]+?)"
3991 b"\\.(?P<end_time>[0-9a-z]+?)"
3992 b"\\.(?P<num>[0-9a-z]+)"
3993 b"\\.dt"
3994 b"($|\\.)")
3995
3996- inc_manifest_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"duplicity-inc"
3997+ inc_manifest_re = re.compile(b"^" + config.file_prefix + config.file_prefix_manifest + b"duplicity-inc"
3998 b"\\.(?P<start_time>.*?)"
3999 b"\\.to"
4000 b"\\.(?P<end_time>.*?)"
4001@@ -109,26 +109,26 @@
4002 b"(?P<partial>(\\.part))?"
4003 b"(\\.|$)")
4004
4005- inc_manifest_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_manifest + b"di"
4006+ inc_manifest_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_manifest + b"di"
4007 b"\\.(?P<start_time>[0-9a-z]+?)"
4008 b"\\.(?P<end_time>[0-9a-z]+?)"
4009 b"\\.m"
4010 b"(?P<partial>(\\.p))?"
4011 b"(\\.|$)")
4012
4013- full_sig_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"duplicity-full-signatures"
4014+ full_sig_re = re.compile(b"^" + config.file_prefix + config.file_prefix_signature + b"duplicity-full-signatures"
4015 b"\\.(?P<time>.*?)"
4016 b"\\.sigtar"
4017 b"(?P<partial>(\\.part))?"
4018 b"(\\.|$)")
4019
4020- full_sig_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"dfs"
4021+ full_sig_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_signature + b"dfs"
4022 b"\\.(?P<time>[0-9a-z]+?)"
4023 b"\\.st"
4024 b"(?P<partial>(\\.p))?"
4025 b"(\\.|$)")
4026
4027- new_sig_re = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"duplicity-new-signatures"
4028+ new_sig_re = re.compile(b"^" + config.file_prefix + config.file_prefix_signature + b"duplicity-new-signatures"
4029 b"\\.(?P<start_time>.*?)"
4030 b"\\.to"
4031 b"\\.(?P<end_time>.*?)"
4032@@ -136,7 +136,7 @@
4033 b"(?P<partial>(\\.part))?"
4034 b"(\\.|$)")
4035
4036- new_sig_re_short = re.compile(b"^" + globals.file_prefix + globals.file_prefix_signature + b"dns"
4037+ new_sig_re_short = re.compile(b"^" + config.file_prefix + config.file_prefix_signature + b"dns"
4038 b"\\.(?P<start_time>[0-9a-z]+?)"
4039 b"\\.(?P<end_time>[0-9a-z]+?)"
4040 b"\\.st"
4041@@ -189,12 +189,12 @@
4042 if encrypted:
4043 gzipped = False
4044 if encrypted:
4045- if globals.short_filenames:
4046+ if config.short_filenames:
4047 suffix = b'.g'
4048 else:
4049 suffix = b".gpg"
4050 elif gzipped:
4051- if globals.short_filenames:
4052+ if config.short_filenames:
4053 suffix = b".z"
4054 else:
4055 suffix = b'.gz'
4056@@ -203,7 +203,7 @@
4057 return suffix
4058
4059
4060-def get(type, volume_number=None, manifest=False,
4061+def get(type, volume_number=None, manifest=False, # pylint: disable=redefined-builtin
4062 encrypted=False, gzipped=False, partial=False):
4063 u"""
4064 Return duplicity filename of specified type
4065@@ -217,7 +217,7 @@
4066 gzipped = False
4067 suffix = get_suffix(encrypted, gzipped)
4068 part_string = b""
4069- if globals.short_filenames:
4070+ if config.short_filenames:
4071 if partial:
4072 part_string = b".p"
4073 else:
4074@@ -228,23 +228,23 @@
4075 assert not volume_number and not manifest
4076 assert not (volume_number and part_string)
4077 if type == u"full-sig":
4078- if globals.short_filenames:
4079- return (globals.file_prefix + globals.file_prefix_signature +
4080+ if config.short_filenames:
4081+ return (config.file_prefix + config.file_prefix_signature +
4082 b"dfs.%s.st%s%s" %
4083 (to_base36(dup_time.curtime), part_string, suffix))
4084 else:
4085- return (globals.file_prefix + globals.file_prefix_signature +
4086+ return (config.file_prefix + config.file_prefix_signature +
4087 b"duplicity-full-signatures.%s.sigtar%s%s" %
4088 (dup_time.curtimestr.encode(), part_string, suffix))
4089 elif type == u"new-sig":
4090- if globals.short_filenames:
4091- return (globals.file_prefix + globals.file_prefix_signature +
4092+ if config.short_filenames:
4093+ return (config.file_prefix + config.file_prefix_signature +
4094 b"dns.%s.%s.st%s%s" %
4095 (to_base36(dup_time.prevtime),
4096 to_base36(dup_time.curtime),
4097 part_string, suffix))
4098 else:
4099- return (globals.file_prefix + globals.file_prefix_signature +
4100+ return (config.file_prefix + config.file_prefix_signature +
4101 b"duplicity-new-signatures.%s.to.%s.sigtar%s%s" %
4102 (dup_time.prevtimestr.encode(), dup_time.curtimestr.encode(),
4103 part_string, suffix))
4104@@ -252,30 +252,30 @@
4105 assert volume_number or manifest
4106 assert not (volume_number and manifest)
4107
4108- prefix = globals.file_prefix
4109+ prefix = config.file_prefix
4110
4111 if volume_number:
4112- if globals.short_filenames:
4113+ if config.short_filenames:
4114 vol_string = b"%s.dt" % to_base36(volume_number)
4115 else:
4116 vol_string = b"vol%d.difftar" % volume_number
4117- prefix += globals.file_prefix_archive
4118+ prefix += config.file_prefix_archive
4119 else:
4120- if globals.short_filenames:
4121+ if config.short_filenames:
4122 vol_string = b"m"
4123 else:
4124 vol_string = b"manifest"
4125- prefix += globals.file_prefix_manifest
4126+ prefix += config.file_prefix_manifest
4127
4128 if type == u"full":
4129- if globals.short_filenames:
4130+ if config.short_filenames:
4131 return (b"%sdf.%s.%s%s%s" % (prefix, to_base36(dup_time.curtime),
4132 vol_string, part_string, suffix))
4133 else:
4134 return (b"%sduplicity-full.%s.%s%s%s" % (prefix, dup_time.curtimestr.encode(),
4135 vol_string, part_string, suffix))
4136 elif type == u"inc":
4137- if globals.short_filenames:
4138+ if config.short_filenames:
4139 return (b"%sdi.%s.%s.%s%s%s" % (prefix, to_base36(dup_time.prevtime),
4140 to_base36(dup_time.curtime),
4141 vol_string, part_string, suffix))
4142@@ -324,7 +324,7 @@
4143 short = True
4144 m1 = full_vol_re_short.search(filename)
4145 m2 = full_manifest_re_short.search(filename)
4146- if not m1 and not m2 and not globals.short_filenames:
4147+ if not m1 and not m2 and not config.short_filenames:
4148 short = False
4149 m1 = full_vol_re.search(filename)
4150 m2 = full_manifest_re.search(filename)
4151@@ -347,7 +347,7 @@
4152 short = True
4153 m1 = inc_vol_re_short.search(filename)
4154 m2 = inc_manifest_re_short.search(filename)
4155- if not m1 and not m2 and not globals.short_filenames:
4156+ if not m1 and not m2 and not config.short_filenames:
4157 short = False
4158 m1 = inc_vol_re.search(filename)
4159 m2 = inc_manifest_re.search(filename)
4160@@ -370,7 +370,7 @@
4161 prepare_regex()
4162 short = True
4163 m = full_sig_re_short.search(filename)
4164- if not m and not globals.short_filenames:
4165+ if not m and not config.short_filenames:
4166 short = False
4167 m = full_sig_re.search(filename)
4168 if m:
4169@@ -383,7 +383,7 @@
4170
4171 short = True
4172 m = new_sig_re_short.search(filename)
4173- if not m and not globals.short_filenames:
4174+ if not m and not config.short_filenames:
4175 short = False
4176 m = new_sig_re.search(filename)
4177 if m:
4178@@ -399,13 +399,13 @@
4179 Set encryption and compression flags in ParseResults pr
4180 """
4181 if (filename.endswith(b'.z') or
4182- not globals.short_filenames and filename.endswith(b'gz')):
4183+ not config.short_filenames and filename.endswith(b'gz')):
4184 pr.compressed = 1
4185 else:
4186 pr.compressed = None
4187
4188 if (filename.endswith(b'.g') or
4189- not globals.short_filenames and filename.endswith(b'.gpg')):
4190+ not config.short_filenames and filename.endswith(b'.gpg')):
4191 pr.encrypted = 1
4192 else:
4193 pr.encrypted = None
4194@@ -425,7 +425,7 @@
4195 u"""
4196 Hold information taken from a duplicity filename
4197 """
4198- def __init__(self, type, manifest=None, volume_number=None,
4199+ def __init__(self, type, manifest=None, volume_number=None, # pylint: disable=redefined-builtin
4200 time=None, start_time=None, end_time=None,
4201 encrypted=None, compressed=None, partial=False):
4202
4203
4204=== modified file 'duplicity/filechunkio.py'
4205--- duplicity/filechunkio.py 2018-09-11 21:35:37 +0000
4206+++ duplicity/filechunkio.py 2020-03-22 12:35:54 +0000
4207@@ -1,4 +1,4 @@
4208-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
4209+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4210 #
4211 # Copyright 2011 Fabian Topfstedt <topfstedt@schneevonmorgen.com>
4212 #
4213@@ -18,8 +18,8 @@
4214 u"""
4215 A class that allows you reading only a chunk of a file.
4216 """
4217- def __init__(self, name, mode=u'r', closefd=True, offset=0, bytes=None,
4218- *args, **kwargs):
4219+ def __init__(self, name, mode=u'r', closefd=True, offset=0, bytes=None, # pylint: disable=redefined-builtin
4220+ *args, **kwargs): # pylint: disable=redefined-builtin
4221 u"""
4222 Open a file chunk. The mode can only be 'r' for reading. Offset
4223 is the amount of bytes that the chunks starts after the real file's
4224
4225=== modified file 'duplicity/globmatch.py'
4226--- duplicity/globmatch.py 2018-11-29 19:00:15 +0000
4227+++ duplicity/globmatch.py 2020-03-22 12:35:54 +0000
4228@@ -1,4 +1,4 @@
4229-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
4230+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4231 #
4232 # Copyright 2002 Ben Escoto <ben@emerose.org>
4233 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
4234
4235=== modified file 'duplicity/gpg.py'
4236--- duplicity/gpg.py 2020-02-06 15:27:43 +0000
4237+++ duplicity/gpg.py 2020-03-22 12:35:54 +0000
4238@@ -1,4 +1,4 @@
4239-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
4240+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4241 #
4242 # Copyright 2002 Ben Escoto <ben@emerose.org>
4243 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
4244@@ -30,16 +30,13 @@
4245 from builtins import object
4246 import os
4247 import sys
4248-import types
4249 import tempfile
4250 import re
4251 import gzip
4252 import locale
4253-import platform
4254
4255-from duplicity import globals
4256+from duplicity import config
4257 from duplicity import gpginterface
4258-from duplicity import log
4259 from duplicity import tempdir
4260 from duplicity import util
4261
4262@@ -92,7 +89,7 @@
4263 else:
4264 self.hidden_recipients = []
4265
4266- self.gpg_version = self.get_gpg_version(globals.gpg_binary)
4267+ self.gpg_version = self.get_gpg_version(config.gpg_binary)
4268
4269 rc = re.compile
4270 _version_re = rc(b'^gpg.*\\(GnuPG(?:/MacGPG2)?\\) (?P<maj>[0-9]+)\\.(?P<min>[0-9]+)\\.(?P<bug>[0-9]+)(-.+)?$')
4271@@ -103,8 +100,8 @@
4272 gnupg.call = binary
4273
4274 # user supplied options
4275- if globals.gpg_options:
4276- for opt in globals.gpg_options.split():
4277+ if config.gpg_options:
4278+ for opt in config.gpg_options.split():
4279 gnupg.options.extra_args.append(opt)
4280
4281 # get gpg version
4282@@ -143,15 +140,15 @@
4283 # Start GPG process - copied from GnuPGInterface docstring.
4284 gnupg = gpginterface.GnuPG()
4285 # overrides default gpg binary 'gpg'
4286- if globals.gpg_binary is not None:
4287- gnupg.call = globals.gpg_binary
4288+ if config.gpg_binary is not None:
4289+ gnupg.call = config.gpg_binary
4290 gnupg.options.meta_interactive = 0
4291 gnupg.options.extra_args.append(u'--no-secmem-warning')
4292 gnupg.options.extra_args.append(u'--ignore-mdc-error')
4293
4294 # Support three versions of gpg present 1.x, 2.0.x, 2.1.x
4295 if profile.gpg_version[:1] == (1,):
4296- if globals.use_agent:
4297+ if config.use_agent:
4298 # gpg1 agent use is optional
4299 gnupg.options.extra_args.append(u'--use-agent')
4300
4301@@ -159,7 +156,7 @@
4302 pass
4303
4304 elif profile.gpg_version[:2] >= (2, 1):
4305- if not globals.use_agent:
4306+ if not config.use_agent:
4307 # This forces gpg2 to ignore the agent.
4308 # Necessary to enforce truly non-interactive operation.
4309 gnupg.options.extra_args.append(u'--pinentry-mode=loopback')
4310@@ -168,8 +165,8 @@
4311 raise GPGError(u"Unsupported GNUPG version, %s" % profile.gpg_version)
4312
4313 # user supplied options
4314- if globals.gpg_options:
4315- for opt in globals.gpg_options.split():
4316+ if config.gpg_options:
4317+ for opt in config.gpg_options.split():
4318 gnupg.options.extra_args.append(opt)
4319
4320 cmdlist = []
4321@@ -200,7 +197,7 @@
4322 # use integrity protection
4323 gnupg.options.extra_args.append(u'--force-mdc')
4324 # Skip the passphrase if using the agent
4325- if globals.use_agent:
4326+ if config.use_agent:
4327 gnupg_fhs = [u'stdin', ]
4328 else:
4329 gnupg_fhs = [u'stdin', u'passphrase']
4330@@ -208,7 +205,7 @@
4331 attach_fhs={u'stdout': encrypt_path.open(u"wb"),
4332 u'stderr': self.stderr_fp,
4333 u'logger': self.logger_fp})
4334- if not globals.use_agent:
4335+ if not config.use_agent:
4336 p1.handles[u'passphrase'].write(passphrase)
4337 p1.handles[u'passphrase'].close()
4338 self.gpg_input = p1.handles[u'stdin']
4339@@ -218,7 +215,7 @@
4340 cmdlist.append(profile.encrypt_secring)
4341 self.status_fp = tempfile.TemporaryFile(dir=tempdir.default().dir())
4342 # Skip the passphrase if using the agent
4343- if globals.use_agent:
4344+ if config.use_agent:
4345 gnupg_fhs = [u'stdout', ]
4346 else:
4347 gnupg_fhs = [u'stdout', u'passphrase']
4348@@ -227,7 +224,7 @@
4349 u'status': self.status_fp,
4350 u'stderr': self.stderr_fp,
4351 u'logger': self.logger_fp})
4352- if not(globals.use_agent):
4353+ if not(config.use_agent):
4354 p1.handles[u'passphrase'].write(passphrase)
4355 p1.handles[u'passphrase'].close()
4356 self.gpg_output = p1.handles[u'stdout']
4357@@ -365,16 +362,16 @@
4358 # workaround for circular module imports
4359 from duplicity import path
4360
4361- def top_off(bytes, file):
4362+ def top_off(bytelen, file):
4363 u"""
4364- Add bytes of incompressible data to to_gpg_fp
4365+ Add bytelen of incompressible data to to_gpg_fp
4366
4367 In this case we take the incompressible data from the
4368 beginning of filename (it should contain enough because size
4369 >> largest block size).
4370 """
4371 incompressible_fp = open(filename, u"rb")
4372- assert util.copyfileobj(incompressible_fp, file.gpg_input, bytes) == bytes
4373+ assert util.copyfileobj(incompressible_fp, file.gpg_input, bytelen) == bytelen
4374 incompressible_fp.close()
4375
4376 def get_current_size():
4377@@ -476,7 +473,7 @@
4378 return GzipWriteFile(block_iter, filename, size, gzipped)
4379
4380
4381-def get_hash(hash, path, hex=1):
4382+def get_hash(hash, path, hex=1): # pylint: disable=redefined-builtin
4383 u"""
4384 Return hash of path
4385
4386
4387=== modified file 'duplicity/gpginterface.py'
4388--- duplicity/gpginterface.py 2018-11-29 19:00:15 +0000
4389+++ duplicity/gpginterface.py 2020-03-22 12:35:54 +0000
4390@@ -1,3 +1,5 @@
4391+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4392+#
4393 u"""Interface to GNU Privacy Guard (GnuPG)
4394
4395 !!! This was renamed to gpginterface.py.
4396@@ -234,7 +236,7 @@
4397 try:
4398 import threading
4399 except ImportError:
4400- import dummy_threading # @UnusedImport
4401+ import dummy_threading as threading
4402 log.Warn(_(u"Threading not available -- zombie processes may appear"))
4403
4404 __author__ = u"Frank J. Tobin, ftobin@neverending.org"
4405@@ -708,7 +710,6 @@
4406
4407 def _run_doctests():
4408 import doctest
4409- from . import gpginterface
4410 return doctest.testmod(GnuPGInterface)
4411
4412
4413
4414=== modified file 'duplicity/lazy.py'
4415--- duplicity/lazy.py 2019-02-21 21:48:53 +0000
4416+++ duplicity/lazy.py 2020-03-22 12:35:54 +0000
4417@@ -1,4 +1,4 @@
4418-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
4419+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4420 #
4421 # Copyright 2002 Ben Escoto <ben@emerose.org>
4422 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
4423@@ -20,13 +20,16 @@
4424 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
4425
4426 u"""Define some lazy data structures and functions acting on them"""
4427+
4428 from __future__ import print_function
4429
4430 from builtins import map
4431 from builtins import next
4432 from builtins import range
4433 from builtins import object
4434+
4435 import os
4436+
4437 from duplicity import log
4438 from duplicity import robust
4439 from duplicity import util
4440@@ -36,47 +39,47 @@
4441 u"""Hold static methods for the manipulation of lazy iterators"""
4442
4443 @staticmethod
4444- def filter(predicate, iterator): # @NoSelf
4445+ def filter(predicate, iterator):
4446 u"""Like filter in a lazy functional programming language"""
4447 for i in iterator:
4448 if predicate(i):
4449 yield i
4450
4451 @staticmethod
4452- def map(function, iterator): # @NoSelf
4453+ def map(function, iterator):
4454 u"""Like map in a lazy functional programming language"""
4455 for i in iterator:
4456 yield function(i)
4457
4458 @staticmethod
4459- def foreach(function, iterator): # @NoSelf
4460+ def foreach(function, iterator):
4461 u"""Run function on each element in iterator"""
4462 for i in iterator:
4463 function(i)
4464
4465 @staticmethod
4466- def cat(*iters): # @NoSelf
4467+ def cat(*iters):
4468 u"""Lazily concatenate iterators"""
4469- for iter in iters:
4470+ for iter in iters: # pylint: disable=redefined-builtin
4471 for i in iter:
4472 yield i
4473
4474 @staticmethod
4475- def cat2(iter_of_iters): # @NoSelf
4476+ def cat2(iter_of_iters):
4477 u"""Lazily concatenate iterators, iterated by big iterator"""
4478- for iter in iter_of_iters:
4479+ for iter in iter_of_iters: # pylint: disable=redefined-builtin
4480 for i in iter:
4481 yield i
4482
4483 @staticmethod
4484- def empty(iter): # @NoSelf
4485+ def empty(iter): # pylint: disable=redefined-builtin
4486 u"""True if iterator has length 0"""
4487- for i in iter: # @UnusedVariable
4488+ for i in iter:
4489 return None
4490 return 1
4491
4492 @staticmethod
4493- def equal(iter1, iter2, verbose=None, operator=lambda x, y: x == y): # @NoSelf
4494+ def equal(iter1, iter2, verbose=None, operator=lambda x, y: x == y):
4495 u"""True if iterator 1 has same elements as iterator 2
4496
4497 Use equality operator, or == if it is unspecified.
4498@@ -102,7 +105,7 @@
4499 return None
4500
4501 @staticmethod
4502- def Or(iter): # @NoSelf
4503+ def Or(iter): # pylint: disable=redefined-builtin
4504 u"""True if any element in iterator is true. Short circuiting"""
4505 i = None
4506 for i in iter:
4507@@ -111,7 +114,7 @@
4508 return i
4509
4510 @staticmethod
4511- def And(iter): # @NoSelf
4512+ def And(iter): # pylint: disable=redefined-builtin
4513 u"""True if all elements in iterator are true. Short circuiting"""
4514 i = 1
4515 for i in iter:
4516@@ -120,7 +123,7 @@
4517 return i
4518
4519 @staticmethod
4520- def len(iter): # @NoSelf
4521+ def len(iter): # pylint: disable=redefined-builtin
4522 u"""Return length of iterator"""
4523 i = 0
4524 while 1:
4525@@ -131,7 +134,7 @@
4526 i = i + 1
4527
4528 @staticmethod
4529- def foldr(f, default, iter): # @NoSelf
4530+ def foldr(f, default, iter): # pylint: disable=redefined-builtin
4531 u"""foldr the "fundamental list recursion operator"?"""
4532 try:
4533 next_item = next(iter)
4534@@ -140,7 +143,7 @@
4535 return f(next_item, Iter.foldr(f, default, iter))
4536
4537 @staticmethod
4538- def foldl(f, default, iter): # @NoSelf
4539+ def foldl(f, default, iter): # pylint: disable=redefined-builtin
4540 u"""the fundamental list iteration operator.."""
4541 while 1:
4542 try:
4543@@ -150,7 +153,7 @@
4544 default = f(default, next_item)
4545
4546 @staticmethod
4547- def multiplex(iter, num_of_forks, final_func=None, closing_func=None): # @NoSelf
4548+ def multiplex(iter, num_of_forks, final_func=None, closing_func=None): # pylint: disable=redefined-builtin
4549 u"""Split a single iterater into a number of streams
4550
4551 The return val will be a list with length num_of_forks, each
4552@@ -223,14 +226,14 @@
4553 to split it into 2. By profiling, this is a time sensitive class.
4554
4555 """
4556- def __init__(self, iter):
4557+ def __init__(self, iter): # pylint: disable=redefined-builtin
4558 self.a_leading_by = 0 # How many places a is ahead of b
4559 self.buffer = []
4560 self.iter = iter
4561
4562 def yielda(self):
4563 u"""Return first iterator"""
4564- buf, iter = self.buffer, self.iter
4565+ buf, iter = self.buffer, self.iter # pylint: disable=redefined-builtin
4566 while(1):
4567 if self.a_leading_by >= 0:
4568 # a is in front, add new element
4569@@ -247,7 +250,7 @@
4570
4571 def yieldb(self):
4572 u"""Return second iterator"""
4573- buf, iter = self.buffer, self.iter
4574+ buf, iter = self.buffer, self.iter # pylint: disable=redefined-builtin
4575 while(1):
4576 if self.a_leading_by <= 0:
4577 # b is in front, add new element
4578@@ -407,7 +410,7 @@
4579 assert branch.finished
4580 pass
4581
4582- def can_fast_process(self, *args):
4583+ def can_fast_process(self, *args): # pylint: disable=unused-argument
4584 u"""True if object can be processed without new branch (stub)"""
4585 return None
4586
4587
4588=== modified file 'duplicity/librsync.py'
4589--- duplicity/librsync.py 2018-11-29 19:00:15 +0000
4590+++ duplicity/librsync.py 2020-03-22 12:35:54 +0000
4591@@ -1,4 +1,4 @@
4592-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
4593+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4594 #
4595 # Copyright 2002 Ben Escoto <ben@emerose.org>
4596 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
4597@@ -26,13 +26,14 @@
4598
4599 """
4600
4601+from builtins import object
4602 from builtins import str
4603-from builtins import object
4604+
4605+import array
4606 import os
4607 import sys
4608+
4609 from . import _librsync
4610-import types
4611-import array
4612
4613 if os.environ.get(u'READTHEDOCS') == u'True':
4614 import mock # pylint: disable=import-error
4615
4616=== modified file 'duplicity/log.py'
4617--- duplicity/log.py 2019-08-08 19:31:58 +0000
4618+++ duplicity/log.py 2020-03-22 12:35:54 +0000
4619@@ -1,4 +1,4 @@
4620-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
4621+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4622 #
4623 # Copyright 2002 Ben Escoto <ben@emerose.org>
4624 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
4625
4626=== modified file 'duplicity/manifest.py'
4627--- duplicity/manifest.py 2019-06-29 21:05:41 +0000
4628+++ duplicity/manifest.py 2020-03-22 12:35:54 +0000
4629@@ -1,4 +1,4 @@
4630-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
4631+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4632 #
4633 # Copyright 2002 Ben Escoto <ben@emerose.org>
4634 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
4635@@ -21,7 +21,6 @@
4636
4637 u"""Create and edit manifest for session contents"""
4638
4639-from builtins import filter
4640 from builtins import map
4641 from builtins import range
4642 from builtins import object
4643@@ -29,9 +28,9 @@
4644 import re
4645 import sys
4646
4647-from duplicity import globals
4648+from duplicity import config
4649 from duplicity import log
4650-from duplicity import globals
4651+from duplicity import config
4652 from duplicity import util
4653
4654
4655@@ -64,14 +63,14 @@
4656
4657 def set_dirinfo(self):
4658 u"""
4659- Set information about directory from globals,
4660+ Set information about directory from config,
4661 and write to manifest file.
4662
4663 @rtype: Manifest
4664 @return: manifest
4665 """
4666- self.hostname = globals.hostname
4667- self.local_dirname = globals.local_path.name # @UndefinedVariable
4668+ self.hostname = config.hostname
4669+ self.local_dirname = config.local_path.name
4670 if self.fh:
4671 if self.hostname:
4672 self.fh.write(b"Hostname %s\n" % self.hostname.encode())
4673@@ -89,23 +88,23 @@
4674 @rtype: string
4675 @return: None or error message
4676 """
4677- if globals.allow_source_mismatch:
4678+ if config.allow_source_mismatch:
4679 return
4680
4681- if self.hostname and self.hostname != globals.hostname:
4682+ if self.hostname and self.hostname != config.hostname:
4683 errmsg = _(u"Fatal Error: Backup source host has changed.\n"
4684 u"Current hostname: %s\n"
4685- u"Previous hostname: %s") % (globals.hostname, self.hostname)
4686+ u"Previous hostname: %s") % (config.hostname, self.hostname)
4687 code = log.ErrorCode.hostname_mismatch
4688- code_extra = u"%s %s" % (util.escape(globals.hostname), util.escape(self.hostname))
4689+ code_extra = u"%s %s" % (util.escape(config.hostname), util.escape(self.hostname))
4690
4691- elif (self.local_dirname and self.local_dirname != globals.local_path.name): # @UndefinedVariable
4692+ elif (self.local_dirname and self.local_dirname != config.local_path.name):
4693 errmsg = _(u"Fatal Error: Backup source directory has changed.\n"
4694 u"Current directory: %s\n"
4695- u"Previous directory: %s") % (globals.local_path.name, self.local_dirname) # @UndefinedVariable
4696+ u"Previous directory: %s") % (config.local_path.name, self.local_dirname)
4697 code = log.ErrorCode.source_dir_mismatch
4698- code_extra = u"%s %s" % (util.escape(globals.local_path.name),
4699- util.escape(self.local_dirname)) # @UndefinedVariable
4700+ code_extra = u"%s %s" % (util.escape(config.local_path.name),
4701+ util.escape(self.local_dirname))
4702 else:
4703 return
4704
4705@@ -223,7 +222,7 @@
4706
4707 # Get file changed list - not needed if --file-changed not present
4708 filecount = 0
4709- if globals.file_changed is not None:
4710+ if config.file_changed is not None:
4711 filelist_regexp = re.compile(b"(^|\\n)filelist\\s([0-9]+)\\n(.*?)(\\nvolume\\s|$)", re.I | re.S)
4712 match = filelist_regexp.search(s)
4713 if match:
4714
4715=== modified file 'duplicity/patchdir.py'
4716--- duplicity/patchdir.py 2019-06-29 21:05:41 +0000
4717+++ duplicity/patchdir.py 2020-03-22 12:35:54 +0000
4718@@ -1,4 +1,4 @@
4719-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
4720+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4721 #
4722 # Copyright 2002 Ben Escoto <ben@emerose.org>
4723 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
4724@@ -20,26 +20,24 @@
4725 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
4726
4727 from builtins import map
4728-from builtins import filter
4729 from builtins import next
4730+from builtins import object
4731 from builtins import range
4732-from builtins import object
4733
4734-import re # @UnusedImport
4735-import types
4736-import os
4737+import re
4738 import sys
4739 import tempfile
4740
4741-from duplicity import tarfile # @UnusedImport
4742-from duplicity import librsync # @UnusedImport
4743-from duplicity import log # @UnusedImport
4744 from duplicity import diffdir
4745+from duplicity import config
4746+from duplicity import librsync
4747+from duplicity import log
4748 from duplicity import selection
4749+from duplicity import tarfile
4750 from duplicity import tempdir
4751-from duplicity import util # @UnusedImport
4752-from duplicity.path import * # @UnusedWildImport
4753-from duplicity.lazy import * # @UnusedWildImport
4754+from duplicity import util
4755+from duplicity.lazy import * # pylint: disable=unused-wildcard-import,redefined-builtin
4756+from duplicity.path import * # pylint: disable=unused-wildcard-import,redefined-builtin
4757
4758 u"""Functions for patching of directories"""
4759
4760@@ -236,8 +234,7 @@
4761 u"""Add next chunk to buffer"""
4762 if self.at_end:
4763 return None
4764- index, difftype, multivol = get_index_from_tarinfo( # @UnusedVariable
4765- self.tarinfo_list[0])
4766+ index, difftype, multivol = get_index_from_tarinfo(self.tarinfo_list[0])
4767 if not multivol or index != self.index:
4768 # we've moved on
4769 # the following communicates next tarinfo to difftar2path_iter
4770@@ -295,7 +292,7 @@
4771 if self.dir_diff_ropath:
4772 self.dir_diff_ropath.copy_attribs(self.dir_basis_path)
4773
4774- def can_fast_process(self, index, basis_path, diff_ropath):
4775+ def can_fast_process(self, index, basis_path, diff_ropath): # pylint: disable=unused-argument
4776 u"""No need to recurse if diff_ropath isn't a directory"""
4777 return not (diff_ropath and diff_ropath.isdir())
4778
4779@@ -608,7 +605,7 @@
4780 ropath.copy(new_path)
4781
4782 self.dir_new_path = self.base_path.new_index(index)
4783- if self.dir_new_path.exists() and not globals.force:
4784+ if self.dir_new_path.exists() and not config.force:
4785 # base may exist, but nothing else
4786 assert index == (), index
4787 else:
4788@@ -620,7 +617,7 @@
4789 if self.dir_diff_ropath:
4790 self.dir_diff_ropath.copy_attribs(self.dir_new_path)
4791
4792- def can_fast_process(self, index, ropath):
4793+ def can_fast_process(self, index, ropath): # pylint: disable=unused-argument
4794 u"""Can fast process (no recursion) if ropath isn't a directory"""
4795 log.Info(_(u"Writing %s of type %s") %
4796 (util.fsdecode(ropath.get_relative_path()), ropath.type),
4797
4798=== modified file 'duplicity/path.py'
4799--- duplicity/path.py 2020-02-06 15:27:43 +0000
4800+++ duplicity/path.py 2020-03-22 12:35:54 +0000
4801@@ -1,4 +1,4 @@
4802-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
4803+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4804 #
4805 # Copyright 2002 Ben Escoto <ben@emerose.org>
4806 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
4807@@ -29,29 +29,28 @@
4808 from __future__ import print_function
4809 from future import standard_library
4810 standard_library.install_aliases()
4811-from builtins import filter
4812 from builtins import str
4813 from builtins import object
4814
4815-import stat
4816 import errno
4817+import gzip
4818+import os
4819+import re
4820+import shutil
4821 import socket
4822+import stat
4823 import time
4824-import re
4825-import gzip
4826-import shutil
4827-import sys
4828
4829-from duplicity import tarfile
4830+from duplicity import cached_ops
4831+from duplicity import config
4832+from duplicity import dup_time
4833 from duplicity import file_naming
4834-from duplicity import globals
4835 from duplicity import gpg
4836+from duplicity import librsync
4837+from duplicity import log
4838+from duplicity import tarfile
4839 from duplicity import util
4840-from duplicity import librsync
4841-from duplicity import log # @UnusedImport
4842-from duplicity import dup_time
4843-from duplicity import cached_ops
4844-from duplicity.lazy import * # @UnusedWildImport
4845+from duplicity.lazy import * # pylint: disable=unused-wildcard-import,redefined-builtin
4846
4847 _copy_blocksize = 64 * 1024
4848 _tmp_path_counter = 1
4849@@ -75,7 +74,7 @@
4850 have a name. They are required to be indexed though.
4851
4852 """
4853- def __init__(self, index, stat=None):
4854+ def __init__(self, index, stat=None): # pylint: disable=unused-argument
4855 u"""ROPath initializer"""
4856 self.opened, self.fileobj = None, None
4857 self.index = index
4858@@ -99,7 +98,7 @@
4859 elif stat.S_ISSOCK(st_mode):
4860 raise PathException(util.fsdecode(self.get_relative_path()) +
4861 u"is a socket, unsupported by tar")
4862- self.type = u"sock"
4863+ self.type = u"sock" # pylint: disable=unreachable
4864 elif stat.S_ISCHR(st_mode):
4865 self.type = u"chr"
4866 elif stat.S_ISBLK(st_mode):
4867@@ -196,7 +195,7 @@
4868 def init_from_tarinfo(self, tarinfo):
4869 u"""Set data from tarinfo object (part of tarfile module)"""
4870 # Set the typepp
4871- type = tarinfo.type
4872+ type = tarinfo.type # pylint: disable=redefined-builtin
4873 if type == tarfile.REGTYPE or type == tarfile.AREGTYPE:
4874 self.type = u"reg"
4875 elif type == tarfile.LNKTYPE:
4876@@ -228,13 +227,13 @@
4877 --numeric-owner is set
4878 """
4879 try:
4880- if globals.numeric_owner:
4881+ if config.numeric_owner:
4882 raise KeyError
4883 self.stat.st_uid = cached_ops.getpwnam(tarinfo.uname)[2]
4884 except KeyError:
4885 self.stat.st_uid = tarinfo.uid
4886 try:
4887- if globals.numeric_owner:
4888+ if config.numeric_owner:
4889 raise KeyError
4890 self.stat.st_gid = cached_ops.getgrnam(tarinfo.gname)[2]
4891 except KeyError:
4892@@ -504,15 +503,15 @@
4893 regex_chars_to_quote = re.compile(u"[\\\\\\\"\\$`]")
4894
4895 def rename_index(self, index):
4896- if not globals.rename or not index:
4897+ if not config.rename or not index:
4898 return index # early exit
4899 path = os.path.normcase(os.path.join(*index))
4900 tail = []
4901- while path and path not in globals.rename:
4902+ while path and path not in config.rename:
4903 path, extra = os.path.split(path)
4904 tail.insert(0, extra)
4905 if path:
4906- return globals.rename[path].split(os.sep) + tail
4907+ return config.rename[path].split(os.sep) + tail
4908 else:
4909 return index # no rename found
4910
4911@@ -543,7 +542,7 @@
4912 try:
4913 # We may be asked to look at the target of symlinks rather than
4914 # the link itself.
4915- if globals.copy_links:
4916+ if config.copy_links:
4917 self.stat = os.stat(self.name)
4918 else:
4919 self.stat = os.lstat(self.name)
4920@@ -591,7 +590,7 @@
4921 result = open(self.name, mode)
4922 return result
4923
4924- def makedev(self, type, major, minor):
4925+ def makedev(self, type, major, minor): # pylint: disable=redefined-builtin
4926 u"""Make a device file with specified type, major/minor nums"""
4927 cmdlist = [u'mknod', self.name, type, str(major), str(minor)]
4928 if os.spawnvp(os.P_WAIT, u'mknod', cmdlist) != 0:
4929@@ -604,7 +603,7 @@
4930 try:
4931 os.makedirs(self.name)
4932 except OSError:
4933- if (not globals.force):
4934+ if (not config.force):
4935 raise PathException(u"Error creating directory %s" % self.uc_name, 7)
4936 self.setdata()
4937
4938@@ -786,7 +785,7 @@
4939 Return fileobj with appropriate encryption/compression
4940
4941 If encryption is specified but no gpg_profile, use
4942- globals.default_profile.
4943+ config.default_profile.
4944 """
4945 assert not self.opened and not self.fileobj
4946 assert not (self.pr.encrypted and self.pr.compressed)
4947@@ -797,7 +796,7 @@
4948 return gzip.GzipFile(self.name, mode)
4949 elif self.pr.encrypted:
4950 if not gpg_profile:
4951- gpg_profile = globals.gpg_profile
4952+ gpg_profile = config.gpg_profile
4953 if mode == u"rb":
4954 return gpg.GPGFile(False, self, gpg_profile)
4955 elif mode == u"wb":
4956@@ -808,14 +807,14 @@
4957
4958 class PathDeleter(ITRBranch):
4959 u"""Delete a directory. Called by Path.deltree"""
4960- def start_process(self, index, path):
4961+ def start_process(self, index, path): # pylint: disable=unused-argument
4962 self.path = path
4963
4964 def end_process(self):
4965 self.path.delete()
4966
4967- def can_fast_process(self, index, path):
4968+ def can_fast_process(self, index, path): # pylint: disable=unused-argument
4969 return not path.isdir()
4970
4971- def fast_process(self, index, path):
4972+ def fast_process(self, index, path): # pylint: disable=unused-argument
4973 path.delete()
4974
4975=== modified file 'duplicity/progress.py'
4976--- duplicity/progress.py 2019-08-08 19:31:58 +0000
4977+++ duplicity/progress.py 2020-03-22 12:35:54 +0000
4978@@ -1,4 +1,4 @@
4979-# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
4980+# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4; encoding:utf8 -*-
4981 #
4982 # Copyright 2002 Ben Escoto <ben@emerose.org>
4983 # Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
4984@@ -34,17 +34,17 @@
4985
4986 from __future__ import absolute_import
4987 from __future__ import division
4988-
4989 from builtins import object
4990+
4991+from datetime import datetime, timedelta
4992 import collections as sys_collections
4993 import math
4994+import pickle
4995 import threading
4996 import time
4997-from datetime import datetime, timedelta
4998-from duplicity import globals
4999+
5000+from duplicity import config
The diff has been truncated for viewing.

Subscribers

People subscribed via source and target branches