Merge ~cjwatson/launchpad:black-utilities into launchpad:master

Proposed by Colin Watson
Status: Merged
Approved by: Colin Watson
Approved revision: 20b5d69ce2c296606f787b186d993e10cf4a4d89
Merge reported by: Otto Co-Pilot
Merged at revision: not available
Proposed branch: ~cjwatson/launchpad:black-utilities
Merge into: launchpad:master
Diff against target: 4577 lines (+1423/-925)
36 files modified
.git-blame-ignore-revs (+2/-0)
.pre-commit-config.yaml (+8/-0)
utilities/audit-security-settings.py (+5/-7)
utilities/community-contributions.py (+1/-5)
utilities/create-lp-wadl-and-apidoc.py (+42/-34)
utilities/generate-external-bug-status-docs (+41/-31)
utilities/get-branch-info (+10/-9)
utilities/js-deps (+0/-1)
utilities/link-external-sourcecode (+57/-49)
utilities/link-system-packages.py (+8/-5)
utilities/list-pages (+39/-37)
utilities/local-latency (+21/-20)
utilities/lsconf.py (+44/-27)
utilities/make-dev-certificate (+29/-19)
utilities/make-dummy-hosted-branches (+8/-6)
utilities/make-lp-user (+47/-33)
utilities/make-requirements.py (+27/-15)
utilities/massage-bug-import-xml (+90/-65)
utilities/paste (+70/-41)
utilities/pgbackup.py (+36/-33)
utilities/pgcreate.py (+7/-7)
utilities/pgkillactive.py (+61/-28)
utilities/pgkillidle.py (+58/-26)
utilities/pglogwatch.py (+73/-43)
utilities/pgmassacre.py (+64/-33)
utilities/pgstats.py (+68/-50)
utilities/publish-to-swift (+45/-24)
utilities/report-database-stats.py (+173/-95)
utilities/roundup-sniffer.py (+59/-41)
utilities/run-as (+0/-1)
utilities/script_commands.py (+17/-11)
utilities/shhh.py (+2/-5)
utilities/smoke-test-librarian.py (+2/-3)
utilities/soyuz-sampledata-setup.py (+186/-101)
utilities/update-copyright (+23/-18)
utilities/update-sourcecode (+0/-2)
Reviewer: Colin Watson — Review type: community — Status: Approve
Review via email: mp+427294@code.launchpad.net

Commit message

utilities: Apply black

To post a comment you must log in.
Revision history for this message
Colin Watson (cjwatson) wrote :

Self-approving based on previous discussions.

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
2index 2c740b8..40d232e 100644
3--- a/.git-blame-ignore-revs
4+++ b/.git-blame-ignore-revs
5@@ -98,3 +98,5 @@ b56a741985ca580c281f142bea589b1ef05d3e93
6 0bf877facbd96ece340dd26a7429ebbf0fb9b65a
7 # apply black to {cronscripts,scripts}
8 86d834967ddae3eecd13deda5ac9eefea538195d
9+# apply black to utilities
10+35ac52c1c63d184b4660abbab4bead59408d3937
11diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
12index 4cc7030..b925ca3 100644
13--- a/.pre-commit-config.yaml
14+++ b/.pre-commit-config.yaml
15@@ -67,7 +67,13 @@ repos:
16 |xmlrpc
17 )
18 |scripts
19+ |utilities
20 )/
21+ exclude: |
22+ (?x)^(
23+ utilities/community-contributions\.py
24+ |utilities/update-sourcecode
25+ )$
26 - repo: https://github.com/PyCQA/isort
27 rev: 5.9.2
28 hooks:
29@@ -110,6 +116,7 @@ repos:
30 |xmlrpc
31 )
32 |scripts
33+ |utilities
34 )/
35 - id: isort
36 alias: isort-black
37@@ -143,6 +150,7 @@ repos:
38 |xmlrpc
39 )
40 |scripts
41+ |utilities
42 )/
43 - repo: https://github.com/PyCQA/flake8
44 rev: 3.9.2
45diff --git a/utilities/audit-security-settings.py b/utilities/audit-security-settings.py
46index 41e133f..5a5aa20 100755
47--- a/utilities/audit-security-settings.py
48+++ b/utilities/audit-security-settings.py
49@@ -18,11 +18,8 @@ import os
50
51 from lp.scripts.utilities.settingsauditor import SettingsAuditor
52
53-
54-BRANCH_ROOT = os.path.split(
55- os.path.dirname(os.path.abspath(__file__)))[0]
56-SECURITY_PATH = os.path.join(
57- BRANCH_ROOT, 'database', 'schema', 'security.cfg')
58+BRANCH_ROOT = os.path.split(os.path.dirname(os.path.abspath(__file__)))[0]
59+SECURITY_PATH = os.path.join(BRANCH_ROOT, "database", "schema", "security.cfg")
60
61
62 def main():
63@@ -30,9 +27,10 @@ def main():
64 data = f.read()
65 auditor = SettingsAuditor(data)
66 settings = auditor.audit()
67- with open(SECURITY_PATH, 'w') as f:
68+ with open(SECURITY_PATH, "w") as f:
69 f.write(settings)
70 print(auditor.error_data)
71
72-if __name__ == '__main__':
73+
74+if __name__ == "__main__":
75 main()
76diff --git a/utilities/community-contributions.py b/utilities/community-contributions.py
77index 8080d94..edaac47 100755
78--- a/utilities/community-contributions.py
79+++ b/utilities/community-contributions.py
80@@ -39,10 +39,7 @@ Options:
81 # For understanding the code, you may find it helpful to see
82 # bzrlib/log.py and http://bazaar-vcs.org/Integrating_with_Bazaar.
83
84-from __future__ import (
85- absolute_import,
86- print_function,
87- )
88+from __future__ import absolute_import, print_function
89
90 import getopt
91 import re
92@@ -52,7 +49,6 @@ from bzrlib import log
93 from bzrlib.branch import Branch
94 from bzrlib.osutils import format_date
95
96-
97 try:
98 from editmoin import editshortcut
99 except ImportError:
100diff --git a/utilities/create-lp-wadl-and-apidoc.py b/utilities/create-lp-wadl-and-apidoc.py
101index ec91422..9bd13d8 100755
102--- a/utilities/create-lp-wadl-and-apidoc.py
103+++ b/utilities/create-lp-wadl-and-apidoc.py
104@@ -13,14 +13,14 @@ Example:
105
106 import _pythonpath # noqa: F401
107
108-from multiprocessing import Process
109 import optparse
110 import os
111 import subprocess
112 import sys
113+from multiprocessing import Process
114
115-from lazr.restful.interfaces import IWebServiceConfiguration
116 import six
117+from lazr.restful.interfaces import IWebServiceConfiguration
118 from zope.component import getUtility
119 from zope.pagetemplate.pagetemplatefile import PageTemplateFile
120
121@@ -29,27 +29,27 @@ from lp.services.webservice.wadl import (
122 generate_html,
123 generate_json,
124 generate_wadl,
125- )
126+)
127 from lp.systemhomes import WebServiceApplication
128
129
130 def write(filename, content, timestamp):
131 """Replace the named file with the given string."""
132- with open(filename, 'wb') as f:
133+ with open(filename, "wb") as f:
134 f.write(six.ensure_binary(content))
135 os.utime(filename, (timestamp, timestamp)) # (atime, mtime)
136
137
138 def make_files(directory, version, timestamp, force):
139 version_directory = os.path.join(directory, version)
140- base_filename = os.path.join(version_directory, os.environ['LPCONFIG'])
141- wadl_filename = base_filename + '.wadl'
142- json_filename = base_filename + '.json'
143+ base_filename = os.path.join(version_directory, os.environ["LPCONFIG"])
144+ wadl_filename = base_filename + ".wadl"
145+ json_filename = base_filename + ".json"
146 html_filename = os.path.join(directory, version + ".html")
147- wadl_index = os.path.join(version_directory, 'index.wadl')
148- json_index = os.path.join(version_directory, 'index.json')
149- html_index = os.path.join(version_directory, 'index.html')
150- brokenwadl_index = os.path.join(version_directory, 'index.brokenwadl')
151+ wadl_index = os.path.join(version_directory, "index.wadl")
152+ json_index = os.path.join(version_directory, "index.json")
153+ html_index = os.path.join(version_directory, "index.html")
154+ brokenwadl_index = os.path.join(version_directory, "index.brokenwadl")
155
156 # Make sure we have our dir.
157 if not os.path.exists(version_directory):
158@@ -58,16 +58,15 @@ def make_files(directory, version, timestamp, force):
159
160 # Make wadl and json files.
161 for src, dest, gen, name in (
162- (wadl_filename, wadl_index, generate_wadl, 'WADL'),
163- (json_filename, json_index, generate_json, 'JSON')):
164+ (wadl_filename, wadl_index, generate_wadl, "WADL"),
165+ (json_filename, json_index, generate_json, "JSON"),
166+ ):
167 # If the src doesn't exist or we are forced to regenerate it...
168- if (not os.path.exists(src) or force):
169- print("Writing %s for version %s to %s." % (
170- name, version, src))
171+ if not os.path.exists(src) or force:
172+ print("Writing %s for version %s to %s." % (name, version, src))
173 write(src, gen(version), timestamp)
174 else:
175- print("Skipping already present %s file: %s" % (
176- name, src))
177+ print("Skipping already present %s file: %s" % (name, src))
178 # Make "index" symlinks, removing any preexisting ones.
179 if os.path.exists(dest):
180 os.remove(dest)
181@@ -104,11 +103,13 @@ def make_files(directory, version, timestamp, force):
182 # put the HTML in the same directory as the WADL.
183 # If the HTML file doesn't exist or we're being forced to regenerate
184 # it...
185- if (not os.path.exists(html_filename) or force):
186- print("Writing apidoc for version %s to %s" % (
187- version, html_filename))
188- write(html_filename, generate_html(wadl_filename,
189- suppress_stderr=False), timestamp)
190+ if not os.path.exists(html_filename) or force:
191+ print("Writing apidoc for version %s to %s" % (version, html_filename))
192+ write(
193+ html_filename,
194+ generate_html(wadl_filename, suppress_stderr=False),
195+ timestamp,
196+ )
197 else:
198 print("Skipping already present HTML file:", html_filename)
199
200@@ -117,7 +118,8 @@ def make_files(directory, version, timestamp, force):
201 if not os.path.exists(html_index):
202 os.symlink(
203 os.path.join(os.path.pardir, os.path.basename(html_filename)),
204- html_index)
205+ html_index,
206+ )
207
208
209 def main(directory, force=False):
210@@ -127,25 +129,29 @@ def main(directory, force=False):
211
212 # First, create an index.html with links to all the HTML
213 # documentation files we're about to generate.
214- template_file = 'apidoc-index.pt'
215+ template_file = "apidoc-index.pt"
216 template = PageTemplateFile(template_file)
217 index_filename = os.path.join(directory, "index.html")
218 print("Writing index:", index_filename)
219- f = open(index_filename, 'w')
220+ f = open(index_filename, "w")
221 f.write(template(config=config))
222
223 # Get the time of the last commit. We will use this as the mtime for the
224 # generated files so that we can safely use it as part of Apache's etag
225 # generation in the face of multiple servers/filesystems.
226- timestamp = int(subprocess.check_output(
227- ["git", "log", "-1", "--format=%ct", "HEAD"],
228- universal_newlines=True))
229+ timestamp = int(
230+ subprocess.check_output(
231+ ["git", "log", "-1", "--format=%ct", "HEAD"],
232+ universal_newlines=True,
233+ )
234+ )
235
236 # Start a process to build each set of WADL and HTML files.
237 processes = []
238 for version in config.active_versions:
239- p = Process(target=make_files,
240- args=(directory, version, timestamp, force))
241+ p = Process(
242+ target=make_files, args=(directory, version, timestamp, force)
243+ )
244 p.start()
245 processes.append(p)
246
247@@ -160,8 +166,10 @@ def parse_args(args):
248 usage = "usage: %prog [options] DIR"
249 parser = optparse.OptionParser(usage=usage)
250 parser.add_option(
251- "--force", action="store_true",
252- help="Replace any already-existing files.")
253+ "--force",
254+ action="store_true",
255+ help="Replace any already-existing files.",
256+ )
257 parser.set_defaults(force=False)
258 options, args = parser.parse_args(args)
259 if len(args) != 2:
260@@ -170,6 +178,6 @@ def parse_args(args):
261 return options, args
262
263
264-if __name__ == '__main__':
265+if __name__ == "__main__":
266 options, args = parse_args(sys.argv)
267 sys.exit(main(args[1], options.force))
268diff --git a/utilities/generate-external-bug-status-docs b/utilities/generate-external-bug-status-docs
269index 94ad229..cbe193e 100755
270--- a/utilities/generate-external-bug-status-docs
271+++ b/utilities/generate-external-bug-status-docs
272@@ -21,10 +21,10 @@
273 import _pythonpath # noqa: F401
274
275 import codecs
276+import sys
277 from datetime import datetime
278 from itertools import chain
279 from optparse import OptionParser
280-import sys
281
282 from lp.bugs.externalbugtracker import BUG_TRACKER_CLASSES
283
284@@ -32,16 +32,17 @@ from lp.bugs.externalbugtracker import BUG_TRACKER_CLASSES
285 def generate_blank_lines(num):
286 """Generate `num` blank lines."""
287 for i in range(num):
288- yield ''
289+ yield ""
290
291
292 def generate_page_header():
293 """Generate the header for the page."""
294- yield '<<Anchor(StatusTables)>>'
295- yield '== Status mapping tables =='
296- yield ''
297- yield 'Last generated: %s.' % (
298- datetime.utcnow().strftime('%Y-%m-%d %H:%M UTC'),)
299+ yield "<<Anchor(StatusTables)>>"
300+ yield "== Status mapping tables =="
301+ yield ""
302+ yield "Last generated: %s." % (
303+ datetime.utcnow().strftime("%Y-%m-%d %H:%M UTC"),
304+ )
305
306
307 def generate_table_header(typ):
308@@ -49,8 +50,8 @@ def generate_table_header(typ):
309
310 :param typ: A member of `BugTrackerType`.
311 """
312- yield '<<Anchor(%s)>>' % (typ.name,)
313- yield '=== %s ===' % (typ.title,)
314+ yield "<<Anchor(%s)>>" % (typ.name,)
315+ yield "=== %s ===" % (typ.title,)
316
317
318 def generate_table_grid(lookup, titles):
319@@ -69,44 +70,46 @@ def generate_table(typ, cls):
320 lookup = cls._status_lookup
321
322 # Find or fabricate titles.
323- titles = getattr(cls, '_status_lookup_titles', None)
324+ titles = getattr(cls, "_status_lookup_titles", None)
325 if titles is None:
326- titles = ['Key %d' % (i + 1) for i in range(lookup.max_depth)]
327+ titles = ["Key %d" % (i + 1) for i in range(lookup.max_depth)]
328 else:
329 titles = list(titles)
330- titles.append('Launchpad status')
331+ titles.append("Launchpad status")
332
333 # Format the table.
334 return chain(
335 generate_table_header(typ),
336 generate_blank_lines(1),
337- generate_table_grid(lookup, titles))
338+ generate_table_grid(lookup, titles),
339+ )
340
341
342 def generate_documentable_classes():
343 """Yield each class that has a mapping table defined."""
344 for typ, cls in BUG_TRACKER_CLASSES.items():
345- if getattr(cls, '_status_lookup', None) is not None:
346+ if getattr(cls, "_status_lookup", None) is not None:
347 yield typ, cls
348
349
350 def generate_tables():
351 """Generate all the tables."""
352 documentable_classes = sorted(
353- generate_documentable_classes(),
354- key=(lambda typ_cls: typ_cls[0].title))
355+ generate_documentable_classes(), key=(lambda typ_cls: typ_cls[0].title)
356+ )
357 return chain(
358- *(chain(generate_table(typ, cls),
359- generate_blank_lines(2))
360- for (typ, cls) in documentable_classes))
361+ *(
362+ chain(generate_table(typ, cls), generate_blank_lines(2))
363+ for (typ, cls) in documentable_classes
364+ )
365+ )
366
367
368 def generate_page():
369 """Generate the MoinMoin-style page."""
370 return chain(
371- generate_page_header(),
372- generate_blank_lines(2),
373- generate_tables())
374+ generate_page_header(), generate_blank_lines(2), generate_tables()
375+ )
376
377
378 def write_page(outfile):
379@@ -115,22 +118,29 @@ def write_page(outfile):
380 :param outfile: A `file`-like object.
381 """
382 # By default, encode using UTF-8.
383- write = codecs.getwriter('UTF-8')(outfile).write
384+ write = codecs.getwriter("UTF-8")(outfile).write
385 for line in generate_page():
386 write(line)
387- write('\n')
388+ write("\n")
389
390
391 def get_option_parser():
392 """Return the option parser for this program."""
393 usage = "Usage: %prog [options]"
394 parser = OptionParser(
395- usage=usage, description=(
396+ usage=usage,
397+ description=(
398 "Generates MoinMoin-style tables to document the mapping of "
399- "remote bug statuses to Launchpad statuses."))
400+ "remote bug statuses to Launchpad statuses."
401+ ),
402+ )
403 parser.add_option(
404- "-o", "--file-out", dest="outfile", default='-', help=(
405- "write data to OUTFILE"))
406+ "-o",
407+ "--file-out",
408+ dest="outfile",
409+ default="-",
410+ help=("write data to OUTFILE"),
411+ )
412 return parser
413
414
415@@ -139,12 +149,12 @@ def main(args):
416 (options, args) = parser.parse_args(args)
417 if len(args) > 0:
418 parser.error("Incorrect number of arguments.")
419- if options.outfile == '-':
420+ if options.outfile == "-":
421 outfile = sys.stdout
422 else:
423- outfile = open(options.outfile, 'wb')
424+ outfile = open(options.outfile, "wb")
425 write_page(outfile)
426
427
428-if __name__ == '__main__':
429+if __name__ == "__main__":
430 sys.exit(main(sys.argv[1:]))
431diff --git a/utilities/get-branch-info b/utilities/get-branch-info
432index 911d8f4..cea827a 100755
433--- a/utilities/get-branch-info
434+++ b/utilities/get-branch-info
435@@ -32,18 +32,19 @@ def main(args):
436 return
437 print(branch.bzr_identity)
438 print()
439- print('Unique name:', branch.unique_name)
440- print('ID:', branch.id)
441- print('Private:', branch.private)
442- print('Type:', branch.branch_type)
443- print('URL:', canonical_url(branch))
444+ print("Unique name:", branch.unique_name)
445+ print("ID:", branch.id)
446+ print("Private:", branch.private)
447+ print("Type:", branch.branch_type)
448+ print("URL:", canonical_url(branch))
449 if branch.url is not None:
450- print('External URL:', branch.url)
451+ print("External URL:", branch.url)
452 branch_path = branch_id_to_path(branch.id)
453 mirrored_path = join(
454- config.codehosting.mirrored_branches_root, branch_path)
455- print('Mirrored copy:', mirrored_path)
456+ config.codehosting.mirrored_branches_root, branch_path
457+ )
458+ print("Mirrored copy:", mirrored_path)
459
460
461-if __name__ == '__main__':
462+if __name__ == "__main__":
463 main(sys.argv)
464diff --git a/utilities/js-deps b/utilities/js-deps
465index 30d9b36..06cd626 100755
466--- a/utilities/js-deps
467+++ b/utilities/js-deps
468@@ -4,5 +4,4 @@ import _pythonpath # noqa: F401
469
470 from convoy.meta import main
471
472-
473 main()
474diff --git a/utilities/link-external-sourcecode b/utilities/link-external-sourcecode
475index 400ff4d..bc2a58b 100755
476--- a/utilities/link-external-sourcecode
477+++ b/utilities/link-external-sourcecode
478@@ -4,34 +4,18 @@
479 # Affero General Public License version 3 (see the file LICENSE).
480
481 import optparse
482-from os import (
483- curdir,
484- listdir,
485- makedirs,
486- symlink,
487- unlink,
488- )
489-from os.path import (
490- abspath,
491- basename,
492- exists,
493- islink,
494- join,
495- realpath,
496- relpath,
497- )
498 import subprocess
499-from sys import (
500- stderr,
501- stdout,
502- )
503+from os import curdir, listdir, makedirs, symlink, unlink
504+from os.path import abspath, basename, exists, islink, join, realpath, relpath
505+from sys import stderr, stdout
506
507
508 def get_main_worktree(branch_dir):
509 """Return the main worktree directory, otherwise `None`."""
510 worktree_list = subprocess.check_output(
511- ['git', 'worktree', 'list', '--porcelain'], universal_newlines=True)
512- main_worktree = worktree_list.splitlines()[0].split(' ', 1)[1]
513+ ["git", "worktree", "list", "--porcelain"], universal_newlines=True
514+ )
515+ main_worktree = worktree_list.splitlines()[0].split(" ", 1)[1]
516 if realpath(main_worktree) != realpath(branch_dir):
517 return main_worktree
518 return None
519@@ -60,38 +44,58 @@ def link(source, destination):
520 symlink(source, destination)
521 except OSError as error:
522 stderr.write(
523- ' Error linking %s: %s\n' % (basename(destination), error))
524+ " Error linking %s: %s\n" % (basename(destination), error)
525+ )
526 else:
527 if options.verbose:
528- stdout.write('%s -> %s\n' % (relpath(destination), source))
529+ stdout.write("%s -> %s\n" % (relpath(destination), source))
530
531
532-if __name__ == '__main__':
533+if __name__ == "__main__":
534 parser = optparse.OptionParser(
535 usage="%prog [options] [parent]",
536 description=(
537 "Add a symlink in <target>/sourcecode for each corresponding "
538- "file in <parent>/sourcecode."),
539+ "file in <parent>/sourcecode."
540+ ),
541 epilog=(
542 "Most of the time this does the right thing if run "
543- "with no arguments."),
544- add_help_option=False)
545+ "with no arguments."
546+ ),
547+ add_help_option=False,
548+ )
549 parser.add_option(
550- '-p', '--parent', dest='parent', default=None,
551- help=("The directory of the parent tree. If not specified, "
552- "the main Git worktree."),
553- metavar="DIR")
554+ "-p",
555+ "--parent",
556+ dest="parent",
557+ default=None,
558+ help=(
559+ "The directory of the parent tree. If not specified, "
560+ "the main Git worktree."
561+ ),
562+ metavar="DIR",
563+ )
564 parser.add_option(
565- '-t', '--target', dest='target', default=curdir,
566- help=("The directory of the target tree. If not specified, "
567- "the current working directory."),
568- metavar="DIR")
569+ "-t",
570+ "--target",
571+ dest="target",
572+ default=curdir,
573+ help=(
574+ "The directory of the target tree. If not specified, "
575+ "the current working directory."
576+ ),
577+ metavar="DIR",
578+ )
579 parser.add_option(
580- '-q', '--quiet', dest='verbose', action='store_false',
581- help="Be less verbose.")
582+ "-q",
583+ "--quiet",
584+ dest="verbose",
585+ action="store_false",
586+ help="Be less verbose.",
587+ )
588 parser.add_option(
589- '-h', '--help', action='help',
590- help="Show this help message and exit.")
591+ "-h", "--help", action="help", help="Show this help message and exit."
592+ )
593 parser.set_defaults(verbose=True)
594
595 options, args = parser.parse_args()
596@@ -101,8 +105,10 @@ if __name__ == '__main__':
597 if options.parent is None:
598 options.parent = args[0]
599 else:
600- parser.error("Cannot specify parent tree as named "
601- "argument and positional argument.")
602+ parser.error(
603+ "Cannot specify parent tree as named "
604+ "argument and positional argument."
605+ )
606 elif len(args) >= 2:
607 parser.error("Too many arguments.")
608
609@@ -113,23 +119,25 @@ if __name__ == '__main__':
610 if options.parent is None:
611 parser.error("Parent tree not specified.")
612
613- if not exists(join(options.target, 'sourcecode')):
614- makedirs(join(options.target, 'sourcecode'))
615+ if not exists(join(options.target, "sourcecode")):
616+ makedirs(join(options.target, "sourcecode"))
617
618 missing_files = gen_missing_files(
619- abspath(join(options.parent, 'sourcecode')),
620- abspath(join(options.target, 'sourcecode')))
621+ abspath(join(options.parent, "sourcecode")),
622+ abspath(join(options.target, "sourcecode")),
623+ )
624
625 for source, destination in missing_files:
626 link(source, destination)
627
628- for folder_name in ('download-cache',):
629+ for folder_name in ("download-cache",):
630 source = abspath(join(options.parent, folder_name))
631 destination = abspath(join(options.target, folder_name))
632 if not exists(destination):
633 if not exists(source):
634 stderr.write(
635- ' Wanted to link %s to %s but source does not exist\n' %
636- (source, destination))
637+ " Wanted to link %s to %s but source does not exist\n"
638+ % (source, destination)
639+ )
640 else:
641 link(source, destination)
642diff --git a/utilities/link-system-packages.py b/utilities/link-system-packages.py
643index 14d4bda..17ced92 100755
644--- a/utilities/link-system-packages.py
645+++ b/utilities/link-system-packages.py
646@@ -5,11 +5,11 @@
647
648 """Link system-installed Python modules into Launchpad's virtualenv."""
649
650-from argparse import ArgumentParser
651-from distutils.sysconfig import get_python_lib
652 import importlib
653 import os.path
654 import re
655+from argparse import ArgumentParser
656+from distutils.sysconfig import get_python_lib
657
658 # Importing this from the vendored version in pkg_resources is a bit dodgy
659 # (using packaging.markers directly would be better), but we want to
660@@ -32,9 +32,11 @@ def link_module(name, virtualenv_libdir, optional=False):
661 system_libdir = get_python_lib(plat_specific=path.endswith(".so"))
662 if os.path.commonprefix([path, system_libdir]) != system_libdir:
663 raise RuntimeError(
664- "%s imported from outside %s (%s)" % (name, system_libdir, path))
665+ "%s imported from outside %s (%s)" % (name, system_libdir, path)
666+ )
667 target_path = os.path.join(
668- virtualenv_libdir, os.path.relpath(path, system_libdir))
669+ virtualenv_libdir, os.path.relpath(path, system_libdir)
670+ )
671 if os.path.lexists(target_path) and os.path.islink(target_path):
672 os.unlink(target_path)
673 os.symlink(path, target_path)
674@@ -52,7 +54,8 @@ def main():
675 continue
676 match = re.match(
677 r"^(\[optional\])?\s*([A-Za-z_][A-Za-z0-9_]*)(?:\s*;\s*(.*))?",
678- line)
679+ line,
680+ )
681 if not match:
682 raise ValueError("Parse error: %s" % line)
683 optional = bool(match.group(1))
684diff --git a/utilities/list-pages b/utilities/list-pages
685index 3dd9314..5c720cf 100755
686--- a/utilities/list-pages
687+++ b/utilities/list-pages
688@@ -44,29 +44,22 @@ because our cheat objects don't match the app-encoded business logic.
689
690 import _pythonpath # noqa: F401
691
692-from inspect import getmro
693 import os
694+from inspect import getmro
695
696 from zope.app.wsgi.testlayer import BrowserLayer
697 from zope.browserpage.simpleviewclass import simple
698-from zope.component import (
699- adapter,
700- getGlobalSiteManager,
701- )
702-from zope.interface import (
703- directlyProvides,
704- implementer,
705- )
706+from zope.component import adapter, getGlobalSiteManager
707+from zope.interface import directlyProvides, implementer
708 from zope.publisher.interfaces.browser import IDefaultBrowserLayer
709
710+import zcml
711 from lp.services.config import config
712 from lp.services.scripts import execute_zcml_for_scripts
713 from lp.services.webapp.interfaces import ICanonicalUrlData
714 from lp.services.webapp.publisher import canonical_url
715-import zcml
716
717-
718-ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
719+ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
720
721
722 def load_zcml(zopeless=False):
723@@ -76,7 +69,7 @@ def load_zcml(zopeless=False):
724 if zopeless:
725 execute_zcml_for_scripts()
726 else:
727- BrowserLayer(zcml, zcml_file='webapp.zcml').setUp()
728+ BrowserLayer(zcml, zcml_file="webapp.zcml").setUp()
729
730
731 def is_page_adapter(a):
732@@ -117,27 +110,33 @@ def has_page_title(a):
733 template = get_template_filename(view)
734 if template is None:
735 return False
736- if (getattr(view, 'page_title', marker) is marker):
737+ if getattr(view, "page_title", marker) is marker:
738 return False
739 return has_html_element(template)
740
741
742 def has_html_element(template):
743 with open(template) as f:
744- return '</html>' in f.read()
745+ return "</html>" in f.read()
746
747
748 def iter_page_adapters():
749 """Iterate over adapters for browser:page directives."""
750 gsm = getGlobalSiteManager()
751- return (a for a in gsm.registeredAdapters()
752- if is_page_adapter(a) and has_page_title(a))
753+ return (
754+ a
755+ for a in gsm.registeredAdapters()
756+ if is_page_adapter(a) and has_page_title(a)
757+ )
758
759
760 def iter_url_adapters():
761 gsm = getGlobalSiteManager()
762- return (a for a in gsm.registeredAdapters()
763- if issubclass(a.provided, ICanonicalUrlData))
764+ return (
765+ a
766+ for a in gsm.registeredAdapters()
767+ if issubclass(a.provided, ICanonicalUrlData)
768+ )
769
770
771 def format_page_adapter(a):
772@@ -156,28 +155,31 @@ def format_page_adapter(a):
773 # z.app.pagetemplate.simpleviewclass). Ideally, we'd just construct the
774 # view, but that's hard, since in general they won't construct without an
775 # object that implements the interface they need.
776- bases = [b for b in bases
777- if b is not simple
778- and 'SimpleViewClass' not in b.__name__]
779+ bases = [
780+ b
781+ for b in bases
782+ if b is not simple and "SimpleViewClass" not in b.__name__
783+ ]
784 template = get_template_filename(get_view(a))
785 if template:
786 template = os.path.relpath(template, ROOT)
787- url = '%s/%s' % (get_example_canonical_url(a.required[0]), a.name)
788- return '%s:%s,%s,%s,%s,%s' % (
789+ url = "%s/%s" % (get_example_canonical_url(a.required[0]), a.name)
790+ return "%s:%s,%s,%s,%s,%s" % (
791 bases[0].__name__,
792 a.name,
793 a.required[0].__name__,
794 a.required[1].__name__,
795 template,
796 url,
797- )
798+ )
799
800
801 _BLACKLIST = [
802- '__conform__',
803- ]
804-class Whatever:
805+ "__conform__",
806+]
807+
808
809+class Whatever:
810 def __init__(self, name, interface=None):
811 self._name = name
812 self._interface = interface
813@@ -185,23 +187,23 @@ class Whatever:
814 directlyProvides(self, interface)
815
816 def __str__(self):
817- return '<%s>' % (self._name,)
818+ return "<%s>" % (self._name,)
819
820 def __repr__(self):
821- return '<%s>' % str(self)
822+ return "<%s>" % str(self)
823
824 def __int__(self):
825 return 1
826
827 def __call__(self, *args, **kwargs):
828 args = map(repr, args)
829- args.extend('%s=%r' % (k, v) for k, v in kwargs.items())
830+ args.extend("%s=%r" % (k, v) for k, v in kwargs.items())
831 # If we're being called with no args, assume this is part of crazy
832 # TALES stuff:
833 # webapp/metazcml.py(365)path()
834 # -> return self._compiled_path_expression(self._expression_context)
835 if args:
836- name = '%s(%s)' % (self._name, ', '.join(args))
837+ name = "%s(%s)" % (self._name, ", ".join(args))
838 return Whatever(name)
839 else:
840 return str(self)
841@@ -210,11 +212,11 @@ class Whatever:
842 if name in _BLACKLIST:
843 raise AttributeError
844 if self._interface:
845- interface = getattr(self._interface[name], 'schema', None)
846+ interface = getattr(self._interface[name], "schema", None)
847 else:
848 interface = None
849- child = Whatever('%s.%s' % (self._name, name), interface)
850- if 'name' in name:
851+ child = Whatever("%s.%s" % (self._name, name), interface)
852+ if "name" in name:
853 # Then it's probably displayname, or something.
854 return str(child)
855 else:
856@@ -225,7 +227,7 @@ class Whatever:
857 @adapter(object)
858 class DefaultCanonicalUrlData:
859 def __init__(self, name):
860- self.path = '[[%s]]' % (name,)
861+ self.path = "[[%s]]" % (name,)
862 self.rootsite = None
863 self.inside = None
864
865@@ -253,5 +255,5 @@ def main():
866 print(format_page_adapter(a))
867
868
869-if __name__ == '__main__':
870+if __name__ == "__main__":
871 main()
872diff --git a/utilities/local-latency b/utilities/local-latency
873index 1c7dec1..dd33c58 100755
874--- a/utilities/local-latency
875+++ b/utilities/local-latency
876@@ -3,11 +3,7 @@
877 import subprocess
878 import sys
879
880-from script_commands import (
881- helps,
882- run_subcommand,
883- UserError,
884- )
885+from script_commands import UserError, helps, run_subcommand
886
887
888 def tc(command):
889@@ -15,35 +11,40 @@ def tc(command):
890
891 :param tc: The remainder of the command (leaving out tc).
892 """
893- subprocess.call('sudo tc ' + command, shell=True)
894+ subprocess.call("sudo tc " + command, shell=True)
895
896
897-@helps(delay='Length of delay in miliseconds (each way).',
898- port='Port to induce delay on.')
899+@helps(
900+ delay="Length of delay in miliseconds (each way).",
901+ port="Port to induce delay on.",
902+)
903 def start(delay=500, port=443):
904 """Add artificial latency to the lo interface on the specified port."""
905- tc('qdisc add dev lo root handle 1: prio')
906- tc('qdisc add dev lo parent 1:3 handle 30: netem delay %dms' % delay)
907- tc('filter add dev lo protocol ip parent 1:0 prio 3 u32 match ip'
908- ' dport %d 0xffff flowid 1:3' % port)
909- tc('filter add dev lo protocol ip parent 1:0 prio 3 u32 match ip'
910- ' sport %d 0xffff flowid 1:3' % port)
911+ tc("qdisc add dev lo root handle 1: prio")
912+ tc("qdisc add dev lo parent 1:3 handle 30: netem delay %dms" % delay)
913+ tc(
914+ "filter add dev lo protocol ip parent 1:0 prio 3 u32 match ip"
915+ " dport %d 0xffff flowid 1:3" % port
916+ )
917+ tc(
918+ "filter add dev lo protocol ip parent 1:0 prio 3 u32 match ip"
919+ " sport %d 0xffff flowid 1:3" % port
920+ )
921
922
923 def stop():
924 """Remove latency from the lo."""
925- tc('qdisc del dev lo root')
926+ tc("qdisc del dev lo root")
927
928
929 subcommands = {
930- 'start': start,
931- 'stop': stop,
932- }
933-
934+ "start": start,
935+ "stop": stop,
936+}
937
938
939 if __name__ == "__main__":
940 try:
941 run_subcommand(subcommands, sys.argv[1:])
942 except UserError as e:
943- sys.stderr.write(str(e)+'\n')
944+ sys.stderr.write(str(e) + "\n")
945diff --git a/utilities/lsconf.py b/utilities/lsconf.py
946index ba23a7b..c05e502 100755
947--- a/utilities/lsconf.py
948+++ b/utilities/lsconf.py
949@@ -10,24 +10,24 @@ __metatype__ = type
950 # Scripts may have relative imports.
951 import _pythonpath # noqa: F401
952
953-from operator import attrgetter
954-from optparse import OptionParser
955 import os
956 import sys
957+from operator import attrgetter
958+from optparse import OptionParser
959 from textwrap import dedent
960
961 from lazr.config import ImplicitTypeSchema
962
963 import lp.services.config
964
965-
966 _schema_dir = os.path.abspath(os.path.dirname(lp.services.config.__file__))
967 _root = os.path.dirname(os.path.dirname(os.path.dirname(_schema_dir)))
968
969
970 class Configuration:
971 """A lazr.config configuration."""
972- _schema_path = os.path.join(_schema_dir, 'schema-lazr.conf')
973+
974+ _schema_path = os.path.join(_schema_dir, "schema-lazr.conf")
975
976 def __init__(self, config):
977 self.config = config
978@@ -52,13 +52,15 @@ class Configuration:
979 previous_config_data = self.config.data
980 # Walk the stack of config_data until a change is found.
981 for config_data in self.config.overlays:
982- if (section.name in config_data
983- and config_data[section.name][key] != value):
984+ if (
985+ section.name in config_data
986+ and config_data[section.name][key] != value
987+ ):
988 conf_file_name = previous_config_data.filename
989 break
990 previous_config_data = config_data
991 conf_path = os.path.abspath(conf_file_name)
992- return conf_path[len(_root) + 1:]
993+ return conf_path[len(_root) + 1 :]
994
995 def list_config(self, verbose=False, section_name=None):
996 """Print all the sections and keys in a configuration.
997@@ -70,51 +72,65 @@ class Configuration:
998 was defined.
999 :param section_name: Only print the named section.
1000 """
1001- print('# This configuration derives from:')
1002+ print("# This configuration derives from:")
1003 for config_data in self.config.overlays:
1004- print('# %s' % config_data.filename)
1005+ print("# %s" % config_data.filename)
1006 print()
1007- name_key = attrgetter('name')
1008+ name_key = attrgetter("name")
1009 for count, section in enumerate(sorted(self.config, key=name_key)):
1010 if section_name is not None and section_name != section.name:
1011 continue
1012 if count > 0:
1013 # Separate sections by a blank line, or two when verbose.
1014 print()
1015- print('[%s]' % section.name)
1016+ print("[%s]" % section.name)
1017 if verbose and section.optional:
1018- print('# This section is optional.\n')
1019+ print("# This section is optional.\n")
1020 for count, key in enumerate(sorted(section)):
1021 if verbose:
1022 if count > 0:
1023 # Separate keys by a blank line.
1024 print()
1025 conf_file_name = self.config_file_for_value(section, key)
1026- print('# Defined in: %s' % conf_file_name)
1027- print('%s: %s' % (key, section[key]))
1028+ print("# Defined in: %s" % conf_file_name)
1029+ print("%s: %s" % (key, section[key]))
1030
1031
1032 def get_option_parser():
1033 """Return the option parser for this program."""
1034- usage = dedent(""" %prog [options] lazr-config.conf
1035+ usage = dedent(
1036+ """ %prog [options] lazr-config.conf
1037
1038 List all the sections and keys in an environment's lazr configuration.
1039 The configuration is assembled from the schema and conf files. Verbose
1040 annotates each key with the location of the file that set its value.
1041- The 'section' option limits the list to just the named section.""")
1042+ The 'section' option limits the list to just the named section."""
1043+ )
1044 parser = OptionParser(usage=usage)
1045 parser.add_option(
1046- "-l", "--schema", dest="schema_path",
1047- help="the path to the lazr.config schema file")
1048+ "-l",
1049+ "--schema",
1050+ dest="schema_path",
1051+ help="the path to the lazr.config schema file",
1052+ )
1053 parser.add_option(
1054- "-v", "--verbose", action="store_true",
1055- help="explain where the section and keys are set")
1056+ "-v",
1057+ "--verbose",
1058+ action="store_true",
1059+ help="explain where the section and keys are set",
1060+ )
1061 parser.add_option(
1062- "-s", "--section", dest="section_name",
1063- help="restrict the listing to the section")
1064+ "-s",
1065+ "--section",
1066+ dest="section_name",
1067+ help="restrict the listing to the section",
1068+ )
1069 parser.add_option(
1070- '-i', "--instance", dest="instance_name",
1071- help="the configuration instance to use")
1072+ "-i",
1073+ "--instance",
1074+ dest="instance_name",
1075+ help="the configuration instance to use",
1076+ )
1077 return parser
1078
1079
1080@@ -134,11 +150,12 @@ def main(argv=None):
1081 conf_path = arguments[0]
1082 configuration = Configuration.load(conf_path, options.schema_path)
1083 else:
1084- parser.error('Too many arguments.')
1085+ parser.error("Too many arguments.")
1086 # Does not return.
1087 configuration.list_config(
1088- verbose=options.verbose, section_name=options.section_name)
1089+ verbose=options.verbose, section_name=options.section_name
1090+ )
1091
1092
1093-if __name__ == '__main__':
1094+if __name__ == "__main__":
1095 sys.exit(main())
1096diff --git a/utilities/make-dev-certificate b/utilities/make-dev-certificate
1097index 0a6859a..7142882 100755
1098--- a/utilities/make-dev-certificate
1099+++ b/utilities/make-dev-certificate
1100@@ -17,7 +17,6 @@ import subprocess
1101 import sys
1102 import tempfile
1103
1104-
1105 vhosts = [
1106 "launchpad.test",
1107 "answers.launchpad.test",
1108@@ -30,7 +29,7 @@ vhosts = [
1109 "translations.launchpad.test",
1110 "xmlrpc.launchpad.test",
1111 "testopenid.test",
1112- ]
1113+]
1114
1115
1116 def main():
1117@@ -43,26 +42,37 @@ def main():
1118 shutil.copy2("/etc/ssl/openssl.cnf", config)
1119 with open(config, "a") as f:
1120 f.write(
1121- "\n[subjectAltName]\nsubjectAltName=%s\n" %
1122- ",".join("DNS:%s" % vhost for vhost in vhosts))
1123- subprocess.check_call([
1124- "openssl", "req",
1125- "-config", config,
1126- "-new",
1127- "-newkey", "rsa:4096",
1128- "-nodes",
1129- "-sha256",
1130- "-subj", "/CN=%s/" % vhosts[0],
1131- "-extensions", "subjectAltName",
1132- "-x509",
1133- "-days", "3650",
1134- "-keyout", key,
1135- "-out", certificate,
1136- ])
1137+ "\n[subjectAltName]\nsubjectAltName=%s\n"
1138+ % ",".join("DNS:%s" % vhost for vhost in vhosts)
1139+ )
1140+ subprocess.check_call(
1141+ [
1142+ "openssl",
1143+ "req",
1144+ "-config",
1145+ config,
1146+ "-new",
1147+ "-newkey",
1148+ "rsa:4096",
1149+ "-nodes",
1150+ "-sha256",
1151+ "-subj",
1152+ "/CN=%s/" % vhosts[0],
1153+ "-extensions",
1154+ "subjectAltName",
1155+ "-x509",
1156+ "-days",
1157+ "3650",
1158+ "-keyout",
1159+ key,
1160+ "-out",
1161+ certificate,
1162+ ]
1163+ )
1164 print("Created new local key and self-signed certificate.")
1165 subprocess.check_call(["openssl", "x509", "-in", certificate, "-text"])
1166 return 0
1167
1168
1169-if __name__ == '__main__':
1170+if __name__ == "__main__":
1171 sys.exit(main())
1172diff --git a/utilities/make-dummy-hosted-branches b/utilities/make-dummy-hosted-branches
1173index def0bed..2b40eb8 100755
1174--- a/utilities/make-dummy-hosted-branches
1175+++ b/utilities/make-dummy-hosted-branches
1176@@ -36,22 +36,24 @@ from lp.services.scripts import execute_zcml_for_scripts
1177 def make_bazaar_branch_and_tree(db_branch):
1178 """Make a dummy Bazaar branch and working tree from a database Branch."""
1179 assert db_branch.branch_type == BranchType.HOSTED, (
1180- "Can only create branches for HOSTED branches: %r"
1181- % db_branch)
1182+ "Can only create branches for HOSTED branches: %r" % db_branch
1183+ )
1184 branch_dir = os.path.join(
1185 config.codehosting.mirrored_branches_root,
1186- branch_id_to_path(db_branch.id))
1187+ branch_id_to_path(db_branch.id),
1188+ )
1189 return create_branch_with_one_revision(branch_dir)
1190
1191
1192 def main(argv):
1193- os.environ['BZR_HOME'] = tempfile.mkdtemp()
1194+ os.environ["BZR_HOME"] = tempfile.mkdtemp()
1195 if os.path.exists(config.codehosting.mirrored_branches_root):
1196 shutil.rmtree(config.codehosting.mirrored_branches_root)
1197 execute_zcml_for_scripts()
1198 try:
1199 branches = Branch.select(
1200- "Branch.branch_type = %s" % sqlvalues(BranchType.HOSTED))
1201+ "Branch.branch_type = %s" % sqlvalues(BranchType.HOSTED)
1202+ )
1203 for branch in branches:
1204 make_bazaar_branch_and_tree(branch)
1205 finally:
1206@@ -59,5 +61,5 @@ def main(argv):
1207 print("Created %d branches based on sample data." % len(list(branches)))
1208
1209
1210-if __name__ == '__main__':
1211+if __name__ == "__main__":
1212 main(sys.argv)
1213diff --git a/utilities/make-lp-user b/utilities/make-lp-user
1214index 6278075..8be2a16 100755
1215--- a/utilities/make-lp-user
1216+++ b/utilities/make-lp-user
1217@@ -31,33 +31,30 @@ production environments.
1218
1219 import _pythonpath # noqa: F401
1220
1221-from optparse import OptionParser
1222 import os
1223 import re
1224 import subprocess
1225 import sys
1226+from optparse import OptionParser
1227
1228-from storm.store import Store
1229 import transaction
1230+from storm.store import Store
1231 from zope.component import getUtility
1232
1233 from lp.registry.interfaces.gpg import IGPGKeySet
1234 from lp.registry.interfaces.person import IPersonSet
1235 from lp.registry.interfaces.ssh import ISSHKeySet
1236 from lp.registry.interfaces.teammembership import TeamMembershipStatus
1237-from lp.services.gpg.interfaces import (
1238- get_gpg_path,
1239- IGPGHandler,
1240- )
1241+from lp.services.gpg.interfaces import IGPGHandler, get_gpg_path
1242 from lp.services.scripts import execute_zcml_for_scripts
1243 from lp.services.timeout import set_default_timeout_function
1244 from lp.testing.factory import LaunchpadObjectFactory
1245
1246-
1247 factory = LaunchpadObjectFactory()
1248
1249 set_default_timeout_function(lambda: 100)
1250
1251+
1252 def make_person(username, email):
1253 """Create and return a person with the given username.
1254
1255@@ -87,10 +84,9 @@ def add_person_to_teams(person, team_names):
1256 if not team.is_team:
1257 print("ERROR: %s is not a team." % (team_name,))
1258 continue
1259- team.addMember(
1260- person, person, status=TeamMembershipStatus.APPROVED)
1261+ team.addMember(person, person, status=TeamMembershipStatus.APPROVED)
1262 teams_joined.append(team_name)
1263- print("teams: %s" % ' '.join(teams_joined))
1264+ print("teams: %s" % " ".join(teams_joined))
1265
1266
1267 def add_ssh_public_keys(person):
1268@@ -99,9 +95,9 @@ def add_ssh_public_keys(person):
1269 This function looks in ~/.ssh/id_rsa.pub and ~/.ssh/id_dsa.pub for SSH
1270 public keys and registers them as SSH keys for `person`.
1271 """
1272- ssh_dir = os.path.expanduser('~/.ssh')
1273+ ssh_dir = os.path.expanduser("~/.ssh")
1274 key_set = getUtility(ISSHKeySet)
1275- for filename in ('id_rsa.pub', 'id_dsa.pub'):
1276+ for filename in ("id_rsa.pub", "id_dsa.pub"):
1277 try:
1278 public_key_file = open(os.path.join(ssh_dir, filename))
1279 try:
1280@@ -111,10 +107,10 @@ def add_ssh_public_keys(person):
1281 except OSError:
1282 continue
1283 key_set.new(person, public_key)
1284- print('Registered SSH key: %s' % (filename,))
1285+ print("Registered SSH key: %s" % (filename,))
1286 break
1287 else:
1288- print('No SSH key files found in %s' % ssh_dir)
1289+ print("No SSH key files found in %s" % ssh_dir)
1290
1291
1292 def parse_fingerprints(gpg_output):
1293@@ -133,28 +129,36 @@ def run_native_gpg(arguments):
1294 # Need to override GNUPGHOME or we'll get a dummy GPG in a temp
1295 # directory, which won't find any keys.
1296 env = os.environ.copy()
1297- if 'GNUPGHOME' in env:
1298- del env['GNUPGHOME']
1299+ if "GNUPGHOME" in env:
1300+ del env["GNUPGHOME"]
1301
1302 command_line = [get_gpg_path()] + arguments
1303 pipe = subprocess.Popen(
1304- command_line, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1305- universal_newlines=True)
1306+ command_line,
1307+ env=env,
1308+ stdout=subprocess.PIPE,
1309+ stderr=subprocess.PIPE,
1310+ universal_newlines=True,
1311+ )
1312 stdout, stderr = pipe.communicate()
1313- if stderr != '':
1314+ if stderr != "":
1315 print(stderr)
1316 if pipe.returncode != 0:
1317- raise Exception('GPG error during "%s"' % ' '.join(command_line))
1318+ raise Exception('GPG error during "%s"' % " ".join(command_line))
1319
1320 return stdout
1321
1322
1323 def add_gpg_key(person, fingerprint):
1324 """Add the GPG key with the given fingerprint to `person`."""
1325- run_native_gpg([
1326- '--keyserver', 'hkp://keyserver.launchpad.test:11371',
1327- '--send-key', fingerprint
1328- ])
1329+ run_native_gpg(
1330+ [
1331+ "--keyserver",
1332+ "hkp://keyserver.launchpad.test:11371",
1333+ "--send-key",
1334+ fingerprint,
1335+ ]
1336+ )
1337
1338 gpghandler = getUtility(IGPGHandler)
1339 key = gpghandler.retrieveKey(fingerprint)
1340@@ -166,14 +170,20 @@ def add_gpg_key(person, fingerprint):
1341
1342 can_encrypt = True
1343 lpkey = gpgkeyset.new(
1344- person.id, key.keyid, fingerprint, key.keysize, key.algorithm,
1345- active=True, can_encrypt=can_encrypt)
1346+ person.id,
1347+ key.keyid,
1348+ fingerprint,
1349+ key.keysize,
1350+ key.algorithm,
1351+ active=True,
1352+ can_encrypt=can_encrypt,
1353+ )
1354 Store.of(person).add(lpkey)
1355
1356
1357 def attach_gpg_keys(email, person):
1358 """Attach the GPG key(s) for `email` to `person`."""
1359- output = run_native_gpg(['--fingerprint', email])
1360+ output = run_native_gpg(["--fingerprint", email])
1361
1362 fingerprints = parse_fingerprints(output)
1363 if len(fingerprints) == 0:
1364@@ -190,11 +200,15 @@ def parse_args(arguments):
1365 """
1366 parser = OptionParser(description="Create a local Launchpad user.")
1367 parser.add_option(
1368- '-e', '--email', action='store', dest='email', default=None,
1369- help="Email address")
1370+ "-e",
1371+ "--email",
1372+ action="store",
1373+ dest="email",
1374+ default=None,
1375+ help="Email address",
1376+ )
1377 parser.add_option(
1378- '-g', '--gpg', action='store_true', dest='gpg',
1379- help="Use real GPG key"
1380+ "-g", "--gpg", action="store_true", dest="gpg", help="Use real GPG key"
1381 )
1382
1383 options, args = parser.parse_args(arguments)
1384@@ -212,7 +226,7 @@ def main(arguments):
1385 """Run the script."""
1386 options = parse_args(arguments)
1387 if options.email is None:
1388- email = '%s@example.com' % options.username
1389+ email = "%s@example.com" % options.username
1390 else:
1391 email = options.email
1392
1393@@ -231,5 +245,5 @@ def main(arguments):
1394 return 0
1395
1396
1397-if __name__ == '__main__':
1398+if __name__ == "__main__":
1399 sys.exit(main(sys.argv[1:]))
1400diff --git a/utilities/make-requirements.py b/utilities/make-requirements.py
1401index 21b68da..e75e246 100755
1402--- a/utilities/make-requirements.py
1403+++ b/utilities/make-requirements.py
1404@@ -5,10 +5,10 @@
1405
1406 """Build a pip constraints file from inputs."""
1407
1408+import logging
1409 from argparse import ArgumentParser
1410 from collections import defaultdict
1411 from configparser import ConfigParser
1412-import logging
1413
1414 from pkg_resources import parse_requirements
1415
1416@@ -29,7 +29,7 @@ def read_buildout_versions(path):
1417 if section == "versions":
1418 python_version = None
1419 elif section.startswith("versions:python"):
1420- python_suffix = section[len("versions:python"):]
1421+ python_suffix = section[len("versions:python") :]
1422 if len(python_suffix) == 1 and python_suffix.isdigit():
1423 python_version = "%s.*" % python_suffix
1424 elif len(python_suffix) == 2 and python_suffix.isdigit():
1425@@ -50,15 +50,18 @@ def read_buildout_versions(path):
1426 if python_version is None:
1427 continue
1428 requirements.append(
1429- '%s==%s; python_version == "%s"' %
1430- (name, versions[name][python_version], python_version))
1431+ '%s==%s; python_version == "%s"'
1432+ % (name, versions[name][python_version], python_version)
1433+ )
1434 if None in python_versions:
1435 marker = " and ".join(
1436 'python_version != "%s"' % python_version
1437 for python_version in python_versions
1438- if python_version is not None)
1439+ if python_version is not None
1440+ )
1441 requirements.append(
1442- "%s==%s; %s" % (name, versions[name][None], marker))
1443+ "%s==%s; %s" % (name, versions[name][None], marker)
1444+ )
1445 return list(parse_requirements(requirements))
1446
1447
1448@@ -93,19 +96,28 @@ def write_requirements(include_requirements, exclude_requirements):
1449 def main():
1450 parser = ArgumentParser()
1451 parser.add_argument(
1452- "--buildout", action="append", metavar="VERSIONS",
1453- help="Include requirements from this buildout versions file")
1454+ "--buildout",
1455+ action="append",
1456+ metavar="VERSIONS",
1457+ help="Include requirements from this buildout versions file",
1458+ )
1459 parser.add_argument(
1460- "--include", action="append", metavar="REQUIREMENTS",
1461- help="Include requirements from this PEP 508 requirements file")
1462+ "--include",
1463+ action="append",
1464+ metavar="REQUIREMENTS",
1465+ help="Include requirements from this PEP 508 requirements file",
1466+ )
1467 parser.add_argument(
1468- "--exclude", action="append", metavar="REQUIREMENTS",
1469- help="Exclude requirements from this PEP 508 requirements file")
1470+ "--exclude",
1471+ action="append",
1472+ metavar="REQUIREMENTS",
1473+ help="Exclude requirements from this PEP 508 requirements file",
1474+ )
1475 args = parser.parse_args()
1476
1477- include_requirements = (
1478- [read_buildout_versions(path) for path in args.buildout] +
1479- [read_requirements(path) for path in args.include])
1480+ include_requirements = [
1481+ read_buildout_versions(path) for path in args.buildout
1482+ ] + [read_requirements(path) for path in args.include]
1483 exclude_requirements = [read_requirements(path) for path in args.exclude]
1484
1485 write_requirements(include_requirements, exclude_requirements)
1486diff --git a/utilities/massage-bug-import-xml b/utilities/massage-bug-import-xml
1487index 7e43831..8142896 100755
1488--- a/utilities/massage-bug-import-xml
1489+++ b/utilities/massage-bug-import-xml
1490@@ -1,13 +1,12 @@
1491 #!/usr/bin/env python
1492 # -*- mode: python -*-
1493
1494+import sys
1495 from base64 import standard_b64encode
1496 from optparse import OptionParser
1497-import sys
1498
1499 from lxml import etree
1500
1501-
1502 NS = "https://launchpad.net/xmlns/2006/bugs"
1503
1504
1505@@ -26,8 +25,7 @@ def truncate(text, message=None):
1506 message = "[Truncated]"
1507 else:
1508 message = "[Truncated; %s]" % message
1509- return "%s...\n\n%s" % (
1510- "\n".join(lines[:30]).strip(), message)
1511+ return "%s...\n\n%s" % ("\n".join(lines[:30]).strip(), message)
1512 else:
1513 return text
1514
1515@@ -70,58 +68,58 @@ def massage(root, project_name, fix_nickname, tag_nickname):
1516 # Resolve duplicates as far as they'll go.
1517 duplicates = {
1518 node.getparent().get("id"): node.text
1519- for node in root.findall('{%s}bug/{%s}duplicateof' % (NS, NS))
1520- if node.text is not None and node.text.isdigit()}
1521+ for node in root.findall("{%s}bug/{%s}duplicateof" % (NS, NS))
1522+ if node.text is not None and node.text.isdigit()
1523+ }
1524
1525 def resolve(bug_id):
1526 dupe_of = duplicates.get(bug_id)
1527- return (bug_id if dupe_of is None else resolve(dupe_of))
1528+ return bug_id if dupe_of is None else resolve(dupe_of)
1529
1530- duplicates = {
1531- bug_id: resolve(bug_id) for bug_id in duplicates}
1532+ duplicates = {bug_id: resolve(bug_id) for bug_id in duplicates}
1533
1534 # Scan the tree, fixing up issues.
1535- for bug in root.findall('{%s}bug' % NS):
1536+ for bug in root.findall("{%s}bug" % NS):
1537 # Get or create the tags element.
1538- tags = bug.find('{%s}tags' % NS)
1539+ tags = bug.find("{%s}tags" % NS)
1540 if tags is None:
1541- tags = etree.SubElement(bug, '{%s}tags' % NS)
1542+ tags = etree.SubElement(bug, "{%s}tags" % NS)
1543
1544- nickname = bug.find('{%s}nickname' % NS)
1545+ nickname = bug.find("{%s}nickname" % NS)
1546 if nickname is None:
1547 # Add an empty nickname to be filled in later.
1548- nickname = etree.SubElement(bug, '{%s}nickname' % NS)
1549+ nickname = etree.SubElement(bug, "{%s}nickname" % NS)
1550 elif tag_nickname:
1551 # Add the original nickname as a tag.
1552- etree.SubElement(tags, '{%s}tag' % NS).text = nickname.text
1553+ etree.SubElement(tags, "{%s}tag" % NS).text = nickname.text
1554
1555 # Change the nickname.
1556 if nickname.text is None or fix_nickname:
1557- nickname.text = "%s-%s" % (project_name, bug.get('id'))
1558+ nickname.text = "%s-%s" % (project_name, bug.get("id"))
1559
1560 # Resolve duplicateof, if it exists.
1561 if bug.get("id") in duplicates:
1562 bug.find("{%s}duplicateof" % NS).text = duplicates[bug.get("id")]
1563
1564 # Get the first comment and its text. We'll need these later.
1565- first_comment = bug.find('{%s}comment' % NS)
1566- first_comment_text = first_comment.find('{%s}text' % NS)
1567+ first_comment = bug.find("{%s}comment" % NS)
1568+ first_comment_text = first_comment.find("{%s}text" % NS)
1569 norm_text(first_comment_text)
1570
1571 # Check the description.
1572- description = bug.find('{%s}description' % NS)
1573+ description = bug.find("{%s}description" % NS)
1574 norm_text(description)
1575 if len(description.text) == 0:
1576- problem("Bug %s has no description." % bug.get('id'))
1577+ problem("Bug %s has no description." % bug.get("id"))
1578 # Try and get the description from the first comment.
1579 if first_comment_text is None:
1580 problem_detail("No comments!")
1581 problem_resolution("Setting description to '-'.")
1582- description.text = '-'
1583+ description.text = "-"
1584 elif len(first_comment_text.text) == 0:
1585 problem_detail("First comment has no text!")
1586 problem_resolution("Setting description to '-'.")
1587- description.text = '-'
1588+ description.text = "-"
1589 else:
1590 problem_detail("First comment has text.")
1591 problem_resolution("Removing description.")
1592@@ -131,8 +129,12 @@ def massage(root, project_name, fix_nickname, tag_nickname):
1593 problem_resolved()
1594 elif len(description.text) > 50000:
1595 problem(
1596- "Bug %s's description is too long (%d chars)." % (
1597- bug.get('id'), len(description.text),))
1598+ "Bug %s's description is too long (%d chars)."
1599+ % (
1600+ bug.get("id"),
1601+ len(description.text),
1602+ )
1603+ )
1604 # Compare the description to the first comment. If it's
1605 # the same, we don't need the description.
1606 if first_comment_text is None:
1607@@ -140,14 +142,15 @@ def massage(root, project_name, fix_nickname, tag_nickname):
1608 problem_resolution("Adding comment.")
1609 raise NotImplementedError("Add a comment.")
1610 elif description.text == first_comment_text.text:
1611- problem_detail('Description is same as first comment.')
1612- problem_resolution('Trimming description.')
1613+ problem_detail("Description is same as first comment.")
1614+ problem_resolution("Trimming description.")
1615 # It's safe to point the user to an attachment here,
1616 # even though it has not yet been created. It will be
1617 # created later because the first comment is also too
1618 # long.
1619 description.text = truncate(
1620- description.text, 'see "Full description" attachment')
1621+ description.text, 'see "Full description" attachment'
1622+ )
1623 else:
1624 problem_resolution("Truncating description.")
1625 raise NotImplementedError("Fix overlong description.")
1626@@ -156,35 +159,45 @@ def massage(root, project_name, fix_nickname, tag_nickname):
1627 # Check first comment text.
1628 if first_comment_text is not None:
1629 if len(first_comment_text.text) == 0:
1630- problem(
1631- "Bug %s's first comment has no text." % bug.get('id'))
1632+ problem("Bug %s's first comment has no text." % bug.get("id"))
1633 problem_resolution("Setting comment text to '-'.")
1634- first_comment_text.text = '-'
1635+ first_comment_text.text = "-"
1636 problem_resolved()
1637 elif len(first_comment_text.text) > 50000:
1638 problem(
1639- "Bug %s's first comment is too long (%d chars)." % (
1640- bug.get('id'), len(first_comment_text.text)))
1641+ "Bug %s's first comment is too long (%d chars)."
1642+ % (bug.get("id"), len(first_comment_text.text))
1643+ )
1644 # Save the original text as an attachment.
1645- problem_resolution('Adding attachment.')
1646+ problem_resolution("Adding attachment.")
1647 attachment = etree.SubElement(
1648- first_comment, '{%s}attachment' % NS)
1649- etree.SubElement(attachment, '{%s}filename' % NS).text = (
1650- "%s-bug-%s-full-description.txt" % (
1651- project_name, bug.get('id')))
1652- etree.SubElement(attachment, '{%s}title' % NS).text = (
1653- "Full description (text/plain, utf-8)")
1654- etree.SubElement(attachment, '{%s}mimetype' % NS).text = (
1655- "text/plain")
1656- etree.SubElement(attachment, '{%s}contents' % NS).text = (
1657- standard_b64encode(
1658- first_comment_text.text.encode('utf-8')
1659- ).decode('ascii'))
1660+ first_comment, "{%s}attachment" % NS
1661+ )
1662+ etree.SubElement(
1663+ attachment, "{%s}filename" % NS
1664+ ).text = "%s-bug-%s-full-description.txt" % (
1665+ project_name,
1666+ bug.get("id"),
1667+ )
1668+ etree.SubElement(
1669+ attachment, "{%s}title" % NS
1670+ ).text = "Full description (text/plain, utf-8)"
1671+ etree.SubElement(
1672+ attachment, "{%s}mimetype" % NS
1673+ ).text = "text/plain"
1674+ etree.SubElement(
1675+ attachment, "{%s}contents" % NS
1676+ ).text = standard_b64encode(
1677+ first_comment_text.text.encode("utf-8")
1678+ ).decode(
1679+ "ascii"
1680+ )
1681 # Trim the comment text.
1682- problem_resolution('Trimming comment text.')
1683+ problem_resolution("Trimming comment text.")
1684 first_comment_text.text = truncate(
1685 first_comment_text.text,
1686- 'see "Full description" attachment')
1687+ 'see "Full description" attachment',
1688+ )
1689 problem_resolved()
1690
1691
1692@@ -200,25 +213,33 @@ def main(arguments):
1693 truncated and an attachment is created to hold the original.
1694 """
1695 parser = OptionParser(
1696- usage=usage,
1697- description=description.strip(),
1698- add_help_option=False)
1699+ usage=usage, description=description.strip(), add_help_option=False
1700+ )
1701 parser.add_option(
1702- "-p", "--project", dest="project_name", metavar="NAME",
1703- help="The project to which this import data refers.")
1704+ "-p",
1705+ "--project",
1706+ dest="project_name",
1707+ metavar="NAME",
1708+ help="The project to which this import data refers.",
1709+ )
1710 parser.add_option(
1711- "--fix-nickname", action="store_true", dest="fix_nickname",
1712- help="Normalize the nickname to ${project_name}-${bug-id}.")
1713+ "--fix-nickname",
1714+ action="store_true",
1715+ dest="fix_nickname",
1716+ help="Normalize the nickname to ${project_name}-${bug-id}.",
1717+ )
1718 parser.add_option(
1719- "--tag-nickname", action="store_true", dest="tag_nickname",
1720- help="Add the original bug nickname as a tag.")
1721+ "--tag-nickname",
1722+ action="store_true",
1723+ dest="tag_nickname",
1724+ help="Add the original bug nickname as a tag.",
1725+ )
1726 parser.add_option(
1727- "-h", "--help", action="help",
1728- help="Show this help message and exit.")
1729+ "-h", "--help", action="help", help="Show this help message and exit."
1730+ )
1731 parser.set_defaults(
1732- project_name=None,
1733- fix_nickname=False,
1734- tag_nickname=False)
1735+ project_name=None, fix_nickname=False, tag_nickname=False
1736+ )
1737
1738 options, filenames = parser.parse_args(arguments)
1739 if options.project_name is None:
1740@@ -233,13 +254,17 @@ def main(arguments):
1741 root=tree.getroot(),
1742 project_name=options.project_name,
1743 fix_nickname=options.fix_nickname,
1744- tag_nickname=options.tag_nickname)
1745+ tag_nickname=options.tag_nickname,
1746+ )
1747 tree.write(
1748- (sys.stdout if filename == "-" else filename), encoding='utf-8',
1749- pretty_print=True, xml_declaration=True)
1750+ (sys.stdout if filename == "-" else filename),
1751+ encoding="utf-8",
1752+ pretty_print=True,
1753+ xml_declaration=True,
1754+ )
1755
1756 return 0
1757
1758
1759-if __name__ == '__main__':
1760+if __name__ == "__main__":
1761 sys.exit(main(sys.argv[1:]))
1762diff --git a/utilities/paste b/utilities/paste
1763index 5d857fb..227f06d 100755
1764--- a/utilities/paste
1765+++ b/utilities/paste
1766@@ -5,24 +5,24 @@
1767
1768 import _pythonpath # noqa: F401
1769
1770-from http.cookiejar import Cookie
1771-from optparse import OptionParser
1772 import os
1773 import pwd
1774 import sys
1775-from urllib.parse import urljoin
1776 import webbrowser
1777+from http.cookiejar import Cookie
1778+from optparse import OptionParser
1779+from urllib.parse import urljoin
1780
1781 from fixtures import MonkeyPatch
1782 from zope.testbrowser.browser import Browser
1783
1784-
1785 # Should we be able to override any of these?
1786-AUTH_FILE = '~/.lp_auth_cookie'
1787-PRIVATE_PASTE_HOST = 'pastebin.canonical.com'
1788-PUBLIC_PASTE_HOST = 'pastebin.ubuntu.com'
1789-PASTE_PATH = ''
1790-LP_AUTH_INSTRUCTIONS = """
1791+AUTH_FILE = "~/.lp_auth_cookie"
1792+PRIVATE_PASTE_HOST = "pastebin.canonical.com"
1793+PUBLIC_PASTE_HOST = "pastebin.ubuntu.com"
1794+PASTE_PATH = ""
1795+LP_AUTH_INSTRUCTIONS = (
1796+ """
1797 %s doesn't contain a valid LP authentication cookie.
1798
1799 Please update this file, with the 'lp' cookie value your browser sends
1800@@ -30,26 +30,43 @@ when visiting https://launchpad.net (while being logged in). It should
1801 look something like this:
1802
1803 sd33JsfeJop3esf6joi8sldfjJoIj3dssD6isfsdweJDe6i9JIKEYK
1804-""" % AUTH_FILE
1805+"""
1806+ % AUTH_FILE
1807+)
1808
1809
1810 def parse_arguments():
1811- parser = OptionParser(usage='%prog [options] < stdin')
1812- parser.add_option('-b', '--browser',
1813- default=False, action='store_true',
1814- help='Open web browser to the pastebin.')
1815- parser.add_option('-p', '--private',
1816- default=False, action='store_true',
1817- help='Use a private pastebin (pastebin.canonical.com).')
1818- parser.add_option('-s', '--syntax',
1819- default='text', type='string',
1820- help='The syntax of the pastebin.')
1821- parser.add_option('-f', '--file',
1822- type='string',
1823- help='File to pastebin instead of stdin.')
1824+ parser = OptionParser(usage="%prog [options] < stdin")
1825+ parser.add_option(
1826+ "-b",
1827+ "--browser",
1828+ default=False,
1829+ action="store_true",
1830+ help="Open web browser to the pastebin.",
1831+ )
1832+ parser.add_option(
1833+ "-p",
1834+ "--private",
1835+ default=False,
1836+ action="store_true",
1837+ help="Use a private pastebin (pastebin.canonical.com).",
1838+ )
1839+ parser.add_option(
1840+ "-s",
1841+ "--syntax",
1842+ default="text",
1843+ type="string",
1844+ help="The syntax of the pastebin.",
1845+ )
1846+ parser.add_option(
1847+ "-f",
1848+ "--file",
1849+ type="string",
1850+ help="File to pastebin instead of stdin.",
1851+ )
1852 options, arguments = parser.parse_args()
1853 if arguments:
1854- parser.error('Too many arguments')
1855+ parser.error("Too many arguments")
1856 # Does not return
1857 parser.options = options
1858 parser.arguments = arguments
1859@@ -67,25 +84,37 @@ def get_lp_auth_cookie(path):
1860 finally:
1861 f.close()
1862 return Cookie(
1863- version=0, name='lp', value=cookie_value,
1864- port=None, port_specified=False,
1865- domain='login.launchpad.net', domain_specified=True,
1866- domain_initial_dot=False, path='', path_specified=None,
1867- secure=True, expires=None, discard=True,
1868- comment=None, comment_url=None, rest=None, rfc2109=False)
1869+ version=0,
1870+ name="lp",
1871+ value=cookie_value,
1872+ port=None,
1873+ port_specified=False,
1874+ domain="login.launchpad.net",
1875+ domain_specified=True,
1876+ domain_initial_dot=False,
1877+ path="",
1878+ path_specified=None,
1879+ secure=True,
1880+ expires=None,
1881+ discard=True,
1882+ comment=None,
1883+ comment_url=None,
1884+ rest=None,
1885+ rfc2109=False,
1886+ )
1887
1888
1889 def authenticate(browser):
1890 """Go through the OpenID process and authenticate."""
1891 # First click on the page where it says we have to log in.
1892- browser.getControl('Continue').click()
1893+ browser.getControl("Continue").click()
1894 return True
1895
1896
1897 def main():
1898 parser = parse_arguments()
1899 try:
1900- poster = os.environ['USER']
1901+ poster = os.environ["USER"]
1902 except KeyError:
1903 poster = pwd.getpwuid(os.getuid()).pw_name
1904
1905@@ -99,10 +128,10 @@ def main():
1906 content = sys.stdin.read()
1907
1908 form = (
1909- ('poster', poster),
1910- ('syntax', [parser.options.syntax]),
1911- ('content', content),
1912- )
1913+ ("poster", poster),
1914+ ("syntax", [parser.options.syntax]),
1915+ ("content", content),
1916+ )
1917
1918 browser = Browser()
1919 paste_host = PUBLIC_PASTE_HOST
1920@@ -118,12 +147,12 @@ def main():
1921 # Remove the check for robots.txt, since the one on
1922 # pastebin.ubuntu.com doesn't allow us to open the page. We're not
1923 # really a robot.
1924- with MonkeyPatch('urllib.robotparser.RobotFileParser.allow_all', True):
1925- browser.open(urljoin('https://' + paste_host, PASTE_PATH))
1926+ with MonkeyPatch("urllib.robotparser.RobotFileParser.allow_all", True):
1927+ browser.open(urljoin("https://" + paste_host, PASTE_PATH))
1928
1929 if parser.options.private:
1930 # We need to authenticate before pasting.
1931- oid_form = browser.getForm(id='openid_message')
1932+ oid_form = browser.getForm(id="openid_message")
1933 if oid_form is not None:
1934 authenticated = authenticate(browser)
1935 if not authenticated:
1936@@ -131,11 +160,11 @@ def main():
1937 return
1938 for name, value in form:
1939 browser.getControl(name=name).value = value
1940- browser.getControl('Paste!').click()
1941+ browser.getControl("Paste!").click()
1942 print(browser.url)
1943 if parser.options.browser:
1944 webbrowser.open(browser.url)
1945
1946
1947-if __name__ == '__main__':
1948+if __name__ == "__main__":
1949 main()
1950diff --git a/utilities/pgbackup.py b/utilities/pgbackup.py
1951index 3454dc9..d78c9a1 100755
1952--- a/utilities/pgbackup.py
1953+++ b/utilities/pgbackup.py
1954@@ -11,34 +11,35 @@ Suitable for use in crontab for daily backups.
1955
1956 __all__ = []
1957
1958-from datetime import datetime
1959 import logging
1960-from optparse import OptionParser
1961 import os
1962 import os.path
1963 import stat
1964 import subprocess
1965 import sys
1966+from datetime import datetime
1967+from optparse import OptionParser
1968
1969+MB = float(1024 * 1024)
1970
1971-MB = float(1024*1024)
1972+return_code = 0 # Return code of this script. Set to the most recent failed
1973+# system call's return code
1974
1975-return_code = 0 # Return code of this script. Set to the most recent failed
1976- # system call's return code
1977
1978 def call(cmd, **kw):
1979- log.debug(' '.join(cmd))
1980+ log.debug(" ".join(cmd))
1981 rv = subprocess.call(cmd, **kw)
1982 if rv != 0:
1983 global return_code
1984 return_code = rv
1985 return rv
1986
1987+
1988 def main(options, databases):
1989 global return_code
1990- #Need longer file names if this is used more than daily
1991- #today = datetime.now().strftime('%Y%m%d_%H:%M:%S')
1992- today = datetime.now().strftime('%Y%m%d')
1993+ # Need longer file names if this is used more than daily
1994+ # today = datetime.now().strftime('%Y%m%d_%H:%M:%S')
1995+ today = datetime.now().strftime("%Y%m%d")
1996
1997 backup_dir = options.backup_dir
1998
1999@@ -47,13 +48,13 @@ def main(options, databases):
2000 # Better to bomb out now rather than to bomb out later, as later might
2001 # be several hours away.
2002 for database in databases:
2003- dest = os.path.join(backup_dir, '%s.%s.dump' % (database, today))
2004+ dest = os.path.join(backup_dir, "%s.%s.dump" % (database, today))
2005 if os.path.exists(dest):
2006 log.fatal("%s already exists." % dest)
2007 return 1
2008
2009 for database in databases:
2010- dest = os.path.join(backup_dir, '%s.%s.dump' % (database, today))
2011+ dest = os.path.join(backup_dir, "%s.%s.dump" % (database, today))
2012
2013 if os.path.exists(dest):
2014 log.fatal("%s already exists." % dest)
2015@@ -61,55 +62,57 @@ def main(options, databases):
2016
2017 cmd = [
2018 "/usr/bin/pg_dump",
2019- "-U", "postgres",
2020+ "-U",
2021+ "postgres",
2022 "--format=c",
2023 "--compress=9",
2024 "--blobs",
2025 "--file=%s" % dest,
2026 database,
2027- ]
2028+ ]
2029
2030- rv = call(cmd, stdin=subprocess.PIPE) # Sets return_code on failure.
2031+ rv = call(cmd, stdin=subprocess.PIPE) # Sets return_code on failure.
2032 if rv != 0:
2033 log.critical("Failed to backup %s (%d)" % (database, rv))
2034 continue
2035 size = os.stat(dest)[stat.ST_SIZE]
2036
2037- log.info("Backed up %s (%0.2fMB)" % (database, size/MB))
2038+ log.info("Backed up %s (%0.2fMB)" % (database, size / MB))
2039
2040 return return_code
2041
2042-if __name__ == '__main__':
2043+
2044+if __name__ == "__main__":
2045 parser = OptionParser(
2046- usage="usage: %prog [options] database [database ..]"
2047- )
2048- parser.add_option("-v", "--verbose", dest="verbose", default=0,
2049- action="count")
2050- parser.add_option("-q", "--quiet", dest="quiet", default=0,
2051- action="count")
2052- parser.add_option("-d", "--dir", dest="backup_dir",
2053- default="/var/lib/postgres/backups")
2054+ usage="usage: %prog [options] database [database ..]"
2055+ )
2056+ parser.add_option(
2057+ "-v", "--verbose", dest="verbose", default=0, action="count"
2058+ )
2059+ parser.add_option("-q", "--quiet", dest="quiet", default=0, action="count")
2060+ parser.add_option(
2061+ "-d", "--dir", dest="backup_dir", default="/var/lib/postgres/backups"
2062+ )
2063 (options, databases) = parser.parse_args()
2064 if len(databases) == 0:
2065 parser.error("must specify at least one database")
2066 if not os.path.isdir(options.backup_dir):
2067 parser.error(
2068- "Incorrect --dir. %s does not exist or is not a directory" % (
2069- options.backup_dir
2070- )
2071- )
2072+ "Incorrect --dir. %s does not exist or is not a directory"
2073+ % (options.backup_dir)
2074+ )
2075
2076 # Setup our log
2077- log = logging.getLogger('pgbackup')
2078+ log = logging.getLogger("pgbackup")
2079 hdlr = logging.StreamHandler(strm=sys.stderr)
2080- hdlr.setFormatter(logging.Formatter(
2081- fmt='%(asctime)s %(levelname)s %(message)s'
2082- ))
2083+ hdlr.setFormatter(
2084+ logging.Formatter(fmt="%(asctime)s %(levelname)s %(message)s")
2085+ )
2086 log.addHandler(hdlr)
2087 verbosity = options.verbose - options.quiet
2088 if verbosity > 0:
2089 log.setLevel(logging.DEBUG)
2090- elif verbosity == 0: # Default
2091+ elif verbosity == 0: # Default
2092 log.setLevel(logging.INFO)
2093 elif verbosity == -1:
2094 log.setLevel(logging.WARN)
2095diff --git a/utilities/pgcreate.py b/utilities/pgcreate.py
2096index 9a67a9a..b6d8ac4 100755
2097--- a/utilities/pgcreate.py
2098+++ b/utilities/pgcreate.py
2099@@ -16,21 +16,20 @@ import psycopg2
2100
2101 def main():
2102 if len(sys.argv) != 3:
2103- print('Usage: %s [template] [dbname]' % sys.argv[0], file=sys.stderr)
2104+ print("Usage: %s [template] [dbname]" % sys.argv[0], file=sys.stderr)
2105 return 1
2106
2107 template, dbname = sys.argv[1:]
2108
2109 for attempt in range(0, 10):
2110- con = psycopg2.connect('dbname=template1')
2111+ con = psycopg2.connect("dbname=template1")
2112 con.set_isolation_level(0)
2113 try:
2114 cur = con.cursor()
2115 cur.execute(
2116- "CREATE DATABASE %s TEMPLATE = %s ENCODING = 'UTF8'" % (
2117- dbname, template
2118- )
2119- )
2120+ "CREATE DATABASE %s TEMPLATE = %s ENCODING = 'UTF8'"
2121+ % (dbname, template)
2122+ )
2123 except psycopg2.Error:
2124 if attempt == 9:
2125 raise
2126@@ -40,5 +39,6 @@ def main():
2127 return 0
2128 return 1
2129
2130-if __name__ == '__main__':
2131+
2132+if __name__ == "__main__":
2133 sys.exit(main())
2134diff --git a/utilities/pgkillactive.py b/utilities/pgkillactive.py
2135index 25d722a..d06c481 100755
2136--- a/utilities/pgkillactive.py
2137+++ b/utilities/pgkillactive.py
2138@@ -10,10 +10,10 @@ __all__ = []
2139
2140 import _pythonpath # noqa: F401
2141
2142-from optparse import OptionParser
2143 import os
2144 import signal
2145 import sys
2146+from optparse import OptionParser
2147
2148 import psycopg2
2149
2150@@ -23,59 +23,92 @@ from lp.services.database import activity_cols
2151 def main():
2152 parser = OptionParser()
2153 parser.add_option(
2154- '-c', '--connection', type='string', dest='connect_string',
2155- default='', help="Psycopg connection string",
2156- )
2157+ "-c",
2158+ "--connection",
2159+ type="string",
2160+ dest="connect_string",
2161+ default="",
2162+ help="Psycopg connection string",
2163+ )
2164 parser.add_option(
2165- '-s', '--max-seconds', type='int',
2166- dest='max_seconds', default=60 * 60,
2167- help='Maximum seconds time connections are allowed to remain active.',
2168- )
2169+ "-s",
2170+ "--max-seconds",
2171+ type="int",
2172+ dest="max_seconds",
2173+ default=60 * 60,
2174+ help="Maximum seconds time connections are allowed to remain active.",
2175+ )
2176 parser.add_option(
2177- '-q', '--quiet', action='store_true', dest="quiet",
2178- default=False, help='Silence output',
2179- )
2180+ "-q",
2181+ "--quiet",
2182+ action="store_true",
2183+ dest="quiet",
2184+ default=False,
2185+ help="Silence output",
2186+ )
2187 parser.add_option(
2188- '-n', '--dry-run', action='store_true', default=False,
2189- dest='dry_run', help="Dry run - don't kill anything",
2190- )
2191+ "-n",
2192+ "--dry-run",
2193+ action="store_true",
2194+ default=False,
2195+ dest="dry_run",
2196+ help="Dry run - don't kill anything",
2197+ )
2198 parser.add_option(
2199- '-u', '--user', action='append', dest='users',
2200- help='Kill connection of users matching REGEXP', metavar='REGEXP')
2201+ "-u",
2202+ "--user",
2203+ action="append",
2204+ dest="users",
2205+ help="Kill connection of users matching REGEXP",
2206+ metavar="REGEXP",
2207+ )
2208 options, args = parser.parse_args()
2209 if len(args) > 0:
2210- parser.error('Too many arguments')
2211+ parser.error("Too many arguments")
2212 if not options.users:
2213- parser.error('--user is required')
2214+ parser.error("--user is required")
2215
2216- user_match_sql = 'AND (%s)' % ' OR '.join(
2217- ['usename ~* %s'] * len(options.users))
2218+ user_match_sql = "AND (%s)" % " OR ".join(
2219+ ["usename ~* %s"] * len(options.users)
2220+ )
2221
2222 con = psycopg2.connect(options.connect_string)
2223 cur = con.cursor()
2224- cur.execute(("""
2225+ cur.execute(
2226+ (
2227+ """
2228 SELECT usename, %(pid)s, backend_start, xact_start
2229 FROM pg_stat_activity
2230 WHERE xact_start < CURRENT_TIMESTAMP - '%%d seconds'::interval %%s
2231 ORDER BY %(pid)s
2232- """ % activity_cols(cur))
2233- % (options.max_seconds, user_match_sql), options.users)
2234+ """
2235+ % activity_cols(cur)
2236+ )
2237+ % (options.max_seconds, user_match_sql),
2238+ options.users,
2239+ )
2240
2241 rows = list(cur.fetchall())
2242
2243 if len(rows) == 0:
2244 if not options.quiet:
2245- print('No transactions to kill')
2246+ print("No transactions to kill")
2247 return 0
2248
2249 for usename, pid, backend_start, transaction_start in rows:
2250- print('Killing %s (%d), %s, %s' % (
2251- usename, pid, backend_start, transaction_start,
2252- ))
2253+ print(
2254+ "Killing %s (%d), %s, %s"
2255+ % (
2256+ usename,
2257+ pid,
2258+ backend_start,
2259+ transaction_start,
2260+ )
2261+ )
2262 if not options.dry_run:
2263 os.kill(pid, signal.SIGTERM)
2264 return 0
2265
2266
2267-if __name__ == '__main__':
2268+if __name__ == "__main__":
2269 sys.exit(main())
2270diff --git a/utilities/pgkillidle.py b/utilities/pgkillidle.py
2271index 917c4ae..6dab70b 100755
2272--- a/utilities/pgkillidle.py
2273+++ b/utilities/pgkillidle.py
2274@@ -10,10 +10,10 @@ __all__ = []
2275
2276 import _pythonpath # noqa: F401
2277
2278-from optparse import OptionParser
2279 import os
2280 import signal
2281 import sys
2282+from optparse import OptionParser
2283
2284 import psycopg2
2285
2286@@ -23,57 +23,89 @@ from lp.services.database import activity_cols
2287 def main():
2288 parser = OptionParser()
2289 parser.add_option(
2290- '-c', '--connection', type='string', dest='connect_string',
2291- default='', help="Psycopg connection string",
2292- )
2293+ "-c",
2294+ "--connection",
2295+ type="string",
2296+ dest="connect_string",
2297+ default="",
2298+ help="Psycopg connection string",
2299+ )
2300 parser.add_option(
2301- '-s', '--max-idle-seconds', type='int',
2302- dest='max_idle_seconds', default=10 * 60,
2303- help='Maximum seconds time idle but open transactions are allowed',
2304- )
2305+ "-s",
2306+ "--max-idle-seconds",
2307+ type="int",
2308+ dest="max_idle_seconds",
2309+ default=10 * 60,
2310+ help="Maximum seconds time idle but open transactions are allowed",
2311+ )
2312 parser.add_option(
2313- '-q', '--quiet', action='store_true', dest="quiet",
2314- default=False, help='Silence output',
2315- )
2316+ "-q",
2317+ "--quiet",
2318+ action="store_true",
2319+ dest="quiet",
2320+ default=False,
2321+ help="Silence output",
2322+ )
2323 parser.add_option(
2324- '-n', '--dry-run', action='store_true', default=False,
2325- dest='dryrun', help="Dry run - don't kill anything",
2326- )
2327+ "-n",
2328+ "--dry-run",
2329+ action="store_true",
2330+ default=False,
2331+ dest="dryrun",
2332+ help="Dry run - don't kill anything",
2333+ )
2334 parser.add_option(
2335- '-i', '--ignore', action='append', dest='ignore',
2336- help='Ignore connections by USER', metavar='USER')
2337+ "-i",
2338+ "--ignore",
2339+ action="append",
2340+ dest="ignore",
2341+ help="Ignore connections by USER",
2342+ metavar="USER",
2343+ )
2344 options, args = parser.parse_args()
2345 if len(args) > 0:
2346- parser.error('Too many arguments')
2347+ parser.error("Too many arguments")
2348
2349- ignore_sql = ' AND usename <> %s' * len(options.ignore or [])
2350+ ignore_sql = " AND usename <> %s" * len(options.ignore or [])
2351
2352 con = psycopg2.connect(options.connect_string)
2353 cur = con.cursor()
2354- cur.execute(("""
2355+ cur.execute(
2356+ (
2357+ """
2358 SELECT usename, %(pid)s, backend_start, query_start
2359 FROM pg_stat_activity
2360 WHERE %(query)s = '<IDLE> in transaction'
2361 AND query_start < CURRENT_TIMESTAMP - '%%d seconds'::interval %%s
2362 ORDER BY %(pid)s
2363- """ % activity_cols(cur))
2364- % (options.max_idle_seconds, ignore_sql), options.ignore)
2365+ """
2366+ % activity_cols(cur)
2367+ )
2368+ % (options.max_idle_seconds, ignore_sql),
2369+ options.ignore,
2370+ )
2371
2372 rows = cur.fetchall()
2373
2374 if len(rows) == 0:
2375 if not options.quiet:
2376- print('No IDLE transactions to kill')
2377+ print("No IDLE transactions to kill")
2378 return 0
2379
2380 for usename, pid, backend_start, query_start in rows:
2381- print('Killing %s(%d), %s, %s' % (
2382- usename, pid, backend_start, query_start,
2383- ))
2384+ print(
2385+ "Killing %s(%d), %s, %s"
2386+ % (
2387+ usename,
2388+ pid,
2389+ backend_start,
2390+ query_start,
2391+ )
2392+ )
2393 if not options.dryrun:
2394 os.kill(pid, signal.SIGTERM)
2395 return 0
2396
2397
2398-if __name__ == '__main__':
2399+if __name__ == "__main__":
2400 sys.exit(main())
2401diff --git a/utilities/pglogwatch.py b/utilities/pglogwatch.py
2402index 8d61ed1..1f61d3b 100755
2403--- a/utilities/pglogwatch.py
2404+++ b/utilities/pglogwatch.py
2405@@ -7,30 +7,39 @@
2406 Watch live PostgreSQL logs for interesting stuff
2407 """
2408
2409-from optparse import OptionParser
2410 import re
2411 import subprocess
2412 import sys
2413+from optparse import OptionParser
2414
2415
2416 def get_options(args=None):
2417 parser = OptionParser()
2418- parser.add_option("-l", "--logfile", dest="logfile",
2419- default="/var/log/postgresql/postgres.log",
2420- metavar="LOG", help="Monitor LOG instead of the default"
2421- )
2422- parser.add_option("--slow", dest="slow",
2423- type="float", default=100.0, metavar="TIME",
2424- help="Report slow queries taking over TIME seconds",
2425- )
2426+ parser.add_option(
2427+ "-l",
2428+ "--logfile",
2429+ dest="logfile",
2430+ default="/var/log/postgresql/postgres.log",
2431+ metavar="LOG",
2432+ help="Monitor LOG instead of the default",
2433+ )
2434+ parser.add_option(
2435+ "--slow",
2436+ dest="slow",
2437+ type="float",
2438+ default=100.0,
2439+ metavar="TIME",
2440+ help="Report slow queries taking over TIME seconds",
2441+ )
2442 (options, args) = parser.parse_args(args)
2443 return options
2444
2445+
2446 def generate_loglines(logfile):
2447 """Generator returning the next line in the logfile (blocking)"""
2448 cmd = subprocess.Popen(
2449- ['tail', '-f', logfile],
2450- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
2451+ ["tail", "-f", logfile], stdout=subprocess.PIPE, stderr=subprocess.PIPE
2452+ )
2453 while cmd.poll() is None:
2454 yield cmd.stdout.readline()
2455 if cmd.returncode != 0:
2456@@ -49,34 +58,55 @@ class Process:
2457
2458
2459 class Watcher:
2460- _line_re = re.compile(r"""
2461+ _line_re = re.compile(
2462+ r"""
2463 ^\d{4}-\d\d-\d\d \s \d\d:\d\d:\d\d \s
2464 \[(?P<pid>\d+)\] \s (?P<type>LOG|ERROR|DETAIL): \s+ (?P<rest>.*)$
2465- """, re.X)
2466+ """,
2467+ re.X,
2468+ )
2469
2470- _statement_re = re.compile(r"""
2471+ _statement_re = re.compile(
2472+ r"""
2473 ^statement: \s (?P<statement>.*)$
2474- """, re.X)
2475+ """,
2476+ re.X,
2477+ )
2478
2479- _duration_re = re.compile(r"""
2480+ _duration_re = re.compile(
2481+ r"""
2482 ^duration: \s (?P<duration>\d+\.\d+) \s ms$
2483- """, re.X)
2484+ """,
2485+ re.X,
2486+ )
2487
2488- _connection_received_re = re.compile(r"""
2489+ _connection_received_re = re.compile(
2490+ r"""
2491 ^connection \s received: \s+ (?P<connection>.*)$
2492- """, re.X)
2493+ """,
2494+ re.X,
2495+ )
2496
2497- _connection_authorized_re = re.compile(r"""
2498+ _connection_authorized_re = re.compile(
2499+ r"""
2500 ^connection \s authorized: \s+ (?P<auth>.*)$
2501- """, re.X)
2502+ """,
2503+ re.X,
2504+ )
2505
2506- _ignored_rest_re = re.compile(r"""
2507+ _ignored_rest_re = re.compile(
2508+ r"""
2509 ^(received \s | ERROR: \s | unexpected \s EOF \s) .*$
2510- """, re.X)
2511+ """,
2512+ re.X,
2513+ )
2514
2515- _ignored_statements_re = re.compile(r"""
2516+ _ignored_statements_re = re.compile(
2517+ r"""
2518 ^(BEGIN.*|END)$
2519- """, re.X)
2520+ """,
2521+ re.X,
2522+ )
2523
2524 def __init__(self, options):
2525 self.processes = {}
2526@@ -91,23 +121,23 @@ class Watcher:
2527 def feed(self, line):
2528
2529 # Handle continuations of previous statement
2530- if line.startswith('\t'):
2531+ if line.startswith("\t"):
2532 if self.previous_process is not None:
2533- self.previous_process.statement += '\n%s' % line[1:-1]
2534+ self.previous_process.statement += "\n%s" % line[1:-1]
2535 return
2536
2537 match = self._line_re.search(line)
2538 if match is None:
2539- raise ValueError('Badly formatted line %r' % (line,))
2540+ raise ValueError("Badly formatted line %r" % (line,))
2541
2542- t = match.group('type')
2543- if t in ['ERROR', 'DETAIL']:
2544+ t = match.group("type")
2545+ if t in ["ERROR", "DETAIL"]:
2546 return
2547- if t != 'LOG':
2548- raise ValueError('Unknown line type %s (%r)' % (t, line))
2549+ if t != "LOG":
2550+ raise ValueError("Unknown line type %s (%r)" % (t, line))
2551
2552- pid = int(match.group('pid'))
2553- rest = match.group('rest')
2554+ pid = int(match.group("pid"))
2555+ rest = match.group("rest")
2556
2557 process = self.processes.get(pid, None)
2558 if process is None:
2559@@ -117,16 +147,16 @@ class Watcher:
2560
2561 match = self._statement_re.search(rest)
2562 if match is not None:
2563- statement = match.group('statement')
2564+ statement = match.group("statement")
2565 if process.statement:
2566- process.statement += '\n%s' % statement
2567+ process.statement += "\n%s" % statement
2568 else:
2569 process.statement = statement
2570 return
2571
2572 match = self._duration_re.search(rest)
2573 if match is not None:
2574- process.duration = float(match.group('duration'))
2575+ process.duration = float(match.group("duration"))
2576 self.reportDuration(process)
2577 self.previous_process = None
2578 del self.processes[process.pid]
2579@@ -134,19 +164,19 @@ class Watcher:
2580
2581 match = self._connection_received_re.search(rest)
2582 if match is not None:
2583- process.connection = match.group('connection')
2584+ process.connection = match.group("connection")
2585 return
2586
2587 match = self._connection_authorized_re.search(rest)
2588 if match is not None:
2589- process.auth = match.group('auth')
2590+ process.auth = match.group("auth")
2591 return
2592
2593 match = self._ignored_rest_re.search(rest)
2594 if match is not None:
2595 return
2596
2597- raise ValueError('Unknown entry: %r' % (rest,))
2598+ raise ValueError("Unknown entry: %r" % (rest,))
2599
2600 def reportDuration(self, process):
2601 """Report a slow statement if it is above a threshold"""
2602@@ -158,11 +188,11 @@ class Watcher:
2603 return
2604
2605 if process.duration > options.slow:
2606- print('[%5d] %s' % (process.pid, process.statement))
2607- print(' Duration: %0.3f' % (process.duration,))
2608+ print("[%5d] %s" % (process.pid, process.statement))
2609+ print(" Duration: %0.3f" % (process.duration,))
2610
2611
2612-if __name__ == '__main__':
2613+if __name__ == "__main__":
2614 options = get_options()
2615
2616 watcher = Watcher(options)
2617diff --git a/utilities/pgmassacre.py b/utilities/pgmassacre.py
2618index 503c14c..4fbf9e2 100755
2619--- a/utilities/pgmassacre.py
2620+++ b/utilities/pgmassacre.py
2621@@ -15,9 +15,9 @@ Cut off access, slaughter connections and burn the database to the ground
2622
2623 import _pythonpath # noqa: F401
2624
2625-from optparse import OptionParser
2626 import sys
2627 import time
2628+from optparse import OptionParser
2629
2630 import psycopg2
2631 import psycopg2.extensions
2632@@ -25,7 +25,7 @@ import psycopg2.extensions
2633 from lp.services.database import activity_cols
2634
2635
2636-def connect(dbname='template1'):
2637+def connect(dbname="template1"):
2638 """Connect to the database, returning the DB-API connection."""
2639 if options.user is not None:
2640 return psycopg2.connect("dbname=%s user=%s" % (dbname, options.user))
2641@@ -45,8 +45,8 @@ def rollback_prepared_transactions(database):
2642
2643 # Get a list of outstanding prepared transactions.
2644 cur.execute(
2645- "SELECT gid FROM pg_prepared_xacts WHERE database=%(database)s",
2646- vars())
2647+ "SELECT gid FROM pg_prepared_xacts WHERE database=%(database)s", vars()
2648+ )
2649 xids = [row[0] for row in cur.fetchall()]
2650 for xid in xids:
2651 cur.execute("ROLLBACK PREPARED %(xid)s", vars())
2652@@ -67,13 +67,17 @@ def still_open(database, max_wait=120):
2653 # of the backends are gone.
2654 start = time.time()
2655 while time.time() < start + max_wait:
2656- cur.execute("""
2657+ cur.execute(
2658+ """
2659 SELECT TRUE FROM pg_stat_activity
2660 WHERE
2661 datname=%%s
2662 AND %(pid)s != pg_backend_pid()
2663 LIMIT 1
2664- """ % activity_cols(cur), [database])
2665+ """
2666+ % activity_cols(cur),
2667+ [database],
2668+ )
2669 if cur.fetchone() is None:
2670 return False
2671 time.sleep(0.6) # Stats only updated every 500ms.
2672@@ -89,8 +93,8 @@ def massacre(database):
2673 # Allow connections to the doomed database if something turned this off,
2674 # such as an aborted run of this script.
2675 cur.execute(
2676- "UPDATE pg_database SET datallowconn=TRUE WHERE datname=%s",
2677- [database])
2678+ "UPDATE pg_database SET datallowconn=TRUE WHERE datname=%s", [database]
2679+ )
2680
2681 # Rollback prepared transactions.
2682 rollback_prepared_transactions(database)
2683@@ -99,7 +103,8 @@ def massacre(database):
2684 # Stop connections to the doomed database.
2685 cur.execute(
2686 "UPDATE pg_database SET datallowconn=FALSE WHERE datname=%s",
2687- [database])
2688+ [database],
2689+ )
2690
2691 # New connections are disabled, but pg_stat_activity is only
2692 # updated every 500ms. Ensure that pg_stat_activity has
2693@@ -108,21 +113,25 @@ def massacre(database):
2694 time.sleep(1)
2695
2696 # Terminate open connections.
2697- cur.execute("""
2698+ cur.execute(
2699+ """
2700 SELECT %(pid)s, pg_terminate_backend(%(pid)s)
2701 FROM pg_stat_activity
2702 WHERE datname=%%s AND %(pid)s <> pg_backend_pid()
2703- """ % activity_cols(cur), [database])
2704+ """
2705+ % activity_cols(cur),
2706+ [database],
2707+ )
2708 for pid, success in cur.fetchall():
2709 if not success:
2710- print(
2711- "pg_terminate_backend(%s) failed" % pid, file=sys.stderr)
2712+ print("pg_terminate_backend(%s) failed" % pid, file=sys.stderr)
2713 con.close()
2714
2715 if still_open(database):
2716 print(
2717 "Unable to kill all backends! Database not destroyed.",
2718- file=sys.stderr)
2719+ file=sys.stderr,
2720+ )
2721 return 9
2722
2723 # Destroy the database.
2724@@ -140,8 +149,9 @@ def massacre(database):
2725 con.set_isolation_level(0)
2726 cur = con.cursor()
2727 cur.execute(
2728- "UPDATE pg_database SET datallowconn=TRUE WHERE datname=%s",
2729- [database])
2730+ "UPDATE pg_database SET datallowconn=TRUE WHERE datname=%s",
2731+ [database],
2732+ )
2733 con.close()
2734
2735
2736@@ -149,7 +159,8 @@ def rebuild(database, template):
2737 if still_open(template, 20):
2738 print(
2739 "Giving up waiting for connections to %s to drop." % template,
2740- file=sys.stderr)
2741+ file=sys.stderr,
2742+ )
2743 report_open_connections(template)
2744 return 10
2745
2746@@ -160,7 +171,10 @@ def rebuild(database, template):
2747 con.set_isolation_level(0) # Autocommit required for CREATE DATABASE.
2748 create_db_cmd = """
2749 CREATE DATABASE %s WITH ENCODING='UTF8' TEMPLATE=%s
2750- """ % (database, template)
2751+ """ % (
2752+ database,
2753+ template,
2754+ )
2755 # 8.4 allows us to create empty databases with a different locale
2756 # to template1 by using the template0 database as a template.
2757 # We make use of this feature so we don't have to care what locale
2758@@ -187,17 +201,21 @@ def rebuild(database, template):
2759 def report_open_connections(database):
2760 con = connect()
2761 cur = con.cursor()
2762- cur.execute("""
2763+ cur.execute(
2764+ """
2765 SELECT usename, datname, count(*)
2766 FROM pg_stat_activity
2767 WHERE %(pid)s != pg_backend_pid()
2768 GROUP BY usename, datname
2769 ORDER BY datname, usename
2770- """ % activity_cols(cur))
2771+ """
2772+ % activity_cols(cur)
2773+ )
2774 for usename, datname, num_connections in cur.fetchall():
2775 print(
2776 "%d connections by %s to %s" % (num_connections, usename, datname),
2777- file=sys.stderr)
2778+ file=sys.stderr,
2779+ )
2780 con.close()
2781
2782
2783@@ -206,24 +224,36 @@ options = None
2784
2785 def main():
2786 parser = OptionParser("Usage: %prog [options] DBNAME")
2787- parser.add_option("-U", "--user", dest="user", default=None,
2788- help="Connect as USER", metavar="USER")
2789- parser.add_option("-t", "--template", dest="template", default=None,
2790+ parser.add_option(
2791+ "-U",
2792+ "--user",
2793+ dest="user",
2794+ default=None,
2795+ help="Connect as USER",
2796+ metavar="USER",
2797+ )
2798+ parser.add_option(
2799+ "-t",
2800+ "--template",
2801+ dest="template",
2802+ default=None,
2803 help="Recreate database using DBNAME as a template database."
2804- " If template0, database will be created in the C locale.",
2805- metavar="DBNAME")
2806+ " If template0, database will be created in the C locale.",
2807+ metavar="DBNAME",
2808+ )
2809 global options
2810 (options, args) = parser.parse_args()
2811
2812 if len(args) != 1:
2813- parser.error('Must specify one, and only one, database to destroy')
2814+ parser.error("Must specify one, and only one, database to destroy")
2815
2816 database = args[0]
2817
2818 # Don't be stupid protection.
2819- if database in ('template1', 'template0'):
2820+ if database in ("template1", "template0"):
2821 parser.error(
2822- "Running this script against template1 or template0 is nuts.")
2823+ "Running this script against template1 or template0 is nuts."
2824+ )
2825
2826 con = connect()
2827 cur = con.cursor()
2828@@ -231,11 +261,12 @@ def main():
2829 # Ensure the template database exists.
2830 if options.template is not None:
2831 cur.execute(
2832- "SELECT TRUE FROM pg_database WHERE datname=%s",
2833- [options.template])
2834+ "SELECT TRUE FROM pg_database WHERE datname=%s", [options.template]
2835+ )
2836 if cur.fetchone() is None:
2837 parser.error(
2838- "Template database %s does not exist." % options.template)
2839+ "Template database %s does not exist." % options.template
2840+ )
2841 # If the database doesn't exist, no point attempting to drop it.
2842 cur.execute("SELECT TRUE FROM pg_database WHERE datname=%s", [database])
2843 db_exists = cur.fetchone() is not None
2844@@ -253,5 +284,5 @@ def main():
2845 return 0
2846
2847
2848-if __name__ == '__main__':
2849+if __name__ == "__main__":
2850 sys.exit(main())
2851diff --git a/utilities/pgstats.py b/utilities/pgstats.py
2852index 546350d..d470605 100755
2853--- a/utilities/pgstats.py
2854+++ b/utilities/pgstats.py
2855@@ -16,53 +16,58 @@ import psycopg2
2856 def percentage(num, total):
2857 """Return a percentage string of num/total"""
2858 if total == 0:
2859- return 'Unknown'
2860+ return "Unknown"
2861 else:
2862- return '%3.2f%%' % ( (num * 100.0) / total, )
2863+ return "%3.2f%%" % ((num * 100.0) / total,)
2864
2865
2866 def print_row(key, value):
2867- print('%(key)-20s: %(value)s' % vars())
2868+ print("%(key)-20s: %(value)s" % vars())
2869
2870
2871 def pgstattuple(cur, table):
2872- """Return the result of PostgreSQL contribs's pgstattuple function
2873- """
2874- cur.execute("""
2875+ """Return the result of PostgreSQL contribs's pgstattuple function"""
2876+ cur.execute(
2877+ """
2878 SELECT
2879 table_len, tuple_count, tuple_len, tuple_percent,
2880 dead_tuple_count, dead_tuple_len, dead_tuple_percent,
2881 free_space, free_percent
2882 FROM pgstattuple(%(table)s)
2883- """, vars())
2884+ """,
2885+ vars(),
2886+ )
2887 pgstattuple = cur.fetchone()
2888 return {
2889- 'name': table,
2890- 'table_len': pgstattuple[0],
2891- 'tuple_count': pgstattuple[1],
2892- 'tuple_len': pgstattuple[2],
2893- 'tuple_percent': pgstattuple[3],
2894- 'dead_tuple_count': pgstattuple[4],
2895- 'dead_tuple_len': pgstattuple[5],
2896- 'dead_tuple_percent': pgstattuple[6],
2897- 'free_space': pgstattuple[7],
2898- 'free_percent': pgstattuple[8],
2899- }
2900+ "name": table,
2901+ "table_len": pgstattuple[0],
2902+ "tuple_count": pgstattuple[1],
2903+ "tuple_len": pgstattuple[2],
2904+ "tuple_percent": pgstattuple[3],
2905+ "dead_tuple_count": pgstattuple[4],
2906+ "dead_tuple_len": pgstattuple[5],
2907+ "dead_tuple_percent": pgstattuple[6],
2908+ "free_space": pgstattuple[7],
2909+ "free_percent": pgstattuple[8],
2910+ }
2911
2912
2913 def main(dbname):
2914 con = psycopg2.connect("dbname=%s" % dbname)
2915 cur = con.cursor()
2916
2917- print('Statistics for %s' % dbname)
2918- print('===============' + '=' * (len(dbname)))
2919+ print("Statistics for %s" % dbname)
2920+ print("===============" + "=" * (len(dbname)))
2921
2922 # Database level statistics
2923- cur.execute("""
2924+ cur.execute(
2925+ """
2926 SELECT blks_hit, blks_read, numbackends,xact_commit, xact_rollback
2927 FROM pg_stat_database
2928 WHERE datname=%(dbname)s
2929- """, vars())
2930+ """,
2931+ vars(),
2932+ )
2933 hit, read, backends, commits, rollbacks = cur.fetchone()
2934
2935 hit_rate = percentage(hit, hit + read)
2936@@ -75,56 +80,67 @@ def main(dbname):
2937 print_row("Commit rate", commit_rate)
2938
2939 # Determine dead tuple bloat, if we have pgstattuple installed
2940- cur.execute("""
2941+ cur.execute(
2942+ """
2943 SELECT COUNT(*) FROM pg_proc, pg_namespace
2944 WHERE pg_proc.pronamespace = pg_namespace.oid
2945 AND pg_namespace.nspname = 'public'
2946 AND proname = 'pgstattuple'
2947- """)
2948- pgstattuple_installed = (cur.fetchone()[0] > 0)
2949+ """
2950+ )
2951+ pgstattuple_installed = cur.fetchone()[0] > 0
2952 if pgstattuple_installed:
2953- cur.execute("""
2954+ cur.execute(
2955+ """
2956 SELECT nspname || '.' || relname
2957 FROM pg_class, pg_namespace
2958 WHERE pg_class.relnamespace = pg_namespace.oid
2959 AND pg_class.relkind = 'r'
2960 ORDER BY nspname, relname
2961- """)
2962+ """
2963+ )
2964 all_tables = [r[0] for r in cur.fetchall()]
2965 total_live_bytes = 0
2966 total_dead_bytes = 0
2967 stats = []
2968 for table in all_tables:
2969 stat = pgstattuple(cur, table)
2970- total_live_bytes += stat['tuple_len']
2971- total_dead_bytes += stat['dead_tuple_len']
2972+ total_live_bytes += stat["tuple_len"]
2973+ total_dead_bytes += stat["dead_tuple_len"]
2974 stats.append(stat)
2975 # Just report the worst offenders
2976- stats.sort(key=lambda x: x['dead_tuple_percent'], reverse=True)
2977+ stats.sort(key=lambda x: x["dead_tuple_percent"], reverse=True)
2978 stats = [
2979- s for s in stats if s['dead_tuple_percent'] >= 10
2980- and s['dead_tuple_len'] >= 25 * 1024 * 1024
2981- ]
2982+ s
2983+ for s in stats
2984+ if s["dead_tuple_percent"] >= 10
2985+ and s["dead_tuple_len"] >= 25 * 1024 * 1024
2986+ ]
2987+
2988 def statstr(stat):
2989- name = stat['name']
2990- dead_tuple_percent = stat['dead_tuple_percent']
2991- dead_len = stat['dead_tuple_len'] / (1024*1024)
2992+ name = stat["name"]
2993+ dead_tuple_percent = stat["dead_tuple_percent"]
2994+ dead_len = stat["dead_tuple_len"] / (1024 * 1024)
2995 return (
2996- '%(name)s (%(dead_len)0.2fMB, '
2997- '%(dead_tuple_percent)0.2f%%)' % {
2998- 'name': name,
2999- 'dead_len': dead_len,
3000- 'dead_tuple_percent': dead_tuple_percent,
3001- })
3002+ "%(name)s (%(dead_len)0.2fMB, "
3003+ "%(dead_tuple_percent)0.2f%%)"
3004+ % {
3005+ "name": name,
3006+ "dead_len": dead_len,
3007+ "dead_tuple_percent": dead_tuple_percent,
3008+ }
3009+ )
3010+
3011 if len(stats) > 0:
3012- print_row('Needing vacuum', statstr(stats[0]))
3013+ print_row("Needing vacuum", statstr(stats[0]))
3014 for stat in stats[1:]:
3015- print_row('', statstr(stat))
3016+ print_row("", statstr(stat))
3017
3018 # Unused indexes, ignoring primary keys.
3019 # XXX Stuart Bishop 2005-06-28:
3020 # We should identify constraints used to enforce uniqueness too
3021- cur.execute("""
3022+ cur.execute(
3023+ """
3024 SELECT relname, indexrelname
3025 FROM pg_stat_user_indexes AS u JOIN pg_indexes AS i
3026 ON u.schemaname = i.schemaname
3027@@ -135,17 +151,19 @@ def main(dbname):
3028 AND indexrelname NOT LIKE '%_pkey'
3029 AND indexdef NOT LIKE 'CREATE UNIQUE %'
3030 ORDER BY relname, indexrelname
3031- """)
3032+ """
3033+ )
3034
3035 rows = cur.fetchall()
3036 if len(rows) == 0:
3037- print_row('Unused indexes', 'N/A')
3038+ print_row("Unused indexes", "N/A")
3039 else:
3040- print_row('Unused indexes', rows[0][1])
3041+ print_row("Unused indexes", rows[0][1])
3042 for table, index in rows[1:]:
3043- print_row('', index)
3044+ print_row("", index)
3045+
3046
3047-if __name__ == '__main__':
3048+if __name__ == "__main__":
3049 if len(sys.argv) != 2:
3050 print("Usage: %s [DBNAME]" % sys.argv[0], file=sys.stderr)
3051 sys.exit(1)
3052diff --git a/utilities/publish-to-swift b/utilities/publish-to-swift
3053index 84de4c1..93280d8 100755
3054--- a/utilities/publish-to-swift
3055+++ b/utilities/publish-to-swift
3056@@ -7,17 +7,17 @@
3057
3058 import _pythonpath # noqa: F401
3059
3060-from argparse import ArgumentParser
3061 import os
3062+from argparse import ArgumentParser
3063
3064 import iso8601
3065 import requests
3066 from swiftclient.service import (
3067- get_conn,
3068- process_options,
3069 SwiftService,
3070 SwiftUploadObject,
3071- )
3072+ get_conn,
3073+ process_options,
3074+)
3075 from swiftclient.shell import add_default_args
3076
3077
3078@@ -38,30 +38,42 @@ def get_swift_storage_url(options):
3079 return get_conn(options).get_auth()[0]
3080
3081
3082-def publish_file_to_swift(options, container_name, object_path, local_path,
3083- overwrite=True):
3084+def publish_file_to_swift(
3085+ options, container_name, object_path, local_path, overwrite=True
3086+):
3087 """Publish a file to a Swift container."""
3088 storage_url = get_swift_storage_url(options)
3089
3090 with SwiftService(options=options) as swift:
3091 stat_results = swift.stat(
3092- container=container_name, objects=[object_path])
3093+ container=container_name, objects=[object_path]
3094+ )
3095 if stat_results and next(stat_results)["success"]:
3096- print("Object {} already published to {}.".format(
3097- object_path, container_name))
3098+ print(
3099+ "Object {} already published to {}.".format(
3100+ object_path, container_name
3101+ )
3102+ )
3103 if not overwrite:
3104 return
3105
3106- print("Publishing {} to {} as {}.".format(
3107- local_path, container_name, object_path))
3108+ print(
3109+ "Publishing {} to {} as {}.".format(
3110+ local_path, container_name, object_path
3111+ )
3112+ )
3113 for r in swift.upload(
3114- container_name,
3115- [SwiftUploadObject(local_path, object_name=object_path)]):
3116+ container_name,
3117+ [SwiftUploadObject(local_path, object_name=object_path)],
3118+ ):
3119 if not r["success"]:
3120 raise r["error"]
3121
3122- print("Published file: {}/{}/{}".format(
3123- storage_url, container_name, object_path))
3124+ print(
3125+ "Published file: {}/{}/{}".format(
3126+ storage_url, container_name, object_path
3127+ )
3128+ )
3129
3130
3131 def prune_old_files_from_swift(options, container_name, object_dir):
3132@@ -74,8 +86,9 @@ def prune_old_files_from_swift(options, container_name, object_dir):
3133 objs = {}
3134 production_mtime = None
3135 for stats in swift.list(
3136- container=container_name,
3137- options={"prefix": "{}/".format(object_dir)}):
3138+ container=container_name,
3139+ options={"prefix": "{}/".format(object_dir)},
3140+ ):
3141 if not stats["success"]:
3142 raise stats["error"]
3143 for item in stats["listing"]:
3144@@ -83,20 +96,23 @@ def prune_old_files_from_swift(options, container_name, object_dir):
3145 mtime = iso8601.parse_date(item["last_modified"])
3146 objs[item["name"]] = mtime
3147 if item["name"].startswith(
3148- "{}/{}/".format(object_dir, production_revision)):
3149+ "{}/{}/".format(object_dir, production_revision)
3150+ ):
3151 production_mtime = mtime
3152
3153 if production_mtime is None:
3154 print(
3155 "No file in {} corresponding to production revision {}; "
3156- "not pruning.".format(container_name, production_revision))
3157+ "not pruning.".format(container_name, production_revision)
3158+ )
3159 return
3160
3161 for object_name, mtime in sorted(objs.items()):
3162 if mtime < production_mtime:
3163 print("Pruning {} (older than production)".format(object_name))
3164 for r in swift.delete(
3165- container=container_name, objects=[object_name]):
3166+ container=container_name, objects=[object_name]
3167+ ):
3168 if not r["success"]:
3169 raise r["error"]
3170
3171@@ -137,7 +153,7 @@ def main():
3172 "OS_USER_DOMAIN_ID",
3173 "OS_USER_DOMAIN_NAME",
3174 "OS_USER_ID",
3175- }
3176+ }
3177 for key, value in sorted(os.environ.items()):
3178 if key.startswith("OS_"):
3179 if key not in safe_keys:
3180@@ -150,10 +166,15 @@ def main():
3181 overwrite = "FORCE_REBUILD" in os.environ
3182 ensure_container_privs(options, args.container_name)
3183 publish_file_to_swift(
3184- options, args.container_name, args.swift_object_path, args.local_path,
3185- overwrite=overwrite)
3186+ options,
3187+ args.container_name,
3188+ args.swift_object_path,
3189+ args.local_path,
3190+ overwrite=overwrite,
3191+ )
3192 prune_old_files_from_swift(
3193- options, args.container_name, args.swift_object_path.split("/")[0])
3194+ options, args.container_name, args.swift_object_path.split("/")[0]
3195+ )
3196
3197
3198 if __name__ == "__main__":
3199diff --git a/utilities/report-database-stats.py b/utilities/report-database-stats.py
3200index a63b487..4396ef5 100755
3201--- a/utilities/report-database-stats.py
3202+++ b/utilities/report-database-stats.py
3203@@ -8,17 +8,11 @@ import _pythonpath # noqa: F401
3204
3205 from datetime import datetime
3206 from operator import attrgetter
3207-from textwrap import (
3208- dedent,
3209- fill,
3210- )
3211+from textwrap import dedent, fill
3212
3213 from lp.scripts.helpers import LPOptionParser
3214 from lp.services.database.namedrow import named_fetchall
3215-from lp.services.database.sqlbase import (
3216- connect,
3217- sqlvalues,
3218- )
3219+from lp.services.database.sqlbase import connect, sqlvalues
3220 from lp.services.scripts import db_options
3221
3222
3223@@ -26,7 +20,7 @@ class Table:
3224 pass
3225
3226
3227-def get_where_clause(options, fuzz='0 seconds'):
3228+def get_where_clause(options, fuzz="0 seconds"):
3229 "Generate a WHERE clause referencing the date_created column."
3230 # We have two of the from timestamp, the until timestamp and an
3231 # interval. The interval is in a format unsuitable for processing in
3232@@ -36,40 +30,49 @@ def get_where_clause(options, fuzz='0 seconds'):
3233 # generate the SQL representation of the from timestamp and the
3234 # until timestamp.
3235 if options.from_ts:
3236- from_sql = ("CAST(%s AS timestamp without time zone)"
3237- % sqlvalues(options.from_ts))
3238+ from_sql = "CAST(%s AS timestamp without time zone)" % sqlvalues(
3239+ options.from_ts
3240+ )
3241 elif options.interval and options.until_ts:
3242 from_sql = (
3243 "CAST(%s AS timestamp without time zone) - CAST(%s AS interval)"
3244- % sqlvalues(options.until_ts, options.interval))
3245+ % sqlvalues(options.until_ts, options.interval)
3246+ )
3247 elif options.interval:
3248 from_sql = (
3249 "(CURRENT_TIMESTAMP AT TIME ZONE 'UTC') - CAST(%s AS interval)"
3250- % sqlvalues(options.interval))
3251+ % sqlvalues(options.interval)
3252+ )
3253 else:
3254 from_sql = "CAST('1970-01-01' AS timestamp without time zone)"
3255
3256 if options.until_ts:
3257- until_sql = (
3258- "CAST(%s AS timestamp without time zone)"
3259- % sqlvalues(options.until_ts))
3260+ until_sql = "CAST(%s AS timestamp without time zone)" % sqlvalues(
3261+ options.until_ts
3262+ )
3263 elif options.interval and options.from_ts:
3264 until_sql = (
3265 "CAST(%s AS timestamp without time zone) + CAST(%s AS interval)"
3266- % sqlvalues(options.from_ts, options.interval))
3267+ % sqlvalues(options.from_ts, options.interval)
3268+ )
3269 else:
3270 until_sql = "CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"
3271
3272 fuzz_sql = "CAST(%s AS interval)" % sqlvalues(fuzz)
3273 clause = "date_created BETWEEN (%s - %s) AND (%s + %s)" % (
3274- from_sql, fuzz_sql, until_sql, fuzz_sql)
3275+ from_sql,
3276+ fuzz_sql,
3277+ until_sql,
3278+ fuzz_sql,
3279+ )
3280
3281 return clause
3282
3283
3284 def get_table_stats(cur, options):
3285- params = {'where': get_where_clause(options)}
3286- tablestats_query = dedent("""\
3287+ params = {"where": get_where_clause(options)}
3288+ tablestats_query = dedent(
3289+ """\
3290 SELECT
3291 Earliest.date_created AS date_start,
3292 Latest.date_created AS date_end,
3293@@ -101,7 +104,9 @@ def get_table_stats(cur, options):
3294 WHERE %(where)s)
3295 AND Earliest.schemaname = Latest.schemaname
3296 AND Earliest.relname = Latest.relname
3297- """ % params)
3298+ """
3299+ % params
3300+ )
3301 cur.execute(tablestats_query)
3302
3303 # description[0] is the column name, per PEP-0249
3304@@ -113,7 +118,8 @@ def get_table_stats(cur, options):
3305 setattr(table, fields[index], row[index])
3306 table.total_tup_read = table.seq_tup_read + table.idx_tup_fetch
3307 table.total_tup_written = (
3308- table.n_tup_ins + table.n_tup_upd + table.n_tup_del)
3309+ table.n_tup_ins + table.n_tup_upd + table.n_tup_del
3310+ )
3311 tables.add(table)
3312
3313 return tables
3314@@ -127,8 +133,9 @@ def get_cpu_stats(cur, options):
3315 # database users not connected when the sample was taken are not
3316 # recorded - we want the average utilization over the time period,
3317 # not the subset of the time period the user was actually connected.
3318- params = {'where': get_where_clause(options)}
3319- query = dedent("""\
3320+ params = {"where": get_where_clause(options)}
3321+ query = dedent(
3322+ """\
3323 SELECT (
3324 CAST(SUM(cpu) AS float) / (
3325 SELECT COUNT(DISTINCT date_created) FROM DatabaseCpuStats
3326@@ -137,7 +144,9 @@ def get_cpu_stats(cur, options):
3327 FROM DatabaseCpuStats
3328 WHERE %(where)s
3329 GROUP BY username
3330- """ % params)
3331+ """
3332+ % params
3333+ )
3334 cur.execute(query)
3335 cpu_stats = set(cur.fetchall())
3336
3337@@ -148,13 +157,13 @@ def get_cpu_stats(cur, options):
3338 edge_avg_cpu = 0.0
3339 for stats_tuple in list(cpu_stats):
3340 avg_cpu, username = stats_tuple
3341- if username == 'lpnet':
3342+ if username == "lpnet":
3343 lpnet_avg_cpu = avg_cpu
3344 cpu_stats.discard(stats_tuple)
3345- elif username == 'edge':
3346+ elif username == "edge":
3347 edge_avg_cpu = avg_cpu
3348 cpu_stats.discard(stats_tuple)
3349- cpu_stats.add((lpnet_avg_cpu + edge_avg_cpu, 'lpnet'))
3350+ cpu_stats.add((lpnet_avg_cpu + edge_avg_cpu, "lpnet"))
3351
3352 return cpu_stats
3353
3354@@ -168,12 +177,13 @@ def get_bloat_stats(cur, options, kind):
3355 # we cannot guarantee the disk utilization statistics occur
3356 # exactly 24 hours apart. Our most recent snapshot could be 1
3357 # day ago, give or take a few hours.
3358- 'where': get_where_clause(options, fuzz='1 day 6 hours'),
3359- 'bloat': options.bloat,
3360- 'min_bloat': options.min_bloat,
3361- 'kind': kind,
3362- }
3363- query = dedent("""
3364+ "where": get_where_clause(options, fuzz="1 day 6 hours"),
3365+ "bloat": options.bloat,
3366+ "min_bloat": options.min_bloat,
3367+ "kind": kind,
3368+ }
3369+ query = dedent(
3370+ """
3371 SELECT * FROM (
3372 SELECT DISTINCT
3373 namespace,
3374@@ -210,7 +220,9 @@ def get_bloat_stats(cur, options, kind):
3375 table_len >= %(min_bloat)s
3376 AND end_bloat_percent >= %(bloat)s
3377 ORDER BY bloat_len DESC
3378- """ % params)
3379+ """
3380+ % params
3381+ )
3382 cur.execute(query, params)
3383 bloat_stats = named_fetchall(cur)
3384 return list(bloat_stats)
3385@@ -220,38 +232,69 @@ def main():
3386 parser = LPOptionParser()
3387 db_options(parser)
3388 parser.add_option(
3389- "-f", "--from", dest="from_ts", type=datetime,
3390- default=None, metavar="TIMESTAMP",
3391- help="Use statistics collected since TIMESTAMP.")
3392+ "-f",
3393+ "--from",
3394+ dest="from_ts",
3395+ type=datetime,
3396+ default=None,
3397+ metavar="TIMESTAMP",
3398+ help="Use statistics collected since TIMESTAMP.",
3399+ )
3400 parser.add_option(
3401- "-u", "--until", dest="until_ts", type=datetime,
3402- default=None, metavar="TIMESTAMP",
3403- help="Use statistics collected up until TIMESTAMP.")
3404+ "-u",
3405+ "--until",
3406+ dest="until_ts",
3407+ type=datetime,
3408+ default=None,
3409+ metavar="TIMESTAMP",
3410+ help="Use statistics collected up until TIMESTAMP.",
3411+ )
3412 parser.add_option(
3413- "-i", "--interval", dest="interval", type=str,
3414- default=None, metavar="INTERVAL",
3415+ "-i",
3416+ "--interval",
3417+ dest="interval",
3418+ type=str,
3419+ default=None,
3420+ metavar="INTERVAL",
3421 help=(
3422 "Use statistics collected over the last INTERVAL period. "
3423 "INTERVAL is a string parsable by PostgreSQL "
3424- "such as '5 minutes'."))
3425+ "such as '5 minutes'."
3426+ ),
3427+ )
3428 parser.add_option(
3429- "-n", "--limit", dest="limit", type=int,
3430- default=15, metavar="NUM",
3431- help="Display the top NUM items in each category.")
3432+ "-n",
3433+ "--limit",
3434+ dest="limit",
3435+ type=int,
3436+ default=15,
3437+ metavar="NUM",
3438+ help="Display the top NUM items in each category.",
3439+ )
3440 parser.add_option(
3441- "-b", "--bloat", dest="bloat", type=float,
3442- default=40, metavar="BLOAT",
3443- help="Display tables and indexes bloated by more than BLOAT%.")
3444+ "-b",
3445+ "--bloat",
3446+ dest="bloat",
3447+ type=float,
3448+ default=40,
3449+ metavar="BLOAT",
3450+ help="Display tables and indexes bloated by more than BLOAT%.",
3451+ )
3452 parser.add_option(
3453- "--min-bloat", dest="min_bloat", type=int,
3454- default=10000000, metavar="BLOAT",
3455- help="Don't report tables bloated less than BLOAT bytes.")
3456+ "--min-bloat",
3457+ dest="min_bloat",
3458+ type=int,
3459+ default=10000000,
3460+ metavar="BLOAT",
3461+ help="Don't report tables bloated less than BLOAT bytes.",
3462+ )
3463 parser.set_defaults(dbuser="database_stats_report")
3464 options, args = parser.parse_args()
3465
3466 if options.from_ts and options.until_ts and options.interval:
3467 parser.error(
3468- "Only two of --from, --until and --interval may be specified.")
3469+ "Only two of --from, --until and --interval may be specified."
3470+ )
3471
3472 con = connect()
3473 cur = con.cursor()
3474@@ -268,19 +311,27 @@ def main():
3475 user_cpu = get_cpu_stats(cur, options)
3476 print("== Most Active Users ==")
3477 print()
3478- for cpu, username in sorted(user_cpu, reverse=True)[:options.limit]:
3479+ for cpu, username in sorted(user_cpu, reverse=True)[: options.limit]:
3480 print("%40s || %10.2f%% CPU" % (username, float(cpu) / 10))
3481
3482 print()
3483 print("== Most Written Tables ==")
3484 print()
3485 tables_sort = [
3486- 'total_tup_written', 'n_tup_upd', 'n_tup_ins', 'n_tup_del', 'relname']
3487+ "total_tup_written",
3488+ "n_tup_upd",
3489+ "n_tup_ins",
3490+ "n_tup_del",
3491+ "relname",
3492+ ]
3493 most_written_tables = sorted(
3494- tables, key=attrgetter(*tables_sort), reverse=True)
3495- for table in most_written_tables[:options.limit]:
3496- print("%40s || %10.2f tuples/sec" % (
3497- table.relname, table.total_tup_written / per_second))
3498+ tables, key=attrgetter(*tables_sort), reverse=True
3499+ )
3500+ for table in most_written_tables[: options.limit]:
3501+ print(
3502+ "%40s || %10.2f tuples/sec"
3503+ % (table.relname, table.total_tup_written / per_second)
3504+ )
3505
3506 print()
3507 print("== Most Read Tables ==")
3508@@ -289,14 +340,17 @@ def main():
3509 # namespace (normally 'public'), relname is the table (relation)
3510 # name. total_tup_red is the total number of rows read.
3511 # idx_tup_fetch is the number of rows looked up using an index.
3512- tables_sort = ['total_tup_read', 'idx_tup_fetch', 'schemaname', 'relname']
3513+ tables_sort = ["total_tup_read", "idx_tup_fetch", "schemaname", "relname"]
3514 most_read_tables = sorted(
3515- tables, key=attrgetter(*tables_sort), reverse=True)
3516- for table in most_read_tables[:options.limit]:
3517- print("%40s || %10.2f tuples/sec" % (
3518- table.relname, table.total_tup_read / per_second))
3519+ tables, key=attrgetter(*tables_sort), reverse=True
3520+ )
3521+ for table in most_read_tables[: options.limit]:
3522+ print(
3523+ "%40s || %10.2f tuples/sec"
3524+ % (table.relname, table.total_tup_read / per_second)
3525+ )
3526
3527- table_bloat_stats = get_bloat_stats(cur, options, 'r')
3528+ table_bloat_stats = get_bloat_stats(cur, options, "r")
3529
3530 if not table_bloat_stats:
3531 print()
3532@@ -306,67 +360,91 @@ def main():
3533 print()
3534 print("== Most Bloated Tables ==")
3535 print()
3536- for bloated_table in table_bloat_stats[:options.limit]:
3537- print("%40s || %2d%% || %s of %s" % (
3538- bloated_table.name,
3539- bloated_table.end_bloat_percent,
3540- bloated_table.bloat_size,
3541- bloated_table.table_size))
3542-
3543- index_bloat_stats = get_bloat_stats(cur, options, 'i')
3544+ for bloated_table in table_bloat_stats[: options.limit]:
3545+ print(
3546+ "%40s || %2d%% || %s of %s"
3547+ % (
3548+ bloated_table.name,
3549+ bloated_table.end_bloat_percent,
3550+ bloated_table.bloat_size,
3551+ bloated_table.table_size,
3552+ )
3553+ )
3554+
3555+ index_bloat_stats = get_bloat_stats(cur, options, "i")
3556
3557 print()
3558 print("== Most Bloated Indexes ==")
3559 print()
3560- for bloated_index in index_bloat_stats[:options.limit]:
3561- print("%65s || %2d%% || %s of %s" % (
3562- bloated_index.sub_name,
3563- bloated_index.end_bloat_percent,
3564- bloated_index.bloat_size,
3565- bloated_index.table_size))
3566+ for bloated_index in index_bloat_stats[: options.limit]:
3567+ print(
3568+ "%65s || %2d%% || %s of %s"
3569+ % (
3570+ bloated_index.sub_name,
3571+ bloated_index.end_bloat_percent,
3572+ bloated_index.bloat_size,
3573+ bloated_index.table_size,
3574+ )
3575+ )
3576
3577 # Order bloat delta report by size of bloat increase.
3578 # We might want to change this to percentage bloat increase.
3579 bloating_sort_key = lambda x: x.delta_bloat_len
3580
3581 table_bloating_stats = sorted(
3582- table_bloat_stats, key=bloating_sort_key, reverse=True)
3583+ table_bloat_stats, key=bloating_sort_key, reverse=True
3584+ )
3585
3586 if table_bloating_stats[0].num_samples <= 1:
3587 print()
3588- print(fill(dedent("""\
3589+ print(
3590+ fill(
3591+ dedent(
3592+ """\
3593 (There are not enough samples in this time range to display
3594 bloat change statistics)
3595- """)))
3596+ """
3597+ )
3598+ )
3599+ )
3600 else:
3601 print()
3602 print("== Most Bloating Tables ==")
3603 print()
3604
3605- for bloated_table in table_bloating_stats[:options.limit]:
3606+ for bloated_table in table_bloating_stats[: options.limit]:
3607 # Bloat decreases are uninteresting, and would need to be in
3608 # a separate table sorted in reverse anyway.
3609 if bloated_table.delta_bloat_percent > 0:
3610- print("%40s || +%4.2f%% || +%s" % (
3611- bloated_table.name,
3612- bloated_table.delta_bloat_percent,
3613- bloated_table.delta_bloat_size))
3614+ print(
3615+ "%40s || +%4.2f%% || +%s"
3616+ % (
3617+ bloated_table.name,
3618+ bloated_table.delta_bloat_percent,
3619+ bloated_table.delta_bloat_size,
3620+ )
3621+ )
3622
3623 index_bloating_stats = sorted(
3624- index_bloat_stats, key=bloating_sort_key, reverse=True)
3625+ index_bloat_stats, key=bloating_sort_key, reverse=True
3626+ )
3627
3628 print()
3629 print("== Most Bloating Indexes ==")
3630 print()
3631- for bloated_index in index_bloating_stats[:options.limit]:
3632+ for bloated_index in index_bloating_stats[: options.limit]:
3633 # Bloat decreases are uninteresting, and would need to be in
3634 # a separate table sorted in reverse anyway.
3635 if bloated_index.delta_bloat_percent > 0:
3636- print("%65s || +%4.2f%% || +%s" % (
3637- bloated_index.sub_name,
3638- bloated_index.delta_bloat_percent,
3639- bloated_index.delta_bloat_size))
3640+ print(
3641+ "%65s || +%4.2f%% || +%s"
3642+ % (
3643+ bloated_index.sub_name,
3644+ bloated_index.delta_bloat_percent,
3645+ bloated_index.delta_bloat_size,
3646+ )
3647+ )
3648
3649
3650-if __name__ == '__main__':
3651+if __name__ == "__main__":
3652 main()
3653diff --git a/utilities/roundup-sniffer.py b/utilities/roundup-sniffer.py
3654index 16bebbe..6e82bd6 100755
3655--- a/utilities/roundup-sniffer.py
3656+++ b/utilities/roundup-sniffer.py
3657@@ -33,16 +33,13 @@ populate the class-level "fields" variable. See MplayerStatusSniffer
3658 for an example.
3659 """
3660
3661-from base64 import urlsafe_b64encode
3662 import csv
3663 import optparse
3664+import sys
3665+from base64 import urlsafe_b64encode
3666 from os import mkdir
3667-from os.path import (
3668- exists,
3669- join,
3670- )
3671+from os.path import exists, join
3672 from pprint import pprint
3673-import sys
3674 from time import sleep
3675 from urllib.parse import urlencode
3676 from urllib.request import urlopen
3677@@ -53,7 +50,7 @@ from lp.services.beautifulsoup import BeautifulSoup
3678 class RoundupSniffer:
3679 """Sniffs the meaning of numeric fields in remote Roundups."""
3680
3681- fields = ('status',)
3682+ fields = ("status",)
3683
3684 def __init__(self, base_url, cache_dir):
3685 self.base_url = base_url
3686@@ -65,23 +62,24 @@ class RoundupSniffer:
3687 """Fetch the URL, consulting the cache first."""
3688 filename = join(
3689 self.cache_dir,
3690- urlsafe_b64encode(url.encode('UTF-8')).decode('ASCII'))
3691+ urlsafe_b64encode(url.encode("UTF-8")).decode("ASCII"),
3692+ )
3693 if not exists(filename):
3694- open(filename, 'wb').write(urlopen(url).read())
3695- return open(filename, 'rb')
3696+ open(filename, "wb").write(urlopen(url).read())
3697+ return open(filename, "rb")
3698
3699 def get_all_bugs(self):
3700- all_fields = ['id']
3701+ all_fields = ["id"]
3702 all_fields.extend(self.fields)
3703 query = [
3704- ('@action', 'export_csv'),
3705- ('@columns', ','.join(all_fields)),
3706- ('@sort', 'activity'),
3707- ('@group', 'priority'),
3708- ('@pagesize', '50'),
3709- ('@startwith', '0'),
3710- ]
3711- url = '%s?%s' % (self.base_url, urlencode(query))
3712+ ("@action", "export_csv"),
3713+ ("@columns", ",".join(all_fields)),
3714+ ("@sort", "activity"),
3715+ ("@group", "priority"),
3716+ ("@pagesize", "50"),
3717+ ("@startwith", "0"),
3718+ ]
3719+ url = "%s?%s" % (self.base_url, urlencode(query))
3720 bugs = csv.DictReader(self.fetch(url))
3721 return list(bugs)
3722
3723@@ -98,19 +96,22 @@ class MplayerStatusSniffer(RoundupSniffer):
3724 be useful in general.
3725 """
3726
3727- fields = ('status', 'substatus')
3728+ fields = ("status", "substatus")
3729
3730 def get_text_values(self, bug):
3731 """Returns the text of status and substatus for the given bug.
3732
3733 This is done by downloading the HTML bug page and scraping it.
3734 """
3735- url = '%s%s' % (self.base_url, bug['id'])
3736+ url = "%s%s" % (self.base_url, bug["id"])
3737 page = self.fetch(url).read()
3738 soup = BeautifulSoup(page)
3739 return tuple(
3740- node.string for node in
3741- soup.find('th', text='Status').find_next('td').find_all('span'))
3742+ node.string
3743+ for node in soup.find("th", text="Status")
3744+ .find_next("td")
3745+ .find_all("span")
3746+ )
3747
3748
3749 def get_distinct(things, fields):
3750@@ -118,8 +119,10 @@ def get_distinct(things, fields):
3751
3752 For each combination also return one example thing.
3753 """
3754+
3755 def key(thing):
3756 return tuple(thing[field] for field in fields)
3757+
3758 return {key(thing): thing for thing in things}
3759
3760
3761@@ -135,41 +138,56 @@ def gen_mapping(sniffer):
3762 def parse_args(args):
3763 parser = optparse.OptionParser()
3764 parser.add_option(
3765- "--base-url", dest="base_url",
3766+ "--base-url",
3767+ dest="base_url",
3768 help="The base URL at the remote Roundup instance.",
3769- metavar="URL")
3770+ metavar="URL",
3771+ )
3772 parser.add_option(
3773- "--delay", dest="delay", type="int",
3774- help=("The number of seconds to wait between each page "
3775- "load [default: %default]."))
3776+ "--delay",
3777+ dest="delay",
3778+ type="int",
3779+ help=(
3780+ "The number of seconds to wait between each page "
3781+ "load [default: %default]."
3782+ ),
3783+ )
3784 parser.add_option(
3785- "--cache-dir", dest="cache_dir",
3786- help=("A directory in which to cache fetched resources "
3787- "[default: %default]."),
3788- metavar="DIR")
3789+ "--cache-dir",
3790+ dest="cache_dir",
3791+ help=(
3792+ "A directory in which to cache fetched resources "
3793+ "[default: %default]."
3794+ ),
3795+ metavar="DIR",
3796+ )
3797 parser.add_option(
3798- "--sniffer-class", dest="sniffer_class",
3799+ "--sniffer-class",
3800+ dest="sniffer_class",
3801 help="The sniffer class to use [default: %default].",
3802- metavar="CLASSNAME")
3803+ metavar="CLASSNAME",
3804+ )
3805 parser.set_defaults(
3806- delay=0, cache_dir="roundup_sniffer_cache",
3807- sniffer_class="MplayerStatusSniffer")
3808+ delay=0,
3809+ cache_dir="roundup_sniffer_cache",
3810+ sniffer_class="MplayerStatusSniffer",
3811+ )
3812
3813 options, args = parser.parse_args(args)
3814
3815 if not options.base_url:
3816 parser.error("Please specify a base URL.")
3817 if len(args) > 0:
3818- parser.error("Positional arguments are not accepted: %s" %
3819- ' '.join(args))
3820+ parser.error(
3821+ "Positional arguments are not accepted: %s" % " ".join(args)
3822+ )
3823
3824 return options
3825
3826
3827-if __name__ == '__main__':
3828+if __name__ == "__main__":
3829 options = parse_args(sys.argv[1:])
3830- sniffer = eval(options.sniffer_class)(
3831- options.base_url, options.cache_dir)
3832+ sniffer = eval(options.sniffer_class)(options.base_url, options.cache_dir)
3833 mapping = {}
3834 for raw, text in gen_mapping(sniffer):
3835 mapping[raw] = text
3836diff --git a/utilities/run-as b/utilities/run-as
3837index f936e77..d834287 100755
3838--- a/utilities/run-as
3839+++ b/utilities/run-as
3840@@ -15,7 +15,6 @@ import pwd
3841 import resource
3842 import sys
3843
3844-
3845 user = sys.argv[1]
3846 pw = pwd.getpwnam(user)
3847 os.setresgid(pw.pw_gid, pw.pw_gid, pw.pw_gid)
3848diff --git a/utilities/script_commands.py b/utilities/script_commands.py
3849index 13fc27d..27e8afd 100644
3850--- a/utilities/script_commands.py
3851+++ b/utilities/script_commands.py
3852@@ -12,9 +12,11 @@ def add_dict(name, **kwargs):
3853 The kwargs are the contents of the dict.
3854 :param name: The name of the dictionary to add.
3855 """
3856+
3857 def decorator(func):
3858 setattr(func, name, kwargs)
3859 return func
3860+
3861 return decorator
3862
3863
3864@@ -24,7 +26,7 @@ def types(**kwargs):
3865 The kwargs are the values to set, as used by OptionParser.add_option::
3866 @types(port="int", delay=int)
3867 """
3868- return add_dict('_types', **kwargs)
3869+ return add_dict("_types", **kwargs)
3870
3871
3872 def helps(**kwargs):
3873@@ -33,7 +35,7 @@ def helps(**kwargs):
3874 The kwargs are used to assign help::
3875 helps(port="The port to use.", delay="The time to wait.")
3876 """
3877- return add_dict('_helps', **kwargs)
3878+ return add_dict("_helps", **kwargs)
3879
3880
3881 def get_function_parser(function):
3882@@ -50,8 +52,8 @@ def get_function_parser(function):
3883 args, ignore, ignored, defaults = inspect.getargspec(function)
3884 if defaults is None:
3885 defaults = [None] * len(args)
3886- arg_types = getattr(function, '_types', {})
3887- arg_helps = getattr(function, '_helps', {})
3888+ arg_types = getattr(function, "_types", {})
3889+ arg_helps = getattr(function, "_helps", {})
3890 for arg, default in zip(args, defaults):
3891 arg_type = arg_types.get(arg)
3892 if arg_type is None:
3893@@ -60,9 +62,10 @@ def get_function_parser(function):
3894 arg_type = type(default)
3895 arg_help = arg_helps.get(arg)
3896 if arg_help is not None:
3897- arg_help += ' Default: %default.'
3898+ arg_help += " Default: %default."
3899 parser.add_option(
3900- '--%s' % arg, type=arg_type, help=arg_help, default=default)
3901+ "--%s" % arg, type=arg_type, help=arg_help, default=default
3902+ )
3903 return parser
3904
3905
3906@@ -82,7 +85,7 @@ def parse_args(command, args):
3907 # have defaults, but declaring them as positional would prevent them from
3908 # being treated as flags.
3909 if len(args) != 0:
3910- raise UserError('Too many arguments.')
3911+ raise UserError("Too many arguments.")
3912 return {}
3913
3914
3915@@ -107,11 +110,14 @@ def run_subcommand(subcommands, argv):
3916 subcommand name.
3917 """
3918 if len(argv) < 1:
3919- raise UserError('Must supply a command: %s.' %
3920- ', '.join(subcommands.keys()))
3921+ raise UserError(
3922+ "Must supply a command: %s." % ", ".join(subcommands.keys())
3923+ )
3924 try:
3925 command = subcommands[argv[0]]
3926 except KeyError:
3927- raise UserError('%s invalid. Valid commands: %s.' %
3928- (argv[0], ', '.join(subcommands.keys())))
3929+ raise UserError(
3930+ "%s invalid. Valid commands: %s."
3931+ % (argv[0], ", ".join(subcommands.keys()))
3932+ )
3933 run_from_args(command, argv[1:])
3934diff --git a/utilities/shhh.py b/utilities/shhh.py
3935index be6b51e..1ff3322 100755
3936--- a/utilities/shhh.py
3937+++ b/utilities/shhh.py
3938@@ -8,11 +8,8 @@ Run a command and suppress output unless it returns a non-zero exit status.
3939 """
3940
3941 import os
3942-from subprocess import (
3943- PIPE,
3944- Popen,
3945- )
3946 import sys
3947+from subprocess import PIPE, Popen
3948
3949
3950 def shhh(cmd):
3951@@ -90,5 +87,5 @@ def shhh(cmd):
3952 return process.returncode
3953
3954
3955-if __name__ == '__main__':
3956+if __name__ == "__main__":
3957 sys.exit(shhh(sys.argv[1:]))
3958diff --git a/utilities/smoke-test-librarian.py b/utilities/smoke-test-librarian.py
3959index 4b0e269..eeb88ba 100755
3960--- a/utilities/smoke-test-librarian.py
3961+++ b/utilities/smoke-test-librarian.py
3962@@ -15,12 +15,11 @@ from zope.component import getUtility
3963 from lp.services.librarian.interfaces.client import (
3964 ILibrarianClient,
3965 IRestrictedLibrarianClient,
3966- )
3967+)
3968 from lp.services.librarian.smoketest import do_smoketest
3969 from lp.services.scripts import execute_zcml_for_scripts
3970
3971-
3972-if __name__ == '__main__':
3973+if __name__ == "__main__":
3974 execute_zcml_for_scripts()
3975 restricted_client = getUtility(IRestrictedLibrarianClient)
3976 regular_client = getUtility(ILibrarianClient)
3977diff --git a/utilities/soyuz-sampledata-setup.py b/utilities/soyuz-sampledata-setup.py
3978index 71642cf..ed70561 100755
3979--- a/utilities/soyuz-sampledata-setup.py
3980+++ b/utilities/soyuz-sampledata-setup.py
3981@@ -25,8 +25,8 @@ import subprocess
3982 import sys
3983 from textwrap import dedent
3984
3985-from storm.store import Store
3986 import transaction
3987+from storm.store import Store
3988 from zope.component import getUtility
3989 from zope.event import notify
3990 from zope.lifecycleevent import ObjectCreatedEvent
3991@@ -38,25 +38,21 @@ from lp.registry.interfaces.codeofconduct import ISignedCodeOfConductSet
3992 from lp.registry.interfaces.person import IPersonSet
3993 from lp.registry.interfaces.series import SeriesStatus
3994 from lp.registry.model.codeofconduct import SignedCodeOfConduct
3995-from lp.services.database.interfaces import (
3996- IMasterStore,
3997- IStandbyStore,
3998- )
3999+from lp.services.database.interfaces import IMasterStore, IStandbyStore
4000 from lp.services.scripts.base import LaunchpadScript
4001 from lp.soyuz.enums import SourcePackageFormat
4002 from lp.soyuz.interfaces.component import IComponentSet
4003 from lp.soyuz.interfaces.section import ISectionSet
4004 from lp.soyuz.interfaces.sourcepackageformat import (
4005 ISourcePackageFormatSelectionSet,
4006- )
4007+)
4008 from lp.soyuz.model.component import ComponentSelection
4009 from lp.soyuz.model.section import SectionSelection
4010 from lp.soyuz.scripts.initialize_distroseries import InitializeDistroSeries
4011 from lp.testing.factory import LaunchpadObjectFactory
4012
4013-
4014-user_name = 'ppa-user'
4015-default_email = '%s@example.com' % user_name
4016+user_name = "ppa-user"
4017+default_email = "%s@example.com" % user_name
4018
4019
4020 class DoNotRunOnProduction(Exception):
4021@@ -82,19 +78,21 @@ def check_preconditions(options):
4022
4023 # Just a guess, but dev systems aren't likely to have ids this high
4024 # in this table. Production data does.
4025- real_data = (get_max_id(store, "TranslationMessage") >= 1000000)
4026+ real_data = get_max_id(store, "TranslationMessage") >= 1000000
4027 if real_data and not options.force:
4028 raise DoNotRunOnProduction(
4029- "Refusing to delete Ubuntu data unless you --force me.")
4030+ "Refusing to delete Ubuntu data unless you --force me."
4031+ )
4032
4033 # For some configs it's just absolutely clear this script shouldn't
4034 # run. Don't even accept --force there.
4035- forbidden_configs = re.compile('(edge|lpnet|production)')
4036- current_config = os.getenv('LPCONFIG', 'an unknown config')
4037+ forbidden_configs = re.compile("(edge|lpnet|production)")
4038+ current_config = os.getenv("LPCONFIG", "an unknown config")
4039 if forbidden_configs.match(current_config):
4040 raise DoNotRunOnProduction(
4041 "I won't delete Ubuntu data on %s and you can't --force me."
4042- % current_config)
4043+ % current_config
4044+ )
4045
4046
4047 def get_person_set():
4048@@ -112,9 +110,11 @@ def retire_active_publishing_histories(histories, requester):
4049 """Retire all active publishing histories in the given collection."""
4050 # Avoid circular import.
4051 from lp.soyuz.interfaces.publishing import active_publishing_status
4052+
4053 for history in histories(status=active_publishing_status):
4054 history.requestDeletion(
4055- requester, "Cleaned up because of missing Librarian files.")
4056+ requester, "Cleaned up because of missing Librarian files."
4057+ )
4058
4059
4060 def retire_distro_archives(distribution, culprit):
4061@@ -128,9 +128,11 @@ def retire_distro_archives(distribution, culprit):
4062
4063 for archive in distribution.all_distro_archives:
4064 retire_active_publishing_histories(
4065- archive.getPublishedSources, culprit)
4066+ archive.getPublishedSources, culprit
4067+ )
4068 retire_active_publishing_histories(
4069- archive.getAllPublishedBinaries, culprit)
4070+ archive.getAllPublishedBinaries, culprit
4071+ )
4072
4073
4074 def retire_ppas(distribution):
4075@@ -146,9 +148,12 @@ def add_architecture(distroseries, architecture_name):
4076
4077 processor = getUtility(IProcessorSet).getByName(architecture_name)
4078 archseries = DistroArchSeries(
4079- distroseries=distroseries, processor=processor,
4080- owner=distroseries.owner, official=True,
4081- architecturetag=architecture_name)
4082+ distroseries=distroseries,
4083+ processor=processor,
4084+ owner=distroseries.owner,
4085+ official=True,
4086+ architecturetag=architecture_name,
4087+ )
4088 IMasterStore(DistroArchSeries).add(archseries)
4089
4090
4091@@ -162,26 +167,75 @@ def create_sections(distroseries):
4092 for it to be precisely accurate.
4093 """
4094 section_names = (
4095- 'admin', 'cli-mono', 'comm', 'database', 'debug', 'devel', 'doc',
4096- 'editors', 'education', 'electronics', 'embedded', 'fonts', 'games',
4097- 'gnome', 'gnu-r', 'gnustep', 'golang', 'graphics', 'hamradio',
4098- 'haskell', 'httpd', 'interpreters', 'java', 'javascript', 'kde',
4099- 'kernel', 'libdevel', 'libs', 'lisp', 'localization', 'mail',
4100- 'math', 'misc', 'net', 'news', 'ocaml', 'oldlibs', 'otherosfs',
4101- 'perl', 'php', 'python', 'raku', 'ruby', 'rust', 'science',
4102- 'shells', 'sound', 'tex', 'text', 'utils', 'vcs', 'video', 'web',
4103- 'x11', 'xfce', 'zope')
4104+ "admin",
4105+ "cli-mono",
4106+ "comm",
4107+ "database",
4108+ "debug",
4109+ "devel",
4110+ "doc",
4111+ "editors",
4112+ "education",
4113+ "electronics",
4114+ "embedded",
4115+ "fonts",
4116+ "games",
4117+ "gnome",
4118+ "gnu-r",
4119+ "gnustep",
4120+ "golang",
4121+ "graphics",
4122+ "hamradio",
4123+ "haskell",
4124+ "httpd",
4125+ "interpreters",
4126+ "java",
4127+ "javascript",
4128+ "kde",
4129+ "kernel",
4130+ "libdevel",
4131+ "libs",
4132+ "lisp",
4133+ "localization",
4134+ "mail",
4135+ "math",
4136+ "misc",
4137+ "net",
4138+ "news",
4139+ "ocaml",
4140+ "oldlibs",
4141+ "otherosfs",
4142+ "perl",
4143+ "php",
4144+ "python",
4145+ "raku",
4146+ "ruby",
4147+ "rust",
4148+ "science",
4149+ "shells",
4150+ "sound",
4151+ "tex",
4152+ "text",
4153+ "utils",
4154+ "vcs",
4155+ "video",
4156+ "web",
4157+ "x11",
4158+ "xfce",
4159+ "zope",
4160+ )
4161 store = Store.of(distroseries)
4162 for section_name in section_names:
4163 section = getUtility(ISectionSet).ensure(section_name)
4164 if section not in distroseries.sections:
4165 store.add(
4166- SectionSelection(distroseries=distroseries, section=section))
4167+ SectionSelection(distroseries=distroseries, section=section)
4168+ )
4169
4170
4171 def create_components(distroseries, uploader):
4172 """Set up some components for `distroseries`."""
4173- component_names = ('main', 'restricted', 'universe', 'multiverse')
4174+ component_names = ("main", "restricted", "universe", "multiverse")
4175 store = Store.of(distroseries)
4176 main_archive = distroseries.distribution.main_archive
4177 for component_name in component_names:
4178@@ -189,7 +243,9 @@ def create_components(distroseries, uploader):
4179 if component not in distroseries.components:
4180 store.add(
4181 ComponentSelection(
4182- distroseries=distroseries, component=component))
4183+ distroseries=distroseries, component=component
4184+ )
4185+ )
4186 main_archive.newComponentUploader(uploader, component)
4187 main_archive.newQueueAdmin(uploader, component)
4188
4189@@ -201,10 +257,16 @@ def create_series(parent, full_name, version, status):
4190 name = full_name.split()[0].lower()
4191 title = "The " + full_name
4192 display_name = full_name.split()[0]
4193- new_series = distribution.newSeries(name=name, title=title,
4194- display_name=display_name, summary='Ubuntu %s is good.' % version,
4195- description='%s is awesome.' % version, version=version,
4196- previous_series=None, registrant=registrant)
4197+ new_series = distribution.newSeries(
4198+ name=name,
4199+ title=title,
4200+ display_name=display_name,
4201+ summary="Ubuntu %s is good." % version,
4202+ description="%s is awesome." % version,
4203+ version=version,
4204+ previous_series=None,
4205+ registrant=registrant,
4206+ )
4207 new_series.status = status
4208 notify(ObjectCreatedEvent(new_series))
4209
4210@@ -222,48 +284,48 @@ def create_sample_series(original_series, log):
4211 and so on.
4212 """
4213 series_descriptions = [
4214- ('Dapper Drake', SeriesStatus.OBSOLETE, '6.06'),
4215- ('Edgy Eft', SeriesStatus.OBSOLETE, '6.10'),
4216- ('Feisty Fawn', SeriesStatus.OBSOLETE, '7.04'),
4217- ('Gutsy Gibbon', SeriesStatus.OBSOLETE, '7.10'),
4218- ('Hardy Heron', SeriesStatus.OBSOLETE, '8.04'),
4219- ('Intrepid Ibex', SeriesStatus.OBSOLETE, '8.10'),
4220- ('Jaunty Jackalope', SeriesStatus.OBSOLETE, '9.04'),
4221- ('Karmic Koala', SeriesStatus.OBSOLETE, '9.10'),
4222- ('Lucid Lynx', SeriesStatus.OBSOLETE, '10.04'),
4223- ('Maverick Meerkat', SeriesStatus.OBSOLETE, '10.10'),
4224- ('Natty Narwhal', SeriesStatus.OBSOLETE, '11.04'),
4225- ('Oneiric Ocelot', SeriesStatus.OBSOLETE, '11.10'),
4226- ('Precise Pangolin', SeriesStatus.OBSOLETE, '12.04'),
4227- ('Quantal Quetzal', SeriesStatus.OBSOLETE, '12.10'),
4228- ('Raring Ringtail', SeriesStatus.OBSOLETE, '13.04'),
4229- ('Saucy Salamander', SeriesStatus.OBSOLETE, '13.10'),
4230- ('Trusty Tahr', SeriesStatus.SUPPORTED, '14.04'),
4231- ('Utopic Unicorn', SeriesStatus.OBSOLETE, '14.10'),
4232- ('Vivid Vervet', SeriesStatus.OBSOLETE, '15.04'),
4233- ('Wily Werewolf', SeriesStatus.OBSOLETE, '15.10'),
4234- ('Xenial Xerus', SeriesStatus.SUPPORTED, '16.04'),
4235- ('Yakkety Yak', SeriesStatus.OBSOLETE, '16.10'),
4236- ('Zesty Zapus', SeriesStatus.OBSOLETE, '17.04'),
4237- ('Artful Aardvark', SeriesStatus.OBSOLETE, '17.10'),
4238- ('Bionic Beaver', SeriesStatus.SUPPORTED, '18.04'),
4239- ('Cosmic Cuttlefish', SeriesStatus.OBSOLETE, '18.10'),
4240- ('Disco Dingo', SeriesStatus.OBSOLETE, '19.04'),
4241- ('Eoan Ermine', SeriesStatus.OBSOLETE, '19.10'),
4242- ('Focal Fossa', SeriesStatus.SUPPORTED, '20.04'),
4243- ('Groovy Gorilla', SeriesStatus.OBSOLETE, '20.10'),
4244- ('Hirsute Hippo', SeriesStatus.OBSOLETE, '21.04'),
4245- ('Impish Indri', SeriesStatus.CURRENT, '21.10'),
4246- ('Jammy Jellyfish', SeriesStatus.DEVELOPMENT, '22.04'),
4247- ]
4248+ ("Dapper Drake", SeriesStatus.OBSOLETE, "6.06"),
4249+ ("Edgy Eft", SeriesStatus.OBSOLETE, "6.10"),
4250+ ("Feisty Fawn", SeriesStatus.OBSOLETE, "7.04"),
4251+ ("Gutsy Gibbon", SeriesStatus.OBSOLETE, "7.10"),
4252+ ("Hardy Heron", SeriesStatus.OBSOLETE, "8.04"),
4253+ ("Intrepid Ibex", SeriesStatus.OBSOLETE, "8.10"),
4254+ ("Jaunty Jackalope", SeriesStatus.OBSOLETE, "9.04"),
4255+ ("Karmic Koala", SeriesStatus.OBSOLETE, "9.10"),
4256+ ("Lucid Lynx", SeriesStatus.OBSOLETE, "10.04"),
4257+ ("Maverick Meerkat", SeriesStatus.OBSOLETE, "10.10"),
4258+ ("Natty Narwhal", SeriesStatus.OBSOLETE, "11.04"),
4259+ ("Oneiric Ocelot", SeriesStatus.OBSOLETE, "11.10"),
4260+ ("Precise Pangolin", SeriesStatus.OBSOLETE, "12.04"),
4261+ ("Quantal Quetzal", SeriesStatus.OBSOLETE, "12.10"),
4262+ ("Raring Ringtail", SeriesStatus.OBSOLETE, "13.04"),
4263+ ("Saucy Salamander", SeriesStatus.OBSOLETE, "13.10"),
4264+ ("Trusty Tahr", SeriesStatus.SUPPORTED, "14.04"),
4265+ ("Utopic Unicorn", SeriesStatus.OBSOLETE, "14.10"),
4266+ ("Vivid Vervet", SeriesStatus.OBSOLETE, "15.04"),
4267+ ("Wily Werewolf", SeriesStatus.OBSOLETE, "15.10"),
4268+ ("Xenial Xerus", SeriesStatus.SUPPORTED, "16.04"),
4269+ ("Yakkety Yak", SeriesStatus.OBSOLETE, "16.10"),
4270+ ("Zesty Zapus", SeriesStatus.OBSOLETE, "17.04"),
4271+ ("Artful Aardvark", SeriesStatus.OBSOLETE, "17.10"),
4272+ ("Bionic Beaver", SeriesStatus.SUPPORTED, "18.04"),
4273+ ("Cosmic Cuttlefish", SeriesStatus.OBSOLETE, "18.10"),
4274+ ("Disco Dingo", SeriesStatus.OBSOLETE, "19.04"),
4275+ ("Eoan Ermine", SeriesStatus.OBSOLETE, "19.10"),
4276+ ("Focal Fossa", SeriesStatus.SUPPORTED, "20.04"),
4277+ ("Groovy Gorilla", SeriesStatus.OBSOLETE, "20.10"),
4278+ ("Hirsute Hippo", SeriesStatus.OBSOLETE, "21.04"),
4279+ ("Impish Indri", SeriesStatus.CURRENT, "21.10"),
4280+ ("Jammy Jellyfish", SeriesStatus.DEVELOPMENT, "22.04"),
4281+ ]
4282
4283 parent = original_series
4284 for full_name, status, version in series_descriptions:
4285- log.info('Creating %s...' % full_name)
4286+ log.info("Creating %s..." % full_name)
4287 parent = create_series(parent, full_name, version, status)
4288 # Karmic is the first series in which the 3.0 formats are
4289 # allowed. Subsequent series will inherit them.
4290- if version == '9.10':
4291+ if version == "9.10":
4292 spfss = getUtility(ISourcePackageFormatSelectionSet)
4293 spfss.add(parent, SourcePackageFormat.FORMAT_3_0_QUILT)
4294 spfss.add(parent, SourcePackageFormat.FORMAT_3_0_NATIVE)
4295@@ -271,9 +333,10 @@ def create_sample_series(original_series, log):
4296
4297 def add_series_component(series):
4298 """Permit a component in the given series."""
4299- component = getUtility(IComponentSet)['main']
4300+ component = getUtility(IComponentSet)["main"]
4301 IMasterStore(ComponentSelection).add(
4302- ComponentSelection(distroseries=series, component=component))
4303+ ComponentSelection(distroseries=series, component=component)
4304+ )
4305
4306
4307 def clean_up(distribution, log):
4308@@ -285,7 +348,7 @@ def clean_up(distribution, log):
4309 # published binaries without corresponding sources.
4310
4311 log.info("Deleting all items in official archives...")
4312- retire_distro_archives(distribution, get_person_set().getByName('name16'))
4313+ retire_distro_archives(distribution, get_person_set().getByName("name16"))
4314
4315 # Disable publishing of all PPAs, as they probably have broken
4316 # publishings too.
4317@@ -295,7 +358,7 @@ def clean_up(distribution, log):
4318 retire_series(distribution)
4319
4320 # grumpy has no components, which upsets the publisher.
4321- add_series_component(distribution['grumpy'])
4322+ add_series_component(distribution["grumpy"])
4323
4324
4325 def set_source_package_format(distroseries):
4326@@ -312,7 +375,7 @@ def populate(distribution, previous_series_name, uploader_name, options, log):
4327
4328 log.info("Configuring sections...")
4329 create_sections(previous_series)
4330- add_architecture(previous_series, 'amd64')
4331+ add_architecture(previous_series, "amd64")
4332
4333 log.info("Configuring components and permissions...")
4334 uploader = get_person_set().getByName(uploader_name)
4335@@ -333,24 +396,28 @@ def sign_code_of_conduct(person, log):
4336 signedcocset = getUtility(ISignedCodeOfConductSet)
4337 if signedcocset.searchByUser(person).count() == 0:
4338 fake_gpg_key = LaunchpadObjectFactory().makeGPGKey(person)
4339- Store.of(person).add(SignedCodeOfConduct(
4340- owner=person, signing_key_fingerprint=fake_gpg_key.fingerprint,
4341- signedcode="Normally a signed CoC would go here.", active=True))
4342+ Store.of(person).add(
4343+ SignedCodeOfConduct(
4344+ owner=person,
4345+ signing_key_fingerprint=fake_gpg_key.fingerprint,
4346+ signedcode="Normally a signed CoC would go here.",
4347+ active=True,
4348+ )
4349+ )
4350
4351
4352 def create_ppa_user(username, options, approver, log):
4353 """Create new user, with password "test," and sign code of conduct."""
4354 person = get_person_set().getByName(username)
4355 if person is None:
4356- have_email = (options.email != default_email)
4357- command_line = [
4358- 'utilities/make-lp-user', username, 'ubuntu-team']
4359+ have_email = options.email != default_email
4360+ command_line = ["utilities/make-lp-user", username, "ubuntu-team"]
4361 if have_email:
4362- command_line += ['--email', options.email]
4363+ command_line += ["--email", options.email]
4364
4365 pipe = subprocess.Popen(command_line, stderr=subprocess.PIPE)
4366 stdout, stderr = pipe.communicate()
4367- if stderr != '':
4368+ if stderr != "":
4369 print(stderr)
4370 if pipe.returncode != 0:
4371 sys.exit(2)
4372@@ -366,11 +433,16 @@ def create_ppa_user(username, options, approver, log):
4373 def create_ppa(distribution, person, name):
4374 """Create a PPA for `person`."""
4375 ppa = LaunchpadObjectFactory().makeArchive(
4376- distribution=distribution, owner=person, name=name, virtualized=False,
4377- description="Automatically created test PPA.")
4378+ distribution=distribution,
4379+ owner=person,
4380+ name=name,
4381+ virtualized=False,
4382+ description="Automatically created test PPA.",
4383+ )
4384 ppa.external_dependencies = (
4385 "deb http://archive.ubuntu.com/ubuntu %(series)s "
4386- "main restricted universe multiverse\n")
4387+ "main restricted universe multiverse\n"
4388+ )
4389
4390
4391 class SoyuzSampledataSetup(LaunchpadScript):
4392@@ -379,14 +451,23 @@ class SoyuzSampledataSetup(LaunchpadScript):
4393
4394 def add_my_options(self):
4395 self.parser.add_option(
4396- '-f', '--force', action='store_true', dest='force',
4397- help="DANGEROUS: run even if the database looks production-like.")
4398+ "-f",
4399+ "--force",
4400+ action="store_true",
4401+ dest="force",
4402+ help="DANGEROUS: run even if the database looks production-like.",
4403+ )
4404 self.parser.add_option(
4405- '-e', '--email', action='store', dest='email',
4406+ "-e",
4407+ "--email",
4408+ action="store",
4409+ dest="email",
4410 default=default_email,
4411 help=(
4412 "Email address to use for %s. Should match your GPG key."
4413- % user_name))
4414+ % user_name
4415+ ),
4416+ )
4417
4418 def main(self):
4419 check_preconditions(self.options.force)
4420@@ -395,26 +476,30 @@ class SoyuzSampledataSetup(LaunchpadScript):
4421 clean_up(ubuntu, self.logger)
4422
4423 # Use Hoary as the root, as Breezy and Grumpy are broken.
4424- populate(ubuntu, 'hoary', 'ubuntu-team', self.options, self.logger)
4425+ populate(ubuntu, "hoary", "ubuntu-team", self.options, self.logger)
4426
4427- admin = get_person_set().getByName('name16')
4428+ admin = get_person_set().getByName("name16")
4429 person = create_ppa_user(user_name, self.options, admin, self.logger)
4430
4431- create_ppa(ubuntu, person, 'test-ppa')
4432+ create_ppa(ubuntu, person, "test-ppa")
4433
4434 transaction.commit()
4435 self.logger.info("Done.")
4436
4437- print(dedent("""
4438+ print(
4439+ dedent(
4440+ """
4441 Now start your local Launchpad with "make run_codehosting" and log
4442 into https://launchpad.test/ as "%(email)s" with "test" as the
4443 password.
4444 Your user name will be %(user_name)s."""
4445- % {
4446- 'email': self.options.email,
4447- 'user_name': user_name,
4448- }))
4449+ % {
4450+ "email": self.options.email,
4451+ "user_name": user_name,
4452+ }
4453+ )
4454+ )
4455
4456
4457 if __name__ == "__main__":
4458- SoyuzSampledataSetup('soyuz-sampledata-setup').lock_and_run()
4459+ SoyuzSampledataSetup("soyuz-sampledata-setup").lock_and_run()
4460diff --git a/utilities/update-copyright b/utilities/update-copyright
4461index 2910c7e..5c4c027 100755
4462--- a/utilities/update-copyright
4463+++ b/utilities/update-copyright
4464@@ -10,35 +10,34 @@ notice to reflect the current year. Looks for the notice in the first three
4465 lines of the file and leaves the file unchanged if it finds none.
4466 """
4467
4468-from datetime import date
4469 import os
4470 import re
4471-from subprocess import (
4472- PIPE,
4473- Popen,
4474- )
4475 import sys
4476-
4477+from datetime import date
4478+from subprocess import PIPE, Popen
4479
 4480 # This script lives in the 'utilities' directory.
4481 UTILITIES_DIR = os.path.dirname(__file__)
4482 CURRENT_YEAR = date.today().year
4483 copyright_pattern = re.compile(
4484- "Copyright (?P<years>(?P<yearfrom>[0-9]{4})(-[0-9]{4})?) Canonical Ltd.")
4485+ "Copyright (?P<years>(?P<yearfrom>[0-9]{4})(-[0-9]{4})?) Canonical Ltd."
4486+)
4487+
4488
4489 def years_string(yearfrom):
4490 """Build the new years string."""
4491 if int(yearfrom) >= CURRENT_YEAR:
4492- return yearfrom
4493+ return yearfrom
4494 return "%s-%d" % (yearfrom, CURRENT_YEAR)
4495
4496+
4497 def update_copyright(lines):
4498 """Update the copyright notice in the given file lines."""
4499 for line in range(min(len(lines), 5)):
4500 match = copyright_pattern.search(lines[line])
4501 if match is not None:
4502- old_years = match.group('years')
4503- new_years = years_string(match.group('yearfrom'))
4504+ old_years = match.group("years")
4505+ new_years = years_string(match.group("yearfrom"))
4506 if old_years != new_years:
4507 lines[line] = lines[line].replace(old_years, new_years)
4508 return True
4509@@ -50,8 +49,10 @@ def update_files(filenames):
4510 """Open the files with the given file names and update them."""
4511 for filename in filenames:
4512 if not os.path.isfile(filename):
4513- print("Skipped: %s does not exist or is not a regular file." %(
4514- filename))
4515+ print(
4516+ "Skipped: %s does not exist or is not a regular file."
4517+ % (filename)
4518+ )
4519 continue
4520 if not os.access(filename, os.W_OK):
4521 print("Skipped: %s is not writeable." % filename)
4522@@ -60,28 +61,32 @@ def update_files(filenames):
4523 lines = f.readlines()
4524 changed = update_copyright(lines)
4525 if changed:
4526- newfile = open(filename, 'w')
4527- newfile.write(''.join(lines))
4528+ newfile = open(filename, "w")
4529+ newfile.write("".join(lines))
4530 newfile.close()
4531 print("Updated: %s" % filename)
4532 else:
4533 print("Unchanged: %s" % filename)
4534
4535+
4536 def find_changed_files():
4537 """Use the find-changed-files.sh script."""
4538 find_changed_files_cmd = [
4539- os.path.join(UTILITIES_DIR, 'find-changed-files.sh')]
4540+ os.path.join(UTILITIES_DIR, "find-changed-files.sh")
4541+ ]
4542 filenames = Popen(
4543- find_changed_files_cmd, stdout=PIPE,
4544- universal_newlines=True).communicate()[0]
4545+ find_changed_files_cmd, stdout=PIPE, universal_newlines=True
4546+ ).communicate()[0]
4547 return filenames.strip()
4548
4549+
4550 def find_and_update():
4551 """Put it all together."""
4552 filenames = find_changed_files()
4553- if filenames != '':
4554+ if filenames != "":
4555 update_files(filenames.splitlines())
4556
4557+
4558 if __name__ == "__main__":
4559 if len(sys.argv) < 2:
4560 find_and_update()
4561diff --git a/utilities/update-sourcecode b/utilities/update-sourcecode
4562index df0dac7..e1aabde 100755
4563--- a/utilities/update-sourcecode
4564+++ b/utilities/update-sourcecode
4565@@ -8,12 +8,10 @@
4566 import os
4567 import sys
4568
4569-
4570 sys.path.insert(0,
4571 os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
4572
4573 from devscripts import sourcecode # noqa: E402
4574
4575-
4576 if __name__ == '__main__':
4577 sys.exit(sourcecode.main(sys.argv))

Subscribers

People subscribed via source and target branches

to status/vote changes: