Merge ~fourdollars/pc-enablement/+git/oem-scripts:master into ~oem-solutions-engineers/pc-enablement/+git/oem-scripts:master

Proposed by Shih-Yuan Lee
Status: Merged
Approved by: Shih-Yuan Lee
Approved revision: 633af21b9daeea13ce683c10089d4db0efcda4bd
Merged at revision: f08dc80657cb9ef6f23ee18eea7cdc3076a7cc44
Proposed branch: ~fourdollars/pc-enablement/+git/oem-scripts:master
Merge into: ~oem-solutions-engineers/pc-enablement/+git/oem-scripts:master
Diff against target: 5933 lines (+2255/-1371)
22 files modified
bug-bind.py (+53/-30)
copyPackage.py (+23/-18)
debian/changelog (+19/-0)
debian/control (+1/-1)
debian/rules (+1/-1)
dev/null (+0/-16)
get-oem-auth-token (+16/-14)
get-oemshare-auth-token (+2/-2)
lp-bug (+143/-99)
mir-bug (+455/-224)
oem-getiso (+96/-73)
oem-meta-packages (+836/-457)
oem_scripts/LaunchpadLogin.py (+61/-39)
oem_scripts/logging.py (+30/-23)
pkg-list (+109/-68)
pkg-oem-meta (+235/-189)
rename-everything.py (+89/-60)
review-merge-proposal (+20/-6)
setup.py (+43/-42)
tests/test_black (+3/-0)
tests/test_bugbind.py (+12/-7)
tests/test_flake8 (+8/-2)
Reviewer Review Type Date Requested Status
Bin Li Approve
Review via email: mp+404053@code.launchpad.net

Description of the change

There is no functional change.
This merge only refactors the Python style checking to use flake8 and black.

To post a comment you must log in.
Revision history for this message
Bin Li (binli) wrote :

LGTM.

review: Approve
Revision history for this message
Shih-Yuan Lee (fourdollars) wrote :

[BOT]
$ cat oem-scripts-0.98-633af21-in-docker-focal-summary.log
autopkgtest-collect-credentials PASS
autopkgtest-oem-scripts-auto PASS
pkg-somerville-meta PASS
pkg-stella-meta PASS
pkg-sutton-meta PASS
bug-bind PASS
get-private-ppa PASS
jq-lp PASS
launchpad-api PASS
lp-bug PASS
oem-meta-packages PASS
pkg-list PASS
review-merge-proposal PASS
run-autopkgtest PASS
setup-apt-dir PASS
mir-bug SKIP exit status 77 and marked as skippable
git-url-insteadof-setting PASS
recovery-from-iso.sh PASS
mir-bug-verification PASS
https://paste.ubuntu.com/p/g3wxnW9kXF/ oem-scripts-0.98-633af21-in-docker-focal-complete.log

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1diff --git a/bug-bind.py b/bug-bind.py
2index 16056b6..60a9db0 100755
3--- a/bug-bind.py
4+++ b/bug-bind.py
5@@ -9,18 +9,19 @@ import re
6 import lazr.restfulclient.resource
7 from oem_scripts.LaunchpadLogin import LaunchpadLogin
8
9-HWE_PUBLIC_PROJECT = 'hwe-next'
10-OEM_PUBLIC_PROJECT = 'oem-priority'
11+HWE_PUBLIC_PROJECT = "hwe-next"
12+OEM_PUBLIC_PROJECT = "oem-priority"
13
14 lp = None
15-log = logging.getLogger('bug-bind-logger')
16+log = logging.getLogger("bug-bind-logger")
17 log.setLevel(logging.DEBUG)
18-logging.basicConfig(format='%(levelname)s %(asctime)s - %(message)s',
19- datefmt='%m/%d/%Y %I:%M:%S %p')
20+logging.basicConfig(
21+ format="%(levelname)s %(asctime)s - %(message)s", datefmt="%m/%d/%Y %I:%M:%S %p"
22+)
23
24
25 def link_bugs(public_bugnum, privates, ihv):
26- assert(public_bugnum.isdigit())
27+ assert public_bugnum.isdigit()
28 login = LaunchpadLogin()
29 lp = login.lp
30 pub_bug = lp.bugs[public_bugnum]
31@@ -29,7 +30,7 @@ def link_bugs(public_bugnum, privates, ihv):
32
33 # Add X-HWE-Bug: tag to description.
34 for priv in privates:
35- assert(priv.isdigit())
36+ assert priv.isdigit()
37 bug = lp.bugs[priv]
38
39 if re.search(tag, bug.description) is None:
40@@ -41,30 +42,35 @@ def link_bugs(public_bugnum, privates, ihv):
41
42 if ihv == "hwe":
43 hwe_next = lp.projects[HWE_PUBLIC_PROJECT]
44- sub_url = "%s~%s" % (lp._root_uri, 'canonical-hwe-team')
45+ sub_url = "%s~%s" % (lp._root_uri, "canonical-hwe-team")
46 pub_bug.subscribe(person=sub_url)
47- remote_bug_tag(pub_bug, 'hwe-needs-public-bug')
48+ remote_bug_tag(pub_bug, "hwe-needs-public-bug")
49 elif ihv == "swe":
50 hwe_next = lp.projects[OEM_PUBLIC_PROJECT]
51- sub_url = "%s~%s" % (lp._root_uri, 'oem-solutions-engineers')
52+ sub_url = "%s~%s" % (lp._root_uri, "oem-solutions-engineers")
53 pub_bug.subscribe(person=sub_url)
54- remote_bug_tag(pub_bug, 'swe-needs-public-bug')
55+ remote_bug_tag(pub_bug, "swe-needs-public-bug")
56 else:
57 if lp.projects[ihv]:
58 hwe_next = lp.projects[ihv]
59- remote_bug_tag(pub_bug, 'hwe-needs-public-bug')
60+ remote_bug_tag(pub_bug, "hwe-needs-public-bug")
61 else:
62- log.error('Project ' + ihv + ' not defined')
63+ log.error("Project " + ihv + " not defined")
64
65- add_bug_tags(pub_bug, ['originate-from-' + str(bug.id),
66- bug.bug_tasks_collection[0].bug_target_name, # OEM codename
67- 'oem-priority'])
68+ add_bug_tags(
69+ pub_bug,
70+ [
71+ "originate-from-" + str(bug.id),
72+ bug.bug_tasks_collection[0].bug_target_name, # OEM codename
73+ "oem-priority",
74+ ],
75+ )
76
77 add_bug_task(pub_bug, hwe_next)
78
79
80 def link_priv_bugs(main_bugnum, privates, ihv):
81- assert(main_bugnum.isdigit())
82+ assert main_bugnum.isdigit()
83 login = LaunchpadLogin()
84 lp = login.lp
85 main_bug = lp.bugs[main_bugnum]
86@@ -73,7 +79,7 @@ def link_priv_bugs(main_bugnum, privates, ihv):
87
88 # Add X-HWE-Bug: tag to description.
89 for priv in privates:
90- assert(priv.isdigit())
91+ assert priv.isdigit()
92 bug = lp.bugs[priv]
93
94 if re.search(tag, bug.description) is None:
95@@ -83,20 +89,20 @@ def link_priv_bugs(main_bugnum, privates, ihv):
96 else:
97 log.warning("Bug already linked to main bug " + tag)
98
99- add_bug_tags(main_bug, ['originate-from-' + str(bug.id)])
100+ add_bug_tags(main_bug, ["originate-from-" + str(bug.id)])
101
102
103 def add_bug_task(bug, bug_task):
104- assert(type(bug_task) == lazr.restfulclient.resource.Entry)
105+ assert type(bug_task) == lazr.restfulclient.resource.Entry
106
107 # Check if already have the requested
108 for i in bug.bug_tasks:
109 if bug_task.name == i.bug_target_name:
110- log.warning('Also-affects on {} already complete.'.format(bug_task))
111+ log.warning("Also-affects on {} already complete.".format(bug_task))
112 return
113 bug.addTask(target=bug_task)
114 bug.lp_save()
115- log.info('Also-affects on {} successful.'.format(bug_task))
116+ log.info("Also-affects on {} successful.".format(bug_task))
117
118
119 def remote_bug_tag(bug, tag):
120@@ -110,7 +116,7 @@ def remote_bug_tag(bug, tag):
121
122 def add_bug_tags(bug, tags):
123 """ add tags to the bug. """
124- log.info('Add tags {} to bug {}'.format(tags, bug.web_link))
125+ log.info("Add tags {} to bug {}".format(tags, bug.web_link))
126 new_tags = []
127 for tag_to_add in tags:
128 if tag_to_add not in bug.tags:
129@@ -119,7 +125,7 @@ def add_bug_tags(bug, tags):
130 bug.lp_save()
131
132
133-if __name__ == '__main__':
134+if __name__ == "__main__":
135 description = """bind private bugs with pubilc bug
136 bud-bind -p bugnumber private_bugnumber1 private_bugnumber2"""
137 help = """The expected live cycle of an oem-priority bug is:
138@@ -127,11 +133,26 @@ bud-bind -p bugnumber private_bugnumber1 private_bugnumber2"""
139 2. SWE/HWE manually create a public bug.
140 3. Use bug-bind to bind public and private bug."""
141
142- parser = argparse.ArgumentParser(description=description, epilog=help, formatter_class=argparse.RawDescriptionHelpFormatter)
143- parser.add_argument('-m', '--main', help='main bug for private bugs')
144- parser.add_argument('-p', '--public', help='The public bug number')
145- parser.add_argument('-i', '--ihv', help='Launchpad project name for IHV\nExpecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex"', default='swe')
146- parser.add_argument('-v', '--vebose', help='shows debug messages', action='store_true', default=False)
147+ parser = argparse.ArgumentParser(
148+ description=description,
149+ epilog=help,
150+ formatter_class=argparse.RawDescriptionHelpFormatter,
151+ )
152+ parser.add_argument("-m", "--main", help="main bug for private bugs")
153+ parser.add_argument("-p", "--public", help="The public bug number")
154+ parser.add_argument(
155+ "-i",
156+ "--ihv",
157+ help='Launchpad project name for IHV\nExpecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex"',
158+ default="swe",
159+ )
160+ parser.add_argument(
161+ "-v",
162+ "--vebose",
163+ help="shows debug messages",
164+ action="store_true",
165+ default=False,
166+ )
167 # TODO
168 # parser.add_argument('-c', '--clean', help='unlnk the bug between public and private', action='store_true', default=False)
169
170@@ -139,7 +160,9 @@ bud-bind -p bugnumber private_bugnumber1 private_bugnumber2"""
171 if args.vebose:
172 log.setLevel(logging.DEBUG)
173 if args.ihv not in ["swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex"]:
174- raise Exception('Expecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex" for ihv')
175+ raise Exception(
176+ 'Expecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex" for ihv'
177+ )
178 if len(private_bugs) == 0:
179 parser.error("must provide private bug numbers.")
180 if args.main:
181diff --git a/copyPackage.py b/copyPackage.py
182index 5358674..c6c0b5c 100755
183--- a/copyPackage.py
184+++ b/copyPackage.py
185@@ -1,7 +1,8 @@
186 #!/usr/bin/python
187 import sys
188 from launchpadlib.launchpad import Launchpad
189-launchpad = Launchpad.login_with('test', "production")
190+
191+launchpad = Launchpad.login_with("test", "production")
192
193
194 def getValueWithDefault(prompt, default):
195@@ -10,8 +11,9 @@ def getValueWithDefault(prompt, default):
196 return result and result or default
197
198
199-from_pocket = getValueWithDefault("From Pocket (Proposed|Updates|Release...)?",
200- "Proposed")
201+from_pocket = getValueWithDefault(
202+ "From Pocket (Proposed|Updates|Release...)?", "Proposed"
203+)
204
205 team = None
206 while not team:
207@@ -29,8 +31,7 @@ while not ppa:
208 except e:
209 print("Invalid ppa name")
210
211-to_pocket = getValueWithDefault("To Pocket (Proposed|Updates|Release...)?",
212- "Release")
213+to_pocket = getValueWithDefault("To Pocket (Proposed|Updates|Release...)?", "Release")
214 to_series = getValueWithDefault("To Series?", "precise")
215
216 # Get link to ubuntu archive
217@@ -42,33 +43,37 @@ while True:
218
219 # View packages in ubuntu archive
220 pkgs = archive.getPublishedSources(
221- source_name=package_name, pocket=from_pocket, status="Published")
222+ source_name=package_name, pocket=from_pocket, status="Published"
223+ )
224
225 while True:
226 print("\n----")
227 names = [p.display_name for p in pkgs]
228 for i, name in enumerate(names):
229- print " %d: %s" % (i, name)
230+ print(" %d: %s" % (i, name))
231 print("----\n")
232- i = raw_input("Enter pkg to transfer (0..%d/[Q]uit/[a]nother)> "
233- % (len(names) - 1))
234+ i = raw_input(
235+ "Enter pkg to transfer (0..%d/[Q]uit/[a]nother)> " % (len(names) - 1)
236+ )
237 try:
238 pkg = pkgs[int(i)]
239
240 print("Ready to copy package %s" % pkg.display_name)
241- if raw_input("Confirm: [Y/n]").lower()[:1] != 'n':
242+ if raw_input("Confirm: [Y/n]").lower()[:1] != "n":
243 pass
244- ppa.syncSource(from_archive=archive,
245- include_binaries=True,
246- source_name=pkg.display_name.split()[0],
247- to_pocket=to_pocket,
248- to_series=to_series,
249- version=pkg.source_package_version)
250+ ppa.syncSource(
251+ from_archive=archive,
252+ include_binaries=True,
253+ source_name=pkg.display_name.split()[0],
254+ to_pocket=to_pocket,
255+ to_series=to_series,
256+ version=pkg.source_package_version,
257+ )
258
259 except (ValueError, IndexError):
260- if i.lower()[:1] == 'q':
261+ if i.lower()[:1] == "q":
262 print("Quitting")
263 sys.exit(0)
264- if i.lower()[:1] == 'a':
265+ if i.lower()[:1] == "a":
266 break
267 print("invalid input\n")
268diff --git a/debian/changelog b/debian/changelog
269index da72513..02a4499 100644
270--- a/debian/changelog
271+++ b/debian/changelog
272@@ -1,3 +1,22 @@
273+oem-scripts (0.98) UNRELEASED; urgency=medium
274+
275+ * Black all Python files.
276+ * debian/control,
277+ debian/rules: Use black to check all Python files.
278+ * tests/test_flake8: Ignore W503 because it is incompatible with PEP 8.
279+ * tests/test_flake8: Ignore E203 because flake8 doesn't deal with it well
280+ and black will cover it.
281+ * debian/rules,
282+ tests/test_black: Move the black check script out of debian/rules so
283+ people can use it to test Python files directly.
284+ * debian/control,
285+ debian/rules,
286+ tests/test_flake8,
287+ tests/test_pep8: Remove the pep8 check because flake8 will use
288+ pycodestyle (formerly called pep8) to check.
289+
290+ -- Shih-Yuan Lee (FourDollars) <sylee@canonical.com> Wed, 09 Jun 2021 20:55:21 +0800
291+
292 oem-scripts (0.97) focal; urgency=medium
293
294 * pkg-oem-meta: Add linux-oem-20.04c parameter support
295diff --git a/debian/control b/debian/control
296index f9e9d76..b805787 100644
297--- a/debian/control
298+++ b/debian/control
299@@ -2,10 +2,10 @@ Source: oem-scripts
300 Section: admin
301 Priority: optional
302 Build-Depends:
303+ black,
304 debhelper (>=11),
305 dh-python,
306 flake8,
307- pep8,
308 python3-all,
309 python3-debian,
310 python3-launchpadlib,
311diff --git a/debian/rules b/debian/rules
312index a739670..98284ec 100755
313--- a/debian/rules
314+++ b/debian/rules
315@@ -5,6 +5,6 @@
316 dh $@ --with python3 --buildsystem=pybuild
317
318 override_dh_auto_test:
319+ ./tests/test_black
320 ./tests/test_flake8
321- ./tests/test_pep8
322 ./tests/test_shellcheck
323diff --git a/get-oem-auth-token b/get-oem-auth-token
324index d3e8428..2e8c679 100755
325--- a/get-oem-auth-token
326+++ b/get-oem-auth-token
327@@ -23,7 +23,7 @@ def prompt_for_credentials():
328 """
329 Return username and password collected from stdin.
330 """
331- print("\rEmail: ", file=sys.stderr, end='')
332+ print("\rEmail: ", file=sys.stderr, end="")
333 username = input()
334 password = getpass.getpass(stream=sys.stderr)
335 return username, password
336@@ -41,7 +41,7 @@ def prompt_for_code():
337 """
338 Return code collected from stdin.
339 """
340- print("\r2FA Code: ", file=sys.stderr, end='')
341+ print("\r2FA Code: ", file=sys.stderr, end="")
342 return input()
343
344
345@@ -52,14 +52,16 @@ def get_session_cookie(browser=get_browser(), args=None):
346 """
347
348 prop = {
349- 'oem-ibs': {
350+ "oem-ibs": {
351 "name": "oem-ibs.canonical.com",
352 "url": "https://oem-ibs.canonical.com/builds/",
353- "session": "sessionid"},
354- 'oem-share': {
355+ "session": "sessionid",
356+ },
357+ "oem-share": {
358 "name": "oem-share.canonical.com",
359 "url": "https://oem-share.canonical.com/oem/cesg-builds/",
360- "session": "pysid"}
361+ "session": "pysid",
362+ },
363 }
364
365 bad_creds = False
366@@ -75,8 +77,7 @@ def get_session_cookie(browser=get_browser(), args=None):
367 return
368 server = prop[args.server]
369
370- print("Authenticating %s with Ubuntu SSO..." % args.server,
371- file=sys.stderr)
372+ print("Authenticating %s with Ubuntu SSO..." % args.server, file=sys.stderr)
373 browser.open(server["url"])
374 # apachd2-openid
375 try:
376@@ -144,15 +145,16 @@ def get_session_cookie(browser=get_browser(), args=None):
377
378 def main():
379 parser = argparse.ArgumentParser(
380- description='Retrieve Canonical internal websites session ID',
381- formatter_class=argparse.ArgumentDefaultsHelpFormatter)
382- parser.add_argument("server",
383- help="Specify server identifier [oem-ibs|oem-share]")
384+ description="Retrieve Canonical internal websites session ID",
385+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
386+ )
387+ parser.add_argument("server", help="Specify server identifier [oem-ibs|oem-share]")
388 parser.add_argument("-u", "--username", help="Specify user's email")
389 parser.add_argument("-p", "--password", help="Specify password")
390 parser.add_argument("-c", "--code", help="Specify 2-factor code")
391- parser.add_argument("-r", "--retry", help="Sepcify authentication retry",
392- type=int, default=3)
393+ parser.add_argument(
394+ "-r", "--retry", help="Sepcify authentication retry", type=int, default=3
395+ )
396 args = parser.parse_args()
397 result = 1
398 try:
399diff --git a/get-oemshare-auth-token b/get-oemshare-auth-token
400index 7e1379b..182649f 100755
401--- a/get-oemshare-auth-token
402+++ b/get-oemshare-auth-token
403@@ -18,7 +18,7 @@ def prompt_for_credentials():
404 """
405 Return username and password collected from stdin.
406 """
407- print("\rEmail: ", file=sys.stderr, end='')
408+ print("\rEmail: ", file=sys.stderr, end="")
409 username = input()
410 password = getpass.getpass(stream=sys.stderr)
411 return username, password
412@@ -28,7 +28,7 @@ def prompt_for_code():
413 """
414 Return code collected from stdin.
415 """
416- print("\r2FA Code: ", file=sys.stderr, end='')
417+ print("\r2FA Code: ", file=sys.stderr, end="")
418 return input()
419
420
421diff --git a/lp-bug b/lp-bug
422index 6bfb087..e76abb2 100755
423--- a/lp-bug
424+++ b/lp-bug
425@@ -30,77 +30,94 @@ from logging import debug, warning, info, critical
426 from oem_scripts.LaunchpadLogin import LaunchpadLogin
427 from tempfile import TemporaryDirectory
428
429-parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
430- epilog="""
431+parser = argparse.ArgumentParser(
432+ formatter_class=argparse.RawDescriptionHelpFormatter,
433+ epilog="""
434 examples:
435 lp-bug copy --output=target_bug_id SOURCE_BUG_ID
436 lp-bug cleanup BUG_ID
437- lp-bug cqa-verify [BUG_ID]""")
438+ lp-bug cqa-verify [BUG_ID]""",
439+)
440
441-parser.add_argument("-d", "--debug",
442- help="print debug messages", action="store_true")
443-parser.add_argument("-q", "--quiet",
444- help="Don't print info messages", action="store_true")
445+parser.add_argument("-d", "--debug", help="print debug messages", action="store_true")
446+parser.add_argument(
447+ "-q", "--quiet", help="Don't print info messages", action="store_true"
448+)
449
450 subparsers = parser.add_subparsers(dest="subcommand")
451
452-copy = subparsers.add_parser('copy', help='[-h] [-o=targetBugID|--output=targetBugID] sourceBugID')
453-copy.add_argument("-o", "--output",
454- help="Specify a file name to write the bug number.",
455- type=argparse.FileType('w', encoding='UTF-8'))
456-copy.add_argument("-t", "--target",
457- help="Specify the target project, 'oem-priority' by default.",
458- type=str)
459-copy.add_argument("bugID",
460- help="Specify the bug number on Launchpad to copy from.", type=int)
461-copy.add_argument("--public",
462- help="Make the bug public.", action="store_true")
463-
464-cleanup = subparsers.add_parser('cleanup', help='[-h] [--yes] bugID')
465-cleanup.add_argument("bugID",
466- help="Specify the bug number on Launchpad to clean up.", type=int)
467-cleanup.add_argument("--yes",
468- help="Say yes for all prompts.", action="store_true")
469-
470-cqa_verify = subparsers.add_parser('cqa-verify', help='[-h] [--yes] [--dry-run] [bugID]',
471- formatter_class=argparse.RawDescriptionHelpFormatter,
472- epilog="""
473-The 'cqa-verify' subcommand will check the versions in the production archive automatically.""")
474-cqa_verify.add_argument("--yes",
475- help="Say yes for all prompts.", action="store_true")
476-cqa_verify.add_argument("--dry-run",
477- help="Dry run the process.", action="store_true")
478-cqa_verify.add_argument('bugID', nargs='?', type=int)
479+copy = subparsers.add_parser(
480+ "copy", help="[-h] [-o=targetBugID|--output=targetBugID] sourceBugID"
481+)
482+copy.add_argument(
483+ "-o",
484+ "--output",
485+ help="Specify a file name to write the bug number.",
486+ type=argparse.FileType("w", encoding="UTF-8"),
487+)
488+copy.add_argument(
489+ "-t",
490+ "--target",
491+ help="Specify the target project, 'oem-priority' by default.",
492+ type=str,
493+)
494+copy.add_argument(
495+ "bugID", help="Specify the bug number on Launchpad to copy from.", type=int
496+)
497+copy.add_argument("--public", help="Make the bug public.", action="store_true")
498+
499+cleanup = subparsers.add_parser("cleanup", help="[-h] [--yes] bugID")
500+cleanup.add_argument(
501+ "bugID", help="Specify the bug number on Launchpad to clean up.", type=int
502+)
503+cleanup.add_argument("--yes", help="Say yes for all prompts.", action="store_true")
504+
505+cqa_verify = subparsers.add_parser(
506+ "cqa-verify",
507+ help="[-h] [--yes] [--dry-run] [bugID]",
508+ formatter_class=argparse.RawDescriptionHelpFormatter,
509+ epilog="""
510+The 'cqa-verify' subcommand will check the versions in the production archive automatically.""",
511+)
512+cqa_verify.add_argument("--yes", help="Say yes for all prompts.", action="store_true")
513+cqa_verify.add_argument("--dry-run", help="Dry run the process.", action="store_true")
514+cqa_verify.add_argument("bugID", nargs="?", type=int)
515
516 args = parser.parse_args()
517
518-logging.addLevelName(logging.DEBUG,
519- "\033[1;96m%s\033[1;0m" %
520- logging.getLevelName(logging.DEBUG))
521-logging.addLevelName(logging.INFO,
522- "\033[1;32m%s\033[1;0m" %
523- logging.getLevelName(logging.INFO))
524-logging.addLevelName(logging.WARNING,
525- "\033[1;33m%s\033[1;0m" %
526- logging.getLevelName(logging.WARNING))
527-logging.addLevelName(logging.ERROR,
528- "\033[1;31m%s\033[1;0m" %
529- logging.getLevelName(logging.ERROR))
530-logging.addLevelName(logging.CRITICAL,
531- "\033[1;41m%s\033[1;0m" %
532- logging.getLevelName(logging.CRITICAL))
533+logging.addLevelName(
534+ logging.DEBUG, "\033[1;96m%s\033[1;0m" % logging.getLevelName(logging.DEBUG)
535+)
536+logging.addLevelName(
537+ logging.INFO, "\033[1;32m%s\033[1;0m" % logging.getLevelName(logging.INFO)
538+)
539+logging.addLevelName(
540+ logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING)
541+)
542+logging.addLevelName(
543+ logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR)
544+)
545+logging.addLevelName(
546+ logging.CRITICAL, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.CRITICAL)
547+)
548
549 if args.debug:
550- logging.basicConfig(format='<%(levelname)s> %(message)s',
551- level=logging.DEBUG,
552- handlers=[logging.StreamHandler(sys.stdout)])
553+ logging.basicConfig(
554+ format="<%(levelname)s> %(message)s",
555+ level=logging.DEBUG,
556+ handlers=[logging.StreamHandler(sys.stdout)],
557+ )
558 elif not args.quiet:
559- logging.basicConfig(format='<%(levelname)s> %(message)s',
560- level=logging.INFO,
561- handlers=[logging.StreamHandler(sys.stdout)])
562+ logging.basicConfig(
563+ format="<%(levelname)s> %(message)s",
564+ level=logging.INFO,
565+ handlers=[logging.StreamHandler(sys.stdout)],
566+ )
567 else:
568- logging.basicConfig(format='<%(levelname)s> %(message)s',
569- handlers=[logging.StreamHandler(sys.stdout)])
570+ logging.basicConfig(
571+ format="<%(levelname)s> %(message)s",
572+ handlers=[logging.StreamHandler(sys.stdout)],
573+ )
574
575
576 def _yes_or_ask(yes: bool, message: str) -> bool:
577@@ -111,7 +128,7 @@ def _yes_or_ask(yes: bool, message: str) -> bool:
578 res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower()
579 if res not in {"y", "n"}:
580 continue
581- if res == 'y':
582+ if res == "y":
583 return True
584 else:
585 return False
586@@ -125,7 +142,7 @@ def copy_bug(lp, bug_number: int, target: str, public: bool, output=None) -> Non
587 project = lp.projects["oem-priority"]
588
589 if public:
590- information_type = 'Public'
591+ information_type = "Public"
592 else:
593 information_type = bug.information_type
594
595@@ -134,7 +151,8 @@ def copy_bug(lp, bug_number: int, target: str, public: bool, output=None) -> Non
596 target=project,
597 title=bug.title,
598 information_type=information_type,
599- tags=bug.tags)
600+ tags=bug.tags,
601+ )
602 info(f'LP: #{new_bug.id} - "{new_bug.title}" is created. {new_bug.web_link}')
603 if output:
604 output.write(f"{new_bug.id}\n")
605@@ -142,7 +160,10 @@ def copy_bug(lp, bug_number: int, target: str, public: bool, output=None) -> Non
606
607 def cleanup_bug(lp, bug_number: int, yes: bool) -> None:
608 bug = lp.bugs[bug_number]
609- if not _yes_or_ask(yes, f'Do you want to cleanup all information on LP: #{bug_number} - "{bug.title}"? {bug.web_link}'):
610+ if not _yes_or_ask(
611+ yes,
612+ f'Do you want to cleanup all information on LP: #{bug_number} - "{bug.title}"? {bug.web_link}',
613+ ):
614 return
615
616 if bug.title != "null":
617@@ -159,14 +180,17 @@ def cleanup_bug(lp, bug_number: int, yes: bool) -> None:
618 found = False
619
620 for bug_task in bug.bug_tasks:
621- if bug_task.bug_target_name == 'null-and-void':
622+ if bug_task.bug_target_name == "null-and-void":
623 found = True
624
625- if not found and bug.information_type == 'Public':
626+ if not found and bug.information_type == "Public":
627 bug.addTask(target=lp.projects["null-and-void"])
628
629 for bug_task in bug.bug_tasks:
630- if bug_task.bug_target_name != 'null-and-void' and bug.information_type == 'Public':
631+ if (
632+ bug_task.bug_target_name != "null-and-void"
633+ and bug.information_type == "Public"
634+ ):
635 try:
636 bug_task.lp_delete()
637 except lazr.restfulclient.errors.BadRequest as e:
638@@ -183,21 +207,27 @@ def cleanup_bug(lp, bug_number: int, yes: bool) -> None:
639 if subscription.canBeUnsubscribedByUser():
640 bug.unsubscribe(person=lp.people[subscription.person.name])
641 else:
642- warning(f"{lp.me.name} doesn't have the permission to unsubscribe {subscription.person.name}.")
643+ warning(
644+ f"{lp.me.name} doesn't have the permission to unsubscribe {subscription.person.name}."
645+ )
646
647- info(f'LP: #{bug.id} has been cleaned. {bug.web_link}')
648+ info(f"LP: #{bug.id} has been cleaned. {bug.web_link}")
649
650
651-def _run_command(command: list or tuple, returncode=(0,), env=None, silent=False) -> (str, str, int):
652+def _run_command(
653+ command: list or tuple, returncode=(0,), env=None, silent=False
654+) -> (str, str, int):
655 if not silent:
656 debug("$ " + " ".join(command))
657- proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
658+ proc = subprocess.Popen(
659+ command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
660+ )
661 out, err = proc.communicate()
662
663 if out:
664- out = out.decode('utf-8').strip()
665+ out = out.decode("utf-8").strip()
666 if err:
667- err = err.decode('utf-8').strip()
668+ err = err.decode("utf-8").strip()
669
670 if proc.returncode not in returncode:
671 critical(f"return {proc.returncode}")
672@@ -216,44 +246,46 @@ def _run_command(command: list or tuple, returncode=(0,), env=None, silent=False
673 return (out, err, proc.returncode)
674
675
676-pattern = re.compile(r'(.*) \(==(.*)\)')
677+pattern = re.compile(r"(.*) \(==(.*)\)")
678
679
680 def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None:
681- cloudberry = lp.projects['cloudberry']
682+ cloudberry = lp.projects["cloudberry"]
683 # Only deal with those bugs with 'Fix Committed' and 'request of publish_package' in the title.
684- tasks = cloudberry.searchTasks(status=['Fix Committed'], search_text='request of publish_package')
685+ tasks = cloudberry.searchTasks(
686+ status=["Fix Committed"], search_text="request of publish_package"
687+ )
688 for task in tasks:
689 bug = task.bug
690 # Only deal with one bug id when it is provided.
691 if bugID and bug.id != bugID:
692 continue
693 # Only deal with those bugs with this tag.
694- if 'cqa-verified-staging' not in bug.tags:
695+ if "cqa-verified-staging" not in bug.tags:
696 continue
697 info(f'LP: #{bug.id} "{bug.title}"\n{bug.description}')
698 debug(bug.tags)
699 multiple = False
700 packages = []
701 prod_archive_line = ""
702- lines = bug.description.split('\n')
703+ lines = bug.description.split("\n")
704 # Parse the package list and the production archive in the bug description.
705 for idx, line in enumerate(lines):
706- if line.startswith('Package: '):
707+ if line.startswith("Package: "):
708 debug(line)
709- if line.endswith(','):
710+ if line.endswith(","):
711 multiple = True
712 packages.append(line[9:-1])
713 else:
714- packages = line[9:].split(',')
715+ packages = line[9:].split(",")
716 elif multiple is True:
717 debug(line)
718- if not line.endswith(','):
719+ if not line.endswith(","):
720 multiple = False
721 packages.append(line.strip())
722 else:
723 packages.append(line.strip()[:-1])
724- elif 'production archive' in line:
725+ elif "production archive" in line:
726 prod_archive_line = lines[idx + 2]
727 # Skip the bug when it found no production archive.
728 if not prod_archive_line:
729@@ -272,9 +304,9 @@ def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None:
730 # Check if the production archive provided the packages and versions.
731 with TemporaryDirectory() as tmpdir:
732 failed = False
733- fingerprint = 'F9FDA6BED73CDC22'
734- series = ['focal', 'bionic', 'xenial']
735- codename = ''
736+ fingerprint = "F9FDA6BED73CDC22"
737+ series = ["focal", "bionic", "xenial"]
738+ codename = ""
739 for item in series:
740 if item in prod_archive_line:
741 codename = item
742@@ -284,23 +316,32 @@ def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None:
743 continue
744 # Setup the temporary apt dir to include the production archive.
745 output, _, returncode = _run_command(
746- ['setup-apt-dir.sh',
747- '-c', codename,
748- '--disable-updates',
749- '--disable-backports',
750- '--apt-dir', tmpdir,
751- '--extra-key', fingerprint,
752- '--extra-repo', prod_archive_line.replace("deb ", f"deb [signed-by={tmpdir}/{fingerprint}.pub] ")
753- ], returncode=(0, 100))
754+ [
755+ "setup-apt-dir.sh",
756+ "-c",
757+ codename,
758+ "--disable-updates",
759+ "--disable-backports",
760+ "--apt-dir",
761+ tmpdir,
762+ "--extra-key",
763+ fingerprint,
764+ "--extra-repo",
765+ prod_archive_line.replace(
766+ "deb ", f"deb [signed-by={tmpdir}/{fingerprint}.pub] "
767+ ),
768+ ],
769+ returncode=(0, 100),
770+ )
771 # Skip the bug when it found some error in the production archive.
772 if returncode == 100:
773 warning(output)
774 continue
775 # Use the temporary apt dir to compare the package versions.
776 for pkg, ver in packages:
777- output, _, _ = _run_command(['pkg-list', '--apt-dir', tmpdir, pkg])
778- for line in output.split('\n'):
779- archive_pkg, archive_ver = line.split(' ')
780+ output, _, _ = _run_command(["pkg-list", "--apt-dir", tmpdir, pkg])
781+ for line in output.split("\n"):
782+ archive_pkg, archive_ver = line.split(" ")
783 if pkg == archive_pkg:
784 if apt_pkg.version_compare(archive_ver, ver) >= 0:
785 print(f"{line} >= {ver}")
786@@ -309,9 +350,12 @@ def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None:
787 failed = True
788 # Tag "cqa-verified" if no failure.
789 if not failed:
790- if not args.dry_run and _yes_or_ask(yes, f'Would you like to tag "cqa-verified" for LP: #{bug.id} "{bug.title}"?'):
791+ if not args.dry_run and _yes_or_ask(
792+ yes,
793+ f'Would you like to tag "cqa-verified" for LP: #{bug.id} "{bug.title}"?',
794+ ):
795 tags = bug.tags.copy()
796- tags.append('cqa-verified')
797+ tags.append("cqa-verified")
798 if f"oem-scripts-{oem_scripts.__version__}" not in tags:
799 tags.append(f"oem-scripts-{oem_scripts.__version__}")
800 bug.tags = tags
801@@ -322,11 +366,11 @@ if args.subcommand:
802 login = LaunchpadLogin()
803 lp = login.lp
804
805-if args.subcommand == 'copy':
806+if args.subcommand == "copy":
807 copy_bug(lp, args.bugID, output=args.output, target=args.target, public=args.public)
808-elif args.subcommand == 'cleanup':
809+elif args.subcommand == "cleanup":
810 cleanup_bug(lp, args.bugID, args.yes)
811-elif args.subcommand == 'cqa-verify':
812+elif args.subcommand == "cqa-verify":
813 cloudberry_cqa_verified(lp, args.yes, args.bugID)
814 else:
815 parser.print_help()
816diff --git a/mir-bug b/mir-bug
817index dee3a25..9683a3b 100755
818--- a/mir-bug
819+++ b/mir-bug
820@@ -36,75 +36,108 @@ from oem_scripts.logging import setup_logging
821 from string import Template
822 from tempfile import TemporaryDirectory
823
824-SUBSCRIBER_LIST = ('oem-solutions-engineers', 'ubuntu-sponsors', 'ubuntu-desktop')
825-TAG_LIST = ('oem-meta-packages', 'oem-priority', f'oem-scripts-{oem_scripts.__version__:.2f}')
826-
827-parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
828- epilog="""
829+SUBSCRIBER_LIST = ("oem-solutions-engineers", "ubuntu-sponsors", "ubuntu-desktop")
830+TAG_LIST = (
831+ "oem-meta-packages",
832+ "oem-priority",
833+ f"oem-scripts-{oem_scripts.__version__:.2f}",
834+)
835+
836+parser = argparse.ArgumentParser(
837+ formatter_class=argparse.RawDescriptionHelpFormatter,
838+ epilog="""
839 examples:
840 mir-bug create sutton.newell ace \"ThinkPad X1 Carbon Gen 8\"
841 mir-bug check BUG_NUMBER
842 mir-bug update BUG_NUMBER
843- mir-bug collect oem-meta-mir-bugs.json""")
844+ mir-bug collect oem-meta-mir-bugs.json""",
845+)
846
847-parser.add_argument("-d", "--debug",
848- help="print debug messages", action="store_true")
849-parser.add_argument("-q", "--quiet",
850- help="Don't print info messages", action="store_true")
851+parser.add_argument("-d", "--debug", help="print debug messages", action="store_true")
852+parser.add_argument(
853+ "-q", "--quiet", help="Don't print info messages", action="store_true"
854+)
855
856 subparsers = parser.add_subparsers(dest="subcommand")
857
858-create = subparsers.add_parser('create', help='[-h] [-o=bugID|--output=bugID] oemCodename platformCodename deviceName')
859-create.add_argument("oemCodename",
860- help="Such as somerville, stella, or sutton.simon")
861-create.add_argument("platformCodename",
862- help="Name deined by PM, like ace.")
863-create.add_argument("deviceName",
864- help="ThinkPad X1 Carbon Gen 8")
865-create.add_argument("-o", "--output",
866- help="Specify a file name to write the bug number.",
867- type=argparse.FileType('w', encoding='UTF-8'))
868-
869-update = subparsers.add_parser('update', help='[-h] [--ready] [--skip] [--tz=UTC-8] [--yes] bugNumber')
870-update.add_argument("bugNumber",
871- help="Specify the bug number on Launchpad to update.", type=int)
872-update.add_argument("--yes",
873- help="Say yes for all prompts.", action="store_true")
874-update.add_argument("--skip",
875- help="Skip updating bootstrap branch of Git repository.", action="store_true")
876-update.add_argument("--tz",
877- help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8")
878-update.add_argument("--ready",
879- action="store_true",
880- help="Update the bug to Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.")
881-
882-check = subparsers.add_parser('check', help='[-h] [--ready] [--skip] [--tz=UTC-8] bugNumber')
883-check.add_argument("bugNumber",
884- help="Specify the bug number on Launchpad to do some sanity checks.", type=int)
885-check.add_argument("--skip",
886- help="Skip checking oem branch of Git repository.", action="store_true")
887-check.add_argument("--tz",
888- help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8")
889-check.add_argument("--ready",
890- action="store_true",
891- help="Check if the bug is Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.")
892-
893-collect = subparsers.add_parser('collect', help='[-h] [--ubuntu-certified] jsonFile')
894-collect.add_argument("json",
895- help="Specify the json file name to write.",
896- type=argparse.FileType('w', encoding='UTF-8'))
897-collect.add_argument("--ubuntu-certified",
898- action="store_true",
899- help="Only collect those bugs with the 'ubuntu-certified' tag.")
900-collect.add_argument("--verification-needed",
901- action="store_true",
902- help="Only collect those bugs with the 'verification-needed' tag.")
903+create = subparsers.add_parser(
904+ "create",
905+ help="[-h] [-o=bugID|--output=bugID] oemCodename platformCodename deviceName",
906+)
907+create.add_argument("oemCodename", help="Such as somerville, stella, or sutton.simon")
 908+create.add_argument("platformCodename", help="Name defined by PM, like ace.")
909+create.add_argument("deviceName", help="ThinkPad X1 Carbon Gen 8")
910+create.add_argument(
911+ "-o",
912+ "--output",
913+ help="Specify a file name to write the bug number.",
914+ type=argparse.FileType("w", encoding="UTF-8"),
915+)
916+
917+update = subparsers.add_parser(
918+ "update", help="[-h] [--ready] [--skip] [--tz=UTC-8] [--yes] bugNumber"
919+)
920+update.add_argument(
921+ "bugNumber", help="Specify the bug number on Launchpad to update.", type=int
922+)
923+update.add_argument("--yes", help="Say yes for all prompts.", action="store_true")
924+update.add_argument(
925+ "--skip",
926+ help="Skip updating bootstrap branch of Git repository.",
927+ action="store_true",
928+)
929+update.add_argument(
930+ "--tz", help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8"
931+)
932+update.add_argument(
933+ "--ready",
934+ action="store_true",
935+ help="Update the bug to Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.",
936+)
937+
938+check = subparsers.add_parser(
939+ "check", help="[-h] [--ready] [--skip] [--tz=UTC-8] bugNumber"
940+)
941+check.add_argument(
942+ "bugNumber",
943+ help="Specify the bug number on Launchpad to do some sanity checks.",
944+ type=int,
945+)
946+check.add_argument(
947+ "--skip", help="Skip checking oem branch of Git repository.", action="store_true"
948+)
949+check.add_argument(
950+ "--tz", help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8"
951+)
952+check.add_argument(
953+ "--ready",
954+ action="store_true",
955+ help="Check if the bug is Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.",
956+)
957+
958+collect = subparsers.add_parser("collect", help="[-h] [--ubuntu-certified] jsonFile")
959+collect.add_argument(
960+ "json",
961+ help="Specify the json file name to write.",
962+ type=argparse.FileType("w", encoding="UTF-8"),
963+)
964+collect.add_argument(
965+ "--ubuntu-certified",
966+ action="store_true",
967+ help="Only collect those bugs with the 'ubuntu-certified' tag.",
968+)
969+collect.add_argument(
970+ "--verification-needed",
971+ action="store_true",
972+ help="Only collect those bugs with the 'verification-needed' tag.",
973+)
974
975 args = parser.parse_args()
976
977 setup_logging(debug=args.debug, quiet=args.quiet)
978
979-mir_bug_description_template = Template(f"""[Availability]
980+mir_bug_description_template = Template(
981+ f"""[Availability]
982 This is a meta package for https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM that means the package doesn't exist in Debian or Ubuntu archive yet.
983 The source code of the $metaPkgName for focal:
984 git clone -b $branchName https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-$oemCodenameNogroup-projects-meta
985@@ -131,26 +164,27 @@ Canonical OEM Enablement Team will take care of the maintenance.
986 [Background information]
987 Please check https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM for details.
988
989-Please use "oem-metapackage-mir-check" in lp:ubuntu-archive-tools to verify this MIR against the reference package in the archive.""")
990+Please use "oem-metapackage-mir-check" in lp:ubuntu-archive-tools to verify this MIR against the reference package in the archive."""
991+)
992
993-pattern = re.compile(r'.*\[MIR\]\W*oem-([^-]*)-(.*)-meta\W*')
994+pattern = re.compile(r".*\[MIR\]\W*oem-([^-]*)-(.*)-meta\W*")
995
996
997 def create_bug(lp, oemCodename, platformCodename, deviceName):
998 info("Creating bug...")
999- tempList = oemCodename.split('.')
1000+ tempList = oemCodename.split(".")
1001 oemCodenameNogroup = tempList[0]
1002 if len(tempList) == 2:
1003 oemGroupName = tempList[1]
1004 else:
1005- oemGroupName = ''
1006+ oemGroupName = ""
1007
1008 # metaPkgName's examples
1009 # oem-somerville-metapod-meta
1010 # oem-sutton.newell-ace-meta
1011 metaPkgName = "oem-" + oemCodename + "-" + platformCodename + "-meta"
1012
1013- if oemGroupName.strip() != '':
1014+ if oemGroupName.strip() != "":
1015 branchName = oemGroupName + "." + platformCodename + "-focal-ubuntu"
1016 else:
1017 branchName = platformCodename + "-focal-ubuntu"
1018@@ -162,8 +196,15 @@ def create_bug(lp, oemCodename, platformCodename, deviceName):
1019 metaPkgName=metaPkgName,
1020 branchName=branchName,
1021 oemCodenameNogroup=oemCodenameNogroup,
1022- deviceName=deviceName)
1023- bug = lp.bugs.createBug(description=bd, target=project, title=bt, information_type='Public', tags=TAG_LIST)
1024+ deviceName=deviceName,
1025+ )
1026+ bug = lp.bugs.createBug(
1027+ description=bd,
1028+ target=project,
1029+ title=bt,
1030+ information_type="Public",
1031+ tags=TAG_LIST,
1032+ )
1033
1034 info("meta package public bug: " + bug.web_link)
1035
1036@@ -171,23 +212,32 @@ def create_bug(lp, oemCodename, platformCodename, deviceName):
1037 args.output.write(f"{bug.id}\n")
1038
1039 for task in bug.bug_tasks:
1040- task.status = 'Confirmed'
1041- task.importance = 'Critical'
1042+ task.status = "Confirmed"
1043+ task.importance = "Critical"
1044 # Assign to reporter by default
1045 task.assignee = lp.me
1046 task.lp_save()
1047
1048 # Subscribe the oem-solutions-engineers
1049- bug.subscribe(person=lp.people['oem-solutions-engineers'])
1050+ bug.subscribe(person=lp.people["oem-solutions-engineers"])
1051 bug.lp_save()
1052
1053
1054 def collect_bugs(lp, output):
1055 info("Collecting bugs...")
1056- project = lp.projects['oem-priority']
1057+ project = lp.projects["oem-priority"]
1058 tasks = project.searchTasks(
1059- status=['New', 'Incomplete', 'Triaged', 'Opinion', 'Confirmed', 'In Progress', 'Fix Committed'],
1060- search_text='[MIR]')
1061+ status=[
1062+ "New",
1063+ "Incomplete",
1064+ "Triaged",
1065+ "Opinion",
1066+ "Confirmed",
1067+ "In Progress",
1068+ "Fix Committed",
1069+ ],
1070+ search_text="[MIR]",
1071+ )
1072 try:
1073 total = int(tasks.total_size)
1074 except TypeError: # When the total size becomes more than 50, it won't return 'int' but 'ScalarValue' instead.
1075@@ -197,28 +247,38 @@ def collect_bugs(lp, output):
1076 for counter, task in enumerate(tasks, 1):
1077 bug = task.bug
1078
1079- if '[MIR]' not in bug.title or 'oem' not in bug.title or 'meta' not in bug.title:
1080- info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT MATCHED**")
1081+ if (
1082+ "[MIR]" not in bug.title
1083+ or "oem" not in bug.title
1084+ or "meta" not in bug.title
1085+ ):
1086+ info(
1087+ f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT MATCHED**"
1088+ )
1089 continue
1090
1091- if args.ubuntu_certified and 'ubuntu-certified' not in bug.tags:
1092- info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT CERTIFIED**")
1093+ if args.ubuntu_certified and "ubuntu-certified" not in bug.tags:
1094+ info(
1095+ f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT CERTIFIED**"
1096+ )
1097 continue
1098
1099 if args.verification_needed:
1100 verification_needed = False
1101 for tag in bug.tags:
1102- if tag.startswith('verification-needed'):
1103+ if tag.startswith("verification-needed"):
1104 verification_needed = True
1105 if not verification_needed:
1106- info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT VERIFICATION NEEDED**")
1107+ info(
1108+ f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT VERIFICATION NEEDED**"
1109+ )
1110 continue
1111
1112 result = pattern.match(bug.title)
1113 git = None
1114 if result:
1115- if '.' in result.group(1):
1116- project, group = result.group(1).split('.')
1117+ if "." in result.group(1):
1118+ project, group = result.group(1).split(".")
1119 else:
1120 project = result.group(1)
1121 group = None
1122@@ -234,32 +294,38 @@ def collect_bugs(lp, output):
1123
1124 ubuntu_status = None
1125 for bug_task in bug.bug_tasks:
1126- if bug_task.bug_target_name == 'ubuntu':
1127+ if bug_task.bug_target_name == "ubuntu":
1128 ubuntu_status = bug_task.status
1129
1130 attachments = []
1131 for attachment in bug.attachments:
1132- attachments.append({'title': attachment.title, 'data_link': attachment.data_link, 'type': attachment.type})
1133+ attachments.append(
1134+ {
1135+ "title": attachment.title,
1136+ "data_link": attachment.data_link,
1137+ "type": attachment.type,
1138+ }
1139+ )
1140 clip = {
1141- 'bug': "https://bugs.launchpad.net/bugs/%s" % bug.id,
1142- 'link': bug.self_link,
1143- 'title': bug.title,
1144- 'importance': task.importance,
1145- 'tag': bug.tags,
1146- 'description': bug.description,
1147- 'status': task.status,
1148- 'ubuntu_status': ubuntu_status,
1149- 'owner': task.owner.name,
1150- 'assignee': task.assignee.name if task.assignee else 'none',
1151- 'subscriptions': subscriptions,
1152- 'attachments': attachments,
1153- 'git': git
1154+ "bug": "https://bugs.launchpad.net/bugs/%s" % bug.id,
1155+ "link": bug.self_link,
1156+ "title": bug.title,
1157+ "importance": task.importance,
1158+ "tag": bug.tags,
1159+ "description": bug.description,
1160+ "status": task.status,
1161+ "ubuntu_status": ubuntu_status,
1162+ "owner": task.owner.name,
1163+ "assignee": task.assignee.name if task.assignee else "none",
1164+ "subscriptions": subscriptions,
1165+ "attachments": attachments,
1166+ "git": git,
1167 }
1168 bugs.append(clip)
1169 info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status})")
1170
1171 info("total: %d matched" % len(bugs))
1172- output.write(json.dumps(bugs, sort_keys=True, separators=(',', ':')))
1173+ output.write(json.dumps(bugs, sort_keys=True, separators=(",", ":")))
1174 output.write("\n")
1175
1176
1177@@ -271,7 +337,7 @@ def yes_or_ask(yes: bool, message: str) -> bool:
1178 res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower()
1179 if res not in {"y", "n"}:
1180 continue
1181- if res == 'y':
1182+ if res == "y":
1183 return True
1184 else:
1185 return False
1186@@ -288,7 +354,9 @@ def update_bug(lp, bug_number: int, yes: bool) -> None:
1187 bug_modified = True
1188 bug.description = desc
1189
1190- if check_bug_title(bug, pkg_name) is False and yes_or_ask(yes, "Do you want to update the bug title?"):
1191+ if check_bug_title(bug, pkg_name) is False and yes_or_ask(
1192+ yes, "Do you want to update the bug title?"
1193+ ):
1194 bug_modified = True
1195 bug.title = f"[MIR] {pkg_name}"
1196
1197@@ -298,11 +366,13 @@ def update_bug(lp, bug_number: int, yes: bool) -> None:
1198
1199 if check_bug_importance(bug) is False:
1200 for task in bug.bug_tasks:
1201- if task.importance != 'Critical':
1202- if task.bug_target_name == 'oem-priority' and \
1203- yes_or_ask(yes, f"Do you want to update the importance of {task.bug_target_name} from {task.importance} to Critical?"):
1204+ if task.importance != "Critical":
1205+ if task.bug_target_name == "oem-priority" and yes_or_ask(
1206+ yes,
1207+ f"Do you want to update the importance of {task.bug_target_name} from {task.importance} to Critical?",
1208+ ):
1209 task_modified = True
1210- task.importance = 'Critical'
1211+ task.importance = "Critical"
1212 task.lp_save()
1213
1214 update_bug_status(bug, yes)
1215@@ -312,12 +382,14 @@ def update_bug(lp, bug_number: int, yes: bool) -> None:
1216 if check_bug_tags(bug) is False:
1217 tags = copy(bug.tags)
1218 for tag in TAG_LIST:
1219- if tag not in bug.tags and yes_or_ask(yes, f"Do you want to add '{tag}' tag?"):
1220+ if tag not in bug.tags and yes_or_ask(
1221+ yes, f"Do you want to add '{tag}' tag?"
1222+ ):
1223 bug_modified = True
1224 tags.append(tag)
1225 for tag in bug.tags:
1226- if tag.startswith('oem-scripts-'):
1227- if tag[len("oem-scripts-"):] != f'{oem_scripts.__version__:.2f}':
1228+ if tag.startswith("oem-scripts-"):
1229+ if tag[len("oem-scripts-") :] != f"{oem_scripts.__version__:.2f}":
1230 if yes_or_ask(yes, f"Do you want to remove '{tag}' tag?"):
1231 tags.remove(tag)
1232 if tags != bug.tags:
1233@@ -329,11 +401,11 @@ def update_bug(lp, bug_number: int, yes: bool) -> None:
1234
1235 if bug_modified:
1236 bug.lp_save()
1237- info(f'LP: #{bug_number} is updated.')
1238+ info(f"LP: #{bug_number} is updated.")
1239 elif task_modified:
1240- info(f'LP: #{bug_number} is updated.')
1241+ info(f"LP: #{bug_number} is updated.")
1242 elif yes:
1243- info('Everything looks OK.')
1244+ info("Everything looks OK.")
1245
1246
1247 def check_bug(lp, bug_number: int) -> None:
1248@@ -357,11 +429,14 @@ def check_bug(lp, bug_number: int) -> None:
1249 need_fixing = True
1250 if check_and_update_bug_attachments(bug, pkg_name) is False:
1251 need_fixing = True
1252- if not args.skip and check_and_update_git_repo(bug, pkg_name, bootstrap=False) is False:
1253+ if (
1254+ not args.skip
1255+ and check_and_update_git_repo(bug, pkg_name, bootstrap=False) is False
1256+ ):
1257 need_fixing = True
1258
1259 if need_fixing is False:
1260- info('Everything looks OK.')
1261+ info("Everything looks OK.")
1262 else:
1263 exit(1)
1264
1265@@ -373,8 +448,8 @@ def check_bug_description(bug) -> (str, str):
1266 critical(f"LP: #{bug.id} '{bug.title}' is NOT MATCHED")
1267 exit(1)
1268
1269- if '.' in result.group(1):
1270- project, group = result.group(1).split('.')
1271+ if "." in result.group(1):
1272+ project, group = result.group(1).split(".")
1273 platform = result.group(2)
1274 branchName = group + "." + platform + "-focal-ubuntu"
1275 else:
1276@@ -384,10 +459,10 @@ def check_bug_description(bug) -> (str, str):
1277 branchName = platform + "-focal-ubuntu"
1278
1279 metaPkgName = f"oem-{result.group(1)}-{result.group(2)}-meta"
1280- prog = re.compile(r'\W*We want to improve the hardware support for ([^.]*).\W*')
1281+ prog = re.compile(r"\W*We want to improve the hardware support for ([^.]*).\W*")
1282 deviceName = None
1283
1284- for line in bug.description.split('\n'):
1285+ for line in bug.description.split("\n"):
1286 result = prog.match(line)
1287 if not result:
1288 continue
1289@@ -403,11 +478,12 @@ def check_bug_description(bug) -> (str, str):
1290 metaPkgName=metaPkgName,
1291 branchName=branchName,
1292 oemCodenameNogroup=project,
1293- deviceName=deviceName)
1294+ deviceName=deviceName,
1295+ )
1296
1297 if bug.description != desc:
1298 d = difflib.Differ()
1299- diff = d.compare(bug.description.split('\n'), desc.split('\n'))
1300+ diff = d.compare(bug.description.split("\n"), desc.split("\n"))
1301 error("The description needs to update.")
1302 if not args.quiet:
1303 for i, line in enumerate(diff):
1304@@ -431,15 +507,19 @@ def check_bug_importance(bug) -> bool:
1305 info("Checking bug importance...")
1306 result = True
1307 for task in bug.bug_tasks:
1308- if task.bug_target_name == 'oem-priority' and task.importance != 'Critical':
1309- error(f"The '{task.bug_target_name}' importance is expected to be 'Critical' instead of '{task.importance}'.")
1310+ if task.bug_target_name == "oem-priority" and task.importance != "Critical":
1311+ error(
1312+ f"The '{task.bug_target_name}' importance is expected to be 'Critical' instead of '{task.importance}'."
1313+ )
1314 result = False
1315 return result
1316
1317
1318 def _expected_status(target_name: str, status: str, expected: str) -> bool:
1319 if status != expected:
1320- error(f"The '{target_name}' status is expected to be '{expected}' instead of '{status}'.")
1321+ error(
1322+ f"The '{target_name}' status is expected to be '{expected}' instead of '{status}'."
1323+ )
1324 return False
1325 return True
1326
1327@@ -449,20 +529,32 @@ def check_bug_status(bug, pkg_name: str) -> bool:
1328 result = True
1329 saw_ubuntu_task = False
1330 for task in bug.bug_tasks:
1331- if task.bug_target_name == 'oem-priority':
1332+ if task.bug_target_name == "oem-priority":
1333 if args.ready:
1334- if _expected_status(task.bug_target_name, task.status, 'Fix Committed') is False:
1335+ if (
1336+ _expected_status(task.bug_target_name, task.status, "Fix Committed")
1337+ is False
1338+ ):
1339 result = False
1340 else:
1341- if _expected_status(task.bug_target_name, task.status, 'In Progress') is False:
1342+ if (
1343+ _expected_status(task.bug_target_name, task.status, "In Progress")
1344+ is False
1345+ ):
1346 result = False
1347- elif task.bug_target_name == 'ubuntu':
1348+ elif task.bug_target_name == "ubuntu":
1349 saw_ubuntu_task = True
1350 if args.ready:
1351- if _expected_status(task.bug_target_name, task.status, 'Confirmed') is False:
1352+ if (
1353+ _expected_status(task.bug_target_name, task.status, "Confirmed")
1354+ is False
1355+ ):
1356 result = False
1357 else:
1358- if _expected_status(task.bug_target_name, task.status, 'Incomplete') is False:
1359+ if (
1360+ _expected_status(task.bug_target_name, task.status, "Incomplete")
1361+ is False
1362+ ):
1363 result = False
1364 elif f"{pkg_name} (Ubuntu)" not in task.bug_target_name:
1365 critical(f"It is unexpected to have '{task.bug_target_name}' task")
1366@@ -472,17 +564,25 @@ def check_bug_status(bug, pkg_name: str) -> bool:
1367 return result
1368
1369
1370-def _ok_to_change_status(target_name: str, orig_status: str, new_status: str, yes: bool) -> bool:
1371+def _ok_to_change_status(
1372+ target_name: str, orig_status: str, new_status: str, yes: bool
1373+) -> bool:
1374 if orig_status == new_status:
1375 return False
1376- if yes_or_ask(yes, f"Would you like to change the '{target_name}' status from '{orig_status}' to '{new_status}'?"):
1377+ if yes_or_ask(
1378+ yes,
1379+ f"Would you like to change the '{target_name}' status from '{orig_status}' to '{new_status}'?",
1380+ ):
1381 return True
1382 return False
1383
1384
1385 def _change_task_status(task, new_status: str, yes: bool) -> bool:
1386- if _expected_status(task.bug_target_name, task.status, new_status) is False and \
1387- _ok_to_change_status(task.bug_target_name, task.status, new_status, yes):
1388+ if _expected_status(
1389+ task.bug_target_name, task.status, new_status
1390+ ) is False and _ok_to_change_status(
1391+ task.bug_target_name, task.status, new_status, yes
1392+ ):
1393 task.status = new_status
1394 task.lp_save()
1395
1396@@ -491,35 +591,42 @@ def update_bug_status(bug, yes: bool) -> None:
1397 info("Updating bug status...")
1398 saw_ubuntu_task = False
1399 for bug_task in bug.bug_tasks:
1400- if bug_task.bug_target_name == 'oem-priority':
1401+ if bug_task.bug_target_name == "oem-priority":
1402 if args.ready:
1403- _change_task_status(bug_task, 'Fix Committed', yes)
1404+ _change_task_status(bug_task, "Fix Committed", yes)
1405 else:
1406- _change_task_status(bug_task, 'In Progress', yes)
1407- elif bug_task.bug_target_name == 'ubuntu':
1408+ _change_task_status(bug_task, "In Progress", yes)
1409+ elif bug_task.bug_target_name == "ubuntu":
1410 saw_ubuntu_task = True
1411 if args.ready:
1412- _change_task_status(bug_task, 'Confirmed', yes)
1413- elif yes_or_ask(yes, f"Would you like to delete the '{bug_task.bug_target_name}' bug_task? (Don't affect '{bug_task.bug_target_display_name}')"):
1414+ _change_task_status(bug_task, "Confirmed", yes)
1415+ elif yes_or_ask(
1416+ yes,
1417+ f"Would you like to delete the '{bug_task.bug_target_name}' bug_task? (Don't affect '{bug_task.bug_target_display_name}')",
1418+ ):
1419 try:
1420 bug_task.lp_delete()
1421 except lazr.restfulclient.errors.BadRequest as e:
1422- warning(f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead.")
1423+ warning(
1424+ f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead."
1425+ )
1426 debug(e)
1427- _change_task_status(bug_task, 'Incomplete', yes)
1428+ _change_task_status(bug_task, "Incomplete", yes)
1429 except lazr.restfulclient.errors.Unauthorized as e:
1430- warning(f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead.")
1431+ warning(
1432+ f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead."
1433+ )
1434 debug(e)
1435- _change_task_status(bug_task, 'Incomplete', yes)
1436+ _change_task_status(bug_task, "Incomplete", yes)
1437 else:
1438- _change_task_status(bug_task, 'Incomplete', yes)
1439+ _change_task_status(bug_task, "Incomplete", yes)
1440 else:
1441 warning(f"{bug_task.bug_target_name} {bug_task.status}")
1442 if args.ready and saw_ubuntu_task is False:
1443 bug.addTask(target=lp.projects["Ubuntu"])
1444 for bug_task in bug.bug_tasks:
1445- if bug_task.bug_target_name == 'ubuntu':
1446- _change_task_status(bug_task, 'Confirmed', yes)
1447+ if bug_task.bug_target_name == "ubuntu":
1448+ _change_task_status(bug_task, "Confirmed", yes)
1449
1450
1451 def check_and_update_bug_subscriptions(lp, bug, update=False, yes=False) -> bool:
1452@@ -532,29 +639,41 @@ def check_and_update_bug_subscriptions(lp, bug, update=False, yes=False) -> bool
1453 for subscription in bug.subscriptions:
1454 subscriptions.append(subscription.person.name)
1455 if not args.ready:
1456- for subscriber in ('ubuntu-sponsors', 'ubuntu-desktop'):
1457+ for subscriber in ("ubuntu-sponsors", "ubuntu-desktop"):
1458 if subscriber == subscription.person.name:
1459 error(f"'{subscriber}' should not be in the subscriptions.")
1460- if update and yes_or_ask(yes, f"Do you want to unsubscribe '{subscriber}'?"):
1461+ if update and yes_or_ask(
1462+ yes, f"Do you want to unsubscribe '{subscriber}'?"
1463+ ):
1464 if subscription.canBeUnsubscribedByUser():
1465 bug.unsubscribe(person=lp.people[subscriber])
1466 else:
1467- warning(f"{lp.me.name} doesn't have the permission to unsubscribe {subscriber}.")
1468+ warning(
1469+ f"{lp.me.name} doesn't have the permission to unsubscribe {subscriber}."
1470+ )
1471 result = False
1472 else:
1473 result = False
1474 if args.ready:
1475- for subscriber in ('oem-solutions-engineers', 'ubuntu-sponsors', 'ubuntu-desktop'):
1476+ for subscriber in (
1477+ "oem-solutions-engineers",
1478+ "ubuntu-sponsors",
1479+ "ubuntu-desktop",
1480+ ):
1481 if subscriber not in subscriptions:
1482 error(f"'{subscriber}' is not in the subscriptions.")
1483- if update and yes_or_ask(yes, f"Do you want to subscribe '{subscriber}'?"):
1484+ if update and yes_or_ask(
1485+ yes, f"Do you want to subscribe '{subscriber}'?"
1486+ ):
1487 bug.subscribe(person=lp.people[subscriber])
1488 else:
1489 result = False
1490 else:
1491- if 'oem-solutions-engineers' not in subscriptions:
1492+ if "oem-solutions-engineers" not in subscriptions:
1493 error(f"'oem-solutions-engineers' is not in the subscriptions.")
1494- if update and yes_or_ask(yes, f"Do you want to subscribe 'oem-solutions-engineers'?"):
1495+ if update and yes_or_ask(
1496+ yes, f"Do you want to subscribe 'oem-solutions-engineers'?"
1497+ ):
1498 bug.subscribe(person=lp.people[subscriber])
1499 else:
1500 result = False
1501@@ -573,13 +692,15 @@ def check_bug_tags(bug) -> bool:
1502
1503 def _run_command(command: list or tuple, returncode=(0,), env=None) -> (str, str):
1504 debug("$ " + " ".join(command))
1505- proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
1506+ proc = subprocess.Popen(
1507+ command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
1508+ )
1509 out, err = proc.communicate()
1510
1511 if out:
1512- out = out.decode('utf-8').strip()
1513+ out = out.decode("utf-8").strip()
1514 if err:
1515- err = err.decode('utf-8').strip()
1516+ err = err.decode("utf-8").strip()
1517
1518 if proc.returncode not in returncode:
1519 critical(f"return {proc.returncode}")
1520@@ -597,7 +718,9 @@ def _run_command(command: list or tuple, returncode=(0,), env=None) -> (str, str
1521 return (out, err, proc.returncode)
1522
1523
1524-def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False) -> bool:
1525+def check_and_update_bug_attachments(
1526+ bug, pkg_name: str, update=False, yes=False
1527+) -> bool:
1528 if update:
1529 info("Checking and updating attachments...")
1530 else:
1531@@ -608,8 +731,8 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False
1532 critical(f"{pkg_name} failed.")
1533 exit(1)
1534
1535- if '.' in result.group(1):
1536- project, group = result.group(1).split('.')
1537+ if "." in result.group(1):
1538+ project, group = result.group(1).split(".")
1539 else:
1540 project = result.group(1)
1541 group = None
1542@@ -620,7 +743,16 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False
1543 else:
1544 branch = f"{platform}-focal-ubuntu"
1545
1546- git_command = ("git", "clone", "--depth", "1", "-b", branch, f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", pkg_name)
1547+ git_command = (
1548+ "git",
1549+ "clone",
1550+ "--depth",
1551+ "1",
1552+ "-b",
1553+ branch,
1554+ f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta",
1555+ pkg_name,
1556+ )
1557
1558 debdiff = None
1559 content = None
1560@@ -628,33 +760,49 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False
1561
1562 with TemporaryDirectory() as tmpdir:
1563 os.chdir(tmpdir)
1564- _run_command(['wget', 'https://bazaar.launchpad.net/~ubuntu-archive/ubuntu-archive-tools/trunk/download/head:/oem-metapackage-mir-check'])
1565+ _run_command(
1566+ [
1567+ "wget",
1568+ "https://bazaar.launchpad.net/~ubuntu-archive/ubuntu-archive-tools/trunk/download/head:/oem-metapackage-mir-check",
1569+ ]
1570+ )
1571 _run_command(git_command)
1572 git_dir = os.path.join(tmpdir, pkg_name)
1573 os.chdir(git_dir)
1574- _run_command(['dpkg-buildpackage', '-S', '-us', '-uc'])
1575+ _run_command(["dpkg-buildpackage", "-S", "-us", "-uc"])
1576 os.chdir(tmpdir)
1577- dsc = glob(f'{pkg_name}*.dsc')[0]
1578- prog = re.compile(fr'{pkg_name}_(.*).dsc')
1579+ dsc = glob(f"{pkg_name}*.dsc")[0]
1580+ prog = re.compile(fr"{pkg_name}_(.*).dsc")
1581 result = prog.match(dsc)
1582 version = result.group(1)
1583 debdiff = f"{pkg_name}_{version}.debdiff"
1584 # It should generate some debdiff so the return code should be 1 unless comparing to oem-qemu-meta itself.
1585 debug(f"TZ={args.tz}")
1586- content, _, _ = _run_command(['bash', 'oem-metapackage-mir-check', dsc], returncode=(1,), env=dict(os.environ, TZ=args.tz))
1587+ content, _, _ = _run_command(
1588+ ["bash", "oem-metapackage-mir-check", dsc],
1589+ returncode=(1,),
1590+ env=dict(os.environ, TZ=args.tz),
1591+ )
1592 content += "\n"
1593- with open(debdiff, 'w') as f:
1594+ with open(debdiff, "w") as f:
1595 f.write(content)
1596
1597 for attachment in bug.attachments:
1598- if 'debdiff' in attachment.title:
1599- _run_command(['wget', attachment.data_link, '-O', 'data'])
1600- out, err, returncode = _run_command(['colordiff', '-ur', 'data', debdiff], returncode=(0, 1))
1601+ if "debdiff" in attachment.title:
1602+ _run_command(["wget", attachment.data_link, "-O", "data"])
1603+ out, err, returncode = _run_command(
1604+ ["colordiff", "-ur", "data", debdiff], returncode=(0, 1)
1605+ )
1606 if returncode == 1:
1607- warning(f"{attachment.title} - {attachment.web_link} has unexpected content.")
1608+ warning(
1609+ f"{attachment.title} - {attachment.web_link} has unexpected content."
1610+ )
1611 info(f"{out}")
1612 found = True
1613- if update and yes_or_ask(yes, f"Do you want to remove {attachment.title} - {attachment.web_link}?"):
1614+ if update and yes_or_ask(
1615+ yes,
1616+ f"Do you want to remove {attachment.title} - {attachment.web_link}?",
1617+ ):
1618 try:
1619 attachment.removeFromBug()
1620 except lazr.restfulclient.errors.NotFound as e:
1621@@ -669,32 +817,54 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False
1622 error(f"There is no {debdiff}.")
1623 info(content)
1624 if update and yes_or_ask(yes, f"Do you want to attach {debdiff}?"):
1625- bug.addAttachment(filename=debdiff, data=content.encode("utf-8"), comment=f"Attach {debdiff} by oem-scripts {oem_scripts.__version__:.2f}.", is_patch=True)
1626+ bug.addAttachment(
1627+ filename=debdiff,
1628+ data=content.encode("utf-8"),
1629+ comment=f"Attach {debdiff} by oem-scripts {oem_scripts.__version__:.2f}.",
1630+ is_patch=True,
1631+ )
1632 return True
1633 else:
1634 return False
1635
1636
1637 def _get_items_from_git(project: str, branch: str, pkg_name: str) -> tuple:
1638- git_command = ("git", "clone", "--depth", "1", "-b", branch, f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", pkg_name)
1639+ git_command = (
1640+ "git",
1641+ "clone",
1642+ "--depth",
1643+ "1",
1644+ "-b",
1645+ branch,
1646+ f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta",
1647+ pkg_name,
1648+ )
1649 with TemporaryDirectory() as tmpdir:
1650 os.chdir(tmpdir)
1651 _run_command(git_command)
1652 git_dir = os.path.join(tmpdir, pkg_name)
1653
1654- if project == 'somerville':
1655- prog = re.compile(r"alias pci:\*sv00001028sd0000([0-9A-F]{4})[^ ]* meta (.*)")
1656- elif project == 'stella':
1657- prog = re.compile(r"alias pci:\*sv0000103Csd0000([0-9A-F]{4})[^ ]* meta (.*)")
1658+ if project == "somerville":
1659+ prog = re.compile(
1660+ r"alias pci:\*sv00001028sd0000([0-9A-F]{4})[^ ]* meta (.*)"
1661+ )
1662+ elif project == "stella":
1663+ prog = re.compile(
1664+ r"alias pci:\*sv0000103Csd0000([0-9A-F]{4})[^ ]* meta (.*)"
1665+ )
1666 else:
1667- prog = re.compile(r"alias dmi:\*bvn([0-9a-zA-Z]+):bvr([0-9a-zA-Z]{3})\* meta (.*)")
1668+ prog = re.compile(
1669+ r"alias dmi:\*bvn([0-9a-zA-Z]+):bvr([0-9a-zA-Z]{3})\* meta (.*)"
1670+ )
1671
1672 ids = []
1673- with open(os.path.join(git_dir, 'debian', 'modaliases'), 'r') as modaliases:
1674+ with open(os.path.join(git_dir, "debian", "modaliases"), "r") as modaliases:
1675 for line in modaliases:
1676 result = prog.match(line.strip())
1677 if result.group(result.lastindex) != pkg_name:
1678- error("Something wrong in debian/modaliases. Please fix it manually first.")
1679+ error(
1680+ "Something wrong in debian/modaliases. Please fix it manually first."
1681+ )
1682 return False
1683 if result.lastindex == 3:
1684 ids.append((result.group(1), result.group(2)))
1685@@ -702,21 +872,25 @@ def _get_items_from_git(project: str, branch: str, pkg_name: str) -> tuple:
1686 ids.append(result.group(1))
1687 kernel_flavour = None
1688 kernel_meta = None
1689- with open(os.path.join(git_dir, 'debian', 'control'), 'r') as control:
1690+ with open(os.path.join(git_dir, "debian", "control"), "r") as control:
1691 for line in control:
1692- if line.startswith('XB-Ubuntu-OEM-Kernel-Flavour:'):
1693- kernel_flavour = line[len('XB-Ubuntu-OEM-Kernel-Flavour:'):].strip()
1694- elif line.startswith('Depends:'):
1695- if 'linux-oem-20.04b' in line:
1696- kernel_meta = 'linux-oem-20.04b'
1697- elif 'linux-oem-20.04' in line:
1698- kernel_meta = 'linux-oem-20.04'
1699- elif 'linux-generic-hwe-20.04' in line:
1700- kernel_meta = 'linux-generic-hwe-20.04'
1701+ if line.startswith("XB-Ubuntu-OEM-Kernel-Flavour:"):
1702+ kernel_flavour = line[
1703+ len("XB-Ubuntu-OEM-Kernel-Flavour:") :
1704+ ].strip()
1705+ elif line.startswith("Depends:"):
1706+ if "linux-oem-20.04b" in line:
1707+ kernel_meta = "linux-oem-20.04b"
1708+ elif "linux-oem-20.04" in line:
1709+ kernel_meta = "linux-oem-20.04"
1710+ elif "linux-generic-hwe-20.04" in line:
1711+ kernel_meta = "linux-generic-hwe-20.04"
1712 return kernel_flavour, kernel_meta, ids
1713
1714
1715-def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, update=False, username=None) -> bool:
1716+def check_and_update_git_repo(
1717+ bug, pkg_name: str, yes=False, bootstrap=True, update=False, username=None
1718+) -> bool:
1719 if update:
1720 if bootstrap:
1721 info("Checking and updating git repo for bootstrap branch...")
1722@@ -734,17 +908,17 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd
1723 critical(f"{pkg_name} failed.")
1724 exit(1)
1725
1726- if '.' in result.group(1):
1727- project, group = result.group(1).split('.')
1728+ if "." in result.group(1):
1729+ project, group = result.group(1).split(".")
1730 else:
1731 project = result.group(1)
1732 group = None
1733 platform = result.group(2)
1734
1735 if bootstrap:
1736- suffix = 'ubuntu'
1737+ suffix = "ubuntu"
1738 else:
1739- suffix = 'oem'
1740+ suffix = "oem"
1741
1742 if group:
1743 branch = f"{group}.{platform}-focal-{suffix}"
1744@@ -752,23 +926,27 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd
1745 branch = f"{platform}-focal-{suffix}"
1746
1747 kernel_flavour, kernel_meta, ids = _get_items_from_git(project, branch, pkg_name)
1748- if kernel_flavour == 'default':
1749+ if kernel_flavour == "default":
1750 if kernel_meta is None:
1751- kernel_meta = 'linux-generic-hwe-20.04'
1752- elif kernel_meta == 'linux-generic-hwe-20.04':
1753+ kernel_meta = "linux-generic-hwe-20.04"
1754+ elif kernel_meta == "linux-generic-hwe-20.04":
1755 pass
1756 else:
1757- critical(f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}.")
1758+ critical(
1759+ f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}."
1760+ )
1761 exit(1)
1762- elif kernel_flavour == 'oem':
1763+ elif kernel_flavour == "oem":
1764 if kernel_meta is None:
1765- kernel_meta = 'linux-oem-20.04'
1766- elif kernel_meta == 'linux-oem-20.04b':
1767+ kernel_meta = "linux-oem-20.04"
1768+ elif kernel_meta == "linux-oem-20.04b":
1769 pass
1770- elif kernel_meta == 'linux-oem-20.04':
1771+ elif kernel_meta == "linux-oem-20.04":
1772 pass
1773 else:
1774- critical(f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}.")
1775+ critical(
1776+ f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}."
1777+ )
1778 exit(1)
1779
1780 if ids == []:
1781@@ -778,27 +956,61 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd
1782 with TemporaryDirectory() as tmpdir:
1783 os.chdir(tmpdir)
1784 # Generated the meta package by pkg-oem-meta
1785- if project == 'somerville':
1786- command = ['pkg-somerville-meta', '-s', 'focal', '-k', kernel_meta, '-p', platform, '--public-bug', str(bug.id)]
1787+ if project == "somerville":
1788+ command = [
1789+ "pkg-somerville-meta",
1790+ "-s",
1791+ "focal",
1792+ "-k",
1793+ kernel_meta,
1794+ "-p",
1795+ platform,
1796+ "--public-bug",
1797+ str(bug.id),
1798+ ]
1799 command.extend(ids)
1800- elif project == 'stella':
1801- command = ['pkg-stella-meta', '-s', 'focal', '-k', kernel_meta, '-g', group, '-p', platform, '--public-bug', str(bug.id)]
1802+ elif project == "stella":
1803+ command = [
1804+ "pkg-stella-meta",
1805+ "-s",
1806+ "focal",
1807+ "-k",
1808+ kernel_meta,
1809+ "-g",
1810+ group,
1811+ "-p",
1812+ platform,
1813+ "--public-bug",
1814+ str(bug.id),
1815+ ]
1816 command.extend(ids)
1817- elif project == 'sutton':
1818- command = ['pkg-sutton-meta', '-s', 'focal', '-k', kernel_meta, '-g', group, '-p', platform, '--public-bug', str(bug.id)]
1819+ elif project == "sutton":
1820+ command = [
1821+ "pkg-sutton-meta",
1822+ "-s",
1823+ "focal",
1824+ "-k",
1825+ kernel_meta,
1826+ "-g",
1827+ group,
1828+ "-p",
1829+ platform,
1830+ "--public-bug",
1831+ str(bug.id),
1832+ ]
1833 for bvn, bvr in ids:
1834 command.append(f"bvn{bvn}:bvr{bvr}")
1835 _run_command(command)
1836 new_dir = os.path.join(tmpdir, pkg_name)
1837 os.chdir(new_dir)
1838 if bootstrap:
1839- _run_command(['git', 'checkout', branch])
1840+ _run_command(["git", "checkout", branch])
1841
1842 os.chdir(tmpdir)
1843- os.rename(new_dir, new_dir + '.new')
1844+ os.rename(new_dir, new_dir + ".new")
1845 new_dir += ".new"
1846- shutil.rmtree(os.path.join(new_dir, '.git'))
1847- os.remove(os.path.join(new_dir, 'debian', 'changelog'))
1848+ shutil.rmtree(os.path.join(new_dir, ".git"))
1849+ os.remove(os.path.join(new_dir, "debian", "changelog"))
1850
1851 if username:
1852 git_repo = f"git+ssh://{username}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta"
1853@@ -811,25 +1023,27 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd
1854
1855 if bootstrap:
1856 lines = None
1857- changelog = os.path.join(tmpdir, pkg_name, 'debian', 'changelog')
1858- with open(changelog, 'r') as f:
1859+ changelog = os.path.join(tmpdir, pkg_name, "debian", "changelog")
1860+ with open(changelog, "r") as f:
1861 lines = f.readlines()
1862
1863 lines[0] = f"{pkg_name} (20.04~ubuntu1) UNRELEASED; urgency=medium\n"
1864
1865- if f"(LP: #{bug.id})" not in lines[2] and lines[2].startswith(" * Meta package for"):
1866+ if f"(LP: #{bug.id})" not in lines[2] and lines[2].startswith(
1867+ " * Meta package for"
1868+ ):
1869 lines[2] = " " + lines[2].strip() + f" (LP: #{bug.id})\n"
1870
1871- with open(changelog, 'w') as f:
1872+ with open(changelog, "w") as f:
1873 f.writelines(lines)
1874
1875 # Remove deprecated autopkgtest file
1876- deprecated_test = os.path.join(tmpdir, pkg_name, 'debian', 'tests', pkg_name)
1877+ deprecated_test = os.path.join(tmpdir, pkg_name, "debian", "tests", pkg_name)
1878 if os.path.exists(deprecated_test):
1879 _run_command(["git", "rm", f"debian/tests/{pkg_name}"])
1880
1881 # Remove deprecated debian/compat
1882- deprecated_compat = os.path.join(tmpdir, pkg_name, 'debian', 'compat')
1883+ deprecated_compat = os.path.join(tmpdir, pkg_name, "debian", "compat")
1884 if os.path.exists(deprecated_compat):
1885 _run_command(["git", "rm", f"debian/compat"])
1886
1887@@ -841,22 +1055,39 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd
1888
1889 _run_command(["git", "add", "."])
1890 out, _, _ = _run_command(["git", "diff", "--color=always", "--cached"])
1891- if out != b'':
1892+ if out != b"":
1893 warning("$ git diff")
1894 print(out)
1895 if update is True:
1896 if args.skip:
1897- warning(f"The update of the '{branch}' branch of {pkg_name}'s Git repository is skipped on demand.")
1898+ warning(
1899+ f"The update of the '{branch}' branch of {pkg_name}'s Git repository is skipped on demand."
1900+ )
1901 return False
1902- elif yes_or_ask(yes, f"Do you want to commit and push the changes above into the '{branch}' branch of {pkg_name}'s Git repository?"):
1903- _run_command(['git', 'commit', '-a', '-m', f"Updated by oem-scripts {oem_scripts.__version__:.2f}."])
1904- _run_command(['git', 'push'])
1905+ elif yes_or_ask(
1906+ yes,
1907+ f"Do you want to commit and push the changes above into the '{branch}' branch of {pkg_name}'s Git repository?",
1908+ ):
1909+ _run_command(
1910+ [
1911+ "git",
1912+ "commit",
1913+ "-a",
1914+ "-m",
1915+ f"Updated by oem-scripts {oem_scripts.__version__:.2f}.",
1916+ ]
1917+ )
1918+ _run_command(["git", "push"])
1919 return True
1920 else:
1921 if bootstrap:
1922- error(f"The '{branch}' branch of {pkg_name} in Git repository needs to update.")
1923+ error(
1924+ f"The '{branch}' branch of {pkg_name} in Git repository needs to update."
1925+ )
1926 else:
1927- warning(f"The '{branch}' branch of {pkg_name} in Git repository may need to update.")
1928+ warning(
1929+ f"The '{branch}' branch of {pkg_name} in Git repository may need to update."
1930+ )
1931 return False
1932 else:
1933 info(f"The '{branch}' branch of {pkg_name} in Git repository looks fine.")
1934@@ -867,13 +1098,13 @@ if args.subcommand:
1935 login = LaunchpadLogin()
1936 lp = login.lp
1937
1938-if args.subcommand == 'create':
1939+if args.subcommand == "create":
1940 create_bug(lp, args.oemCodename, args.platformCodename, args.deviceName)
1941-elif args.subcommand == 'update':
1942+elif args.subcommand == "update":
1943 update_bug(lp, args.bugNumber, args.yes)
1944-elif args.subcommand == 'check':
1945+elif args.subcommand == "check":
1946 check_bug(lp, args.bugNumber)
1947-elif args.subcommand == 'collect':
1948+elif args.subcommand == "collect":
1949 collect_bugs(lp, args.json)
1950 else:
1951 parser.print_help()
1952diff --git a/oem-getiso b/oem-getiso
1953index fc7723d..932e769 100755
1954--- a/oem-getiso
1955+++ b/oem-getiso
1956@@ -11,11 +11,10 @@ import sys
1957 import re
1958 from optparse import OptionParser
1959
1960-__DEBUG__ = (os.getenv('DEBUG') == 'true')
1961+__DEBUG__ = os.getenv("DEBUG") == "true"
1962
1963
1964 class ImageURI(object):
1965-
1966 def __init__(self, project, date, buildorder, image_type):
1967 self.project = project
1968 self.date = date
1969@@ -24,13 +23,12 @@ class ImageURI(object):
1970 self.image_type = image_type
1971
1972 # default value
1973- self.siteurl = 'https://oem-share.canonical.com'
1974+ self.siteurl = "https://oem-share.canonical.com"
1975 self.ispublished = False
1976
1977 @property
1978 def rootdir(self):
1979- self._rootdir = self.ispublished and\
1980- '/partners/' or '/oem/cesg-builds/'
1981+ self._rootdir = self.ispublished and "/partners/" or "/oem/cesg-builds/"
1982 return self._rootdir
1983
1984 @property
1985@@ -44,23 +42,31 @@ class ImageURI(object):
1986 @property
1987 def imagepath(self):
1988 if self.ispublished:
1989- return os.path.join(self.project, 'images',
1990- '{}-{}'.format(self.date, self.buildorder),
1991- self.image_type, self.isoname)
1992+ return os.path.join(
1993+ self.project,
1994+ "images",
1995+ "{}-{}".format(self.date, self.buildorder),
1996+ self.image_type,
1997+ self.isoname,
1998+ )
1999 else:
2000- return os.path.join(self.project, self.date, self.buildorder,
2001- 'images', self.image_type, self.isoname)
2002+ return os.path.join(
2003+ self.project,
2004+ self.date,
2005+ self.buildorder,
2006+ "images",
2007+ self.image_type,
2008+ self.isoname,
2009+ )
2010
2011 @property
2012 def isoname(self):
2013 if self.manifest_ver:
2014- project = self.project + '-' + self.manifest_ver
2015+ project = self.project + "-" + self.manifest_ver
2016 else:
2017 project = self.project
2018- image_type = 'iso' \
2019- if re.match(r'^dell-bto', self.project) else self.image_type
2020- return '{}-{}-{}-{}.iso'.format(project, image_type,
2021- self.date, self.buildorder)
2022+ image_type = "iso" if re.match(r"^dell-bto", self.project) else self.image_type
2023+ return "{}-{}-{}-{}.iso".format(project, image_type, self.date, self.buildorder)
2024
2025 @classmethod
2026 def from_url(cls, url):
2027@@ -88,33 +94,41 @@ class ImageURI(object):
2028 'dell-bto-oneiric-pebble-beach-X05-iso-20111226-0.iso'
2029 >>> o = ImageURI.from_url('https://oem-share.canonical.com/partners/dell-bto-oneiric-audi-13-intel/images/20120316-3/iso/dell-bto-oneiric-audi-13-intel-A04-iso-20120316-3.iso')
2030 """
2031+
2032 def _parse(url):
2033 from urllib.parse import urlparse
2034- if not re.match(r'^https.*', url):
2035- logging.error('{0} is not a valid image URL'.format(url))
2036+
2037+ if not re.match(r"^https.*", url):
2038+ logging.error("{0} is not a valid image URL".format(url))
2039 raise IndexError
2040- comps = urlparse(url).path.split('/')
2041- published = (comps[1] == 'partners')
2042+ comps = urlparse(url).path.split("/")
2043+ published = comps[1] == "partners"
2044 if published:
2045 (proj, image_type) = (comps[2], comps[5])
2046- (date, buildorder) = comps[4].split('-')
2047+ (date, buildorder) = comps[4].split("-")
2048 else:
2049- (proj, date, buildorder, image_type) = \
2050- (comps[3], comps[4], comps[5], comps[7])
2051- if 'dell-bto' in proj:
2052+ (proj, date, buildorder, image_type) = (
2053+ comps[3],
2054+ comps[4],
2055+ comps[5],
2056+ comps[7],
2057+ )
2058+ if "dell-bto" in proj:
2059 manifest_ver_index = published and 6 or 8
2060 try:
2061- manifest_ver = comps[manifest_ver_index].split('-')[-4]
2062+ manifest_ver = comps[manifest_ver_index].split("-")[-4]
2063 except IndexError:
2064- logging.error('published:{0} manifest_ver_index:{1}'
2065- .format(published, manifest_ver_index))
2066+ logging.error(
2067+ "published:{0} manifest_ver_index:{1}".format(
2068+ published, manifest_ver_index
2069+ )
2070+ )
2071 raise IndexError
2072 else:
2073 manifest_ver = None
2074- return (proj, date, buildorder, published,
2075- manifest_ver, image_type)
2076- (project, date, buildorder, published, manifest_ver, image_type) = \
2077- _parse(url)
2078+ return (proj, date, buildorder, published, manifest_ver, image_type)
2079+
2080+ (project, date, buildorder, published, manifest_ver, image_type) = _parse(url)
2081 obj = cls(project, date, buildorder, image_type)
2082 obj.manifest_ver = manifest_ver
2083 obj.ispublished = published
2084@@ -141,25 +155,24 @@ class ImageURI(object):
2085
2086 def __testself():
2087 import doctest
2088+
2089 doctest.testmod()
2090
2091
2092 class RsyncURI(object):
2093-
2094 def __init__(self, imguri, username=None, siteurl=None):
2095 self.imguri = imguri
2096- self.siteurl = siteurl or 'oem-share.canonical.com'
2097- self.username = username or os.getenv('USER')
2098- self.rootdir = '/srv/oem-share.canonical.com/www'
2099+ self.siteurl = siteurl or "oem-share.canonical.com"
2100+ self.username = username or os.getenv("USER")
2101+ self.rootdir = "/srv/oem-share.canonical.com/www"
2102
2103 @property
2104 def isolink(self):
2105- return '{}@{}:{}'.format(self.username, self.siteurl, self.imagepath)
2106+ return "{}@{}:{}".format(self.username, self.siteurl, self.imagepath)
2107
2108 @property
2109 def imagepath(self):
2110- return self.rootdir + os.path.join(self.imguri.rootdir,
2111- self.imguri.imagepath)
2112+ return self.rootdir + os.path.join(self.imguri.rootdir, self.imguri.imagepath)
2113
2114 @classmethod
2115 def from_imguri(cls, imguri):
2116@@ -176,7 +189,6 @@ class RsyncURI(object):
2117
2118
2119 class _Downloader(object):
2120-
2121 def __init__(self, url):
2122 """
2123 >>> o = _Downloader('https://oem-share.canonical.com/partners/somerville-oneiric-amd64/images/20111116-1/iso/somerville-oneiric-amd64-iso-20111116-1.iso')
2124@@ -185,8 +197,7 @@ class _Downloader(object):
2125 self.imguri = ImageURI.from_url(url)
2126
2127 def find_lastdownloadediso(self):
2128- proj_isos = filter(lambda iso: self.imguri.project in iso,
2129- glob.glob('*.iso'))
2130+ proj_isos = filter(lambda iso: self.imguri.project in iso, glob.glob("*.iso"))
2131 try:
2132 return sorted_isos(proj_isos)[-1]
2133 except IndexError:
2134@@ -205,12 +216,12 @@ class _Downloader(object):
2135
2136 def run(self):
2137 self.lastiso = self.find_lastdownloadediso()
2138- print('Found last downloaded file:{}'.format(self.lastiso))
2139+ print("Found last downloaded file:{}".format(self.lastiso))
2140 self.isolink = RsyncURI.from_imguri(self.imguri).isolink
2141 # if os.path.exists(self.imguri.isoname):
2142 # sys.exit("Image {} already be downloaded.".format(
2143 # self.imguri.isoname))
2144- print('Starting to download file:{}'.format(self.imguri.isoname))
2145+ print("Starting to download file:{}".format(self.imguri.isoname))
2146 self.proc = self.do_download()
2147 ret = self.proc.wait()
2148 self.post_download()
2149@@ -222,10 +233,7 @@ class RsyncDownloader(_Downloader):
2150
2151 def do_download(self):
2152 self.filename = self.lastiso and self.lastiso or self.imguri.isoname
2153- cmd = ['rsync', '-Pv',
2154- self.isolink,
2155- self.filename
2156- ]
2157+ cmd = ["rsync", "-Pv", self.isolink, self.filename]
2158
2159 return subprocess.Popen(cmd)
2160
2161@@ -235,24 +243,27 @@ class RsyncDownloader(_Downloader):
2162
2163
2164 class ZsyncDownloader(_Downloader):
2165-
2166 def __init__(self, url, auth_token):
2167 super(ZsyncDownloader, self).__init__(url)
2168 self.auth_token = auth_token
2169
2170 def do_download(self):
2171- cmd = ['zsync_curl',
2172- self.imguri.isolink + '.zsync',
2173- '-c pysid=' + self.auth_token
2174- ]
2175+ cmd = [
2176+ "zsync_curl",
2177+ self.imguri.isolink + ".zsync",
2178+ "-c pysid=" + self.auth_token,
2179+ ]
2180 if self.lastiso:
2181- cmd.append('-i ' + self.lastiso)
2182+ cmd.append("-i " + self.lastiso)
2183
2184 return subprocess.Popen(" ".join(cmd), shell=True)
2185
2186 def post_download(self):
2187- if self.lastiso and self.lastiso != self.imguri.isoname and \
2188- os.path.exists(self.lastiso):
2189+ if (
2190+ self.lastiso
2191+ and self.lastiso != self.imguri.isoname
2192+ and os.path.exists(self.lastiso)
2193+ ):
2194 os.remove(self.lastiso)
2195 if os.path.exists(self.imguri.isoname + ".zs-old"):
2196 os.remove(self.imguri.isoname + ".zs-old")
2197@@ -267,19 +278,21 @@ def sorted_isos(isos):
2198 >>> sorted_isos(isos)
2199 ['watauga2-precise-amd64-norecovery-iso-20130121-0.iso', 'watauga2-precise-amd64-norecovery-iso-20130121-1.iso']
2200 """
2201+
2202 def _f(e):
2203- e = e.replace('.iso', '')
2204- if e.startswith('dell'):
2205- e = e.replace('dell-bto-oneiric-', '')
2206- comps = e.split('-iso-')
2207- comps = comps[1].split('-')
2208+ e = e.replace(".iso", "")
2209+ if e.startswith("dell"):
2210+ e = e.replace("dell-bto-oneiric-", "")
2211+ comps = e.split("-iso-")
2212+ comps = comps[1].split("-")
2213 (date, order) = (comps[0], comps[1])
2214 else:
2215 # Date and build number are guaranteed to be the last 2 fields
2216 # in the name of an ISO produced by Offspring
2217- comps = e.rsplit('-', 2)
2218+ comps = e.rsplit("-", 2)
2219 (date, order) = (comps[1], comps[2])
2220- return int('{}{}'.format(date, order))
2221+ return int("{}{}".format(date, order))
2222+
2223 return sorted(isos, key=_f)
2224
2225
2226@@ -289,7 +302,8 @@ def iso_of_bugdesc(desc):
2227 ('somerville-oneiric-amd64', '20111116', '1')
2228 """
2229 import re
2230- ret = re.findall('Image:\\s+((\\S+)-(\\d+)-(\\d)).*', desc)
2231+
2232+ ret = re.findall("Image:\\s+((\\S+)-(\\d+)-(\\d)).*", desc)
2233 if ret:
2234 return (ret[0][1], ret[0][2], ret[0][3])
2235 else:
2236@@ -302,9 +316,10 @@ def isourl_of_bug(q):
2237 'https://oem-share.canonical.com/oem/cesg-builds/stella-anaheim-precise-amd64/20130116/0/images/iso/stella-anaheim-precise-amd64-iso-20130116-0.iso'
2238 """
2239 from launchpadlib.launchpad import Launchpad
2240+
2241 cachedir = os.path.join(os.environ["HOME"], ".launchpadlib/cache")
2242 try:
2243- lp = Launchpad.login_with("oem-getiso", 'production', cachedir)
2244+ lp = Launchpad.login_with("oem-getiso", "production", cachedir)
2245 found_iso = iso_of_bugdesc(lp.bugs[q].description)
2246 except KeyboardInterrupt:
2247 print("Terminated by user reqeust!")
2248@@ -317,20 +332,29 @@ def isourl_of_bug(q):
2249 return img.isolink
2250
2251
2252-if __name__ == '__main__':
2253- usage = "usage: %prog --method [rsync|zsync --auth_token " \
2254- "<auth_token>] iso_url|bug_number"
2255+if __name__ == "__main__":
2256+ usage = (
2257+ "usage: %prog --method [rsync|zsync --auth_token "
2258+ "<auth_token>] iso_url|bug_number"
2259+ )
2260 parser = OptionParser(usage=usage)
2261 parser.add_option(
2262- "--method", dest="method", default="rsync", metavar="METHOD",
2263- help="The METHOD of download. Defaults to 'rsync'.")
2264+ "--method",
2265+ dest="method",
2266+ default="rsync",
2267+ metavar="METHOD",
2268+ help="The METHOD of download. Defaults to 'rsync'.",
2269+ )
2270 parser.add_option(
2271- "--auth_token", dest="auth_token", metavar="AUTH_TOKEN",
2272- help="The authetication token needed to access oem-share.")
2273+ "--auth_token",
2274+ dest="auth_token",
2275+ metavar="AUTH_TOKEN",
2276+ help="The authentication token needed to access oem-share.",
2277+ )
2278 (options, args) = parser.parse_args()
2279
2280 if __DEBUG__:
2281- print('self testing...')
2282+ print("self testing...")
2283 __testself()
2284 sys.exit()
2285
2286@@ -353,8 +377,7 @@ if __name__ == '__main__':
2287 if options.auth_token is None:
2288 sys.exit(parser.get_usage())
2289 if not os.path.exists("/usr/bin/zsync_curl"):
2290- sys.exit(
2291- "The zync-curl package must be installed to use this method.")
2292+ sys.exit("The zsync-curl package must be installed to use this method.")
2293 runner = ZsyncDownloader(url, options.auth_token)
2294 else:
2295 sys.exit(parser.get_usage())
2296diff --git a/oem-meta-packages b/oem-meta-packages
2297index 91b07ef..c98dee8 100755
2298--- a/oem-meta-packages
2299+++ b/oem-meta-packages
2300@@ -40,7 +40,8 @@ from string import Template
2301 from tempfile import TemporaryDirectory
2302
2303
2304-staging_copy_template = Template(f'''Operation: copy_package
2305+staging_copy_template = Template(
2306+ f'''Operation: copy_package
2307 Source: $source
2308 Destination: $destination
2309 Package: $packages
2310@@ -60,7 +61,8 @@ And then verify the production archive.
2311 """
2312 deb http://oem.archive.canonical.com $distribution $component
2313 deb-src http://oem.archive.canonical.com $distribution $component
2314-"""''')
2315+"""'''
2316+)
2317
2318
2319 class DataJSONEncoder(json.JSONEncoder):
2320@@ -90,10 +92,11 @@ class PkgInfo:
2321 proposed_version: str
2322
2323
2324-pattern = re.compile(r'oem-([^-]*)-(.*)-meta')
2325-staging_pattern = re.compile(r'.*>(.*)/</a>')
2326-parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
2327- epilog="""
2328+pattern = re.compile(r"oem-([^-]*)-(.*)-meta")
2329+staging_pattern = re.compile(r".*>(.*)/</a>")
2330+parser = argparse.ArgumentParser(
2331+ formatter_class=argparse.RawDescriptionHelpFormatter,
2332+ epilog="""
2333 You need to have the [oem-scripts] section in ~/.config/oem-scripts/config.ini.
2334 Executing `launchpad-api get people/+me` will generate it.
2335
2336@@ -113,35 +116,38 @@ url = SomewhereToProvideTheInformationForUsernameAndPassword
2337 username = UserName
2338 password = PassWord
2339 archive = https://cesg.canonical.com/canonical
2340-fingerprint = 54F1860295829CE3""")
2341-
2342-
2343-parser.add_argument("-d", "--debug",
2344- help="print debug messages", action="store_true")
2345-parser.add_argument("-q", "--quiet",
2346- help="Don't print info messages", action="store_true")
2347-parser.add_argument("-v", "--verbose", action="store_true",
2348- help="print verbose messages")
2349-parser.add_argument("--dry-run",
2350- help="Dry run the process.", action="store_true")
2351-parser.add_argument("--yes",
2352- help="Say yes for all prompts.", action="store_true")
2353-parser.add_argument("--skip",
2354- help="Skip some projects or some meta packages.", type=str, default="")
2355-parser.add_argument("--only",
2356- help="Specified the meta package. Skip others.", type=str)
2357-parser.add_argument("--since",
2358- help="Begin from the specified meta package. Skip previous meta packages.", type=str)
2359-parser.add_argument("--apt-dir",
2360- type=str,
2361- help="specify the dir for apt")
2362+fingerprint = 54F1860295829CE3""",
2363+)
2364+
2365+
2366+parser.add_argument("-d", "--debug", help="print debug messages", action="store_true")
2367+parser.add_argument(
2368+ "-q", "--quiet", help="Don't print info messages", action="store_true"
2369+)
2370+parser.add_argument(
2371+ "-v", "--verbose", action="store_true", help="print verbose messages"
2372+)
2373+parser.add_argument("--dry-run", help="Dry run the process.", action="store_true")
2374+parser.add_argument("--yes", help="Say yes for all prompts.", action="store_true")
2375+parser.add_argument(
2376+ "--skip", help="Skip some projects or some meta packages.", type=str, default=""
2377+)
2378+parser.add_argument("--only", help="Specified the meta package. Skip others.", type=str)
2379+parser.add_argument(
2380+ "--since",
2381+ help="Begin from the specified meta package. Skip previous meta packages.",
2382+ type=str,
2383+)
2384+parser.add_argument("--apt-dir", type=str, help="specify the dir for apt")
2385
2386
2387 subparsers = parser.add_subparsers(dest="subcommand")
2388
2389-collect = subparsers.add_parser('collect', help='[-h] --json platforms.json | --meta oem-qemu-meta -o|--output meta-info.json',
2390- formatter_class=argparse.RawDescriptionHelpFormatter,
2391- epilog="""
2392+collect = subparsers.add_parser(
2393+ "collect",
2394+ help="[-h] --json platforms.json | --meta oem-qemu-meta -o|--output meta-info.json",
2395+ formatter_class=argparse.RawDescriptionHelpFormatter,
2396+ epilog="""
2397 For example,
2398 oem-meta-packages collect --json platforms.json --output meta-info.json
2399
2400@@ -179,24 +185,37 @@ platforms.json:
2401
2402 oem-meta-packages collect --meta oem-qemu-meta --output meta-info.json
2403
2404-Collect the information of OEM metapackages in PPAs and devel/staging archives.""")
2405-
2406-collect.add_argument("--json",
2407- help="Specify the json file to read the platforms information.",
2408- type=argparse.FileType('r', encoding='UTF-8'))
2409-collect.add_argument("--meta",
2410- help="Specify the meta package to collect the information.")
2411-collect.add_argument("-o", "--output", required=True,
2412- help="Specify a filename to write the meta information.",
2413- type=argparse.FileType('w', encoding='UTF-8'))
2414-
2415-list_pkg = subparsers.add_parser('list', help='[-h]',
2416- formatter_class=argparse.RawDescriptionHelpFormatter,
2417- epilog="List all OEM meta packages in Ubuntu archive.")
2418-
2419-subscribe = subparsers.add_parser('subscribe', help='[-h]',
2420- formatter_class=argparse.RawDescriptionHelpFormatter,
2421- epilog="""
2422+Collect the information of OEM metapackages in PPAs and devel/staging archives.""",
2423+)
2424+
2425+collect.add_argument(
2426+ "--json",
2427+ help="Specify the json file to read the platforms information.",
2428+ type=argparse.FileType("r", encoding="UTF-8"),
2429+)
2430+collect.add_argument(
2431+ "--meta", help="Specify the meta package to collect the information."
2432+)
2433+collect.add_argument(
2434+ "-o",
2435+ "--output",
2436+ required=True,
2437+ help="Specify a filename to write the meta information.",
2438+ type=argparse.FileType("w", encoding="UTF-8"),
2439+)
2440+
2441+list_pkg = subparsers.add_parser(
2442+ "list",
2443+ help="[-h]",
2444+ formatter_class=argparse.RawDescriptionHelpFormatter,
2445+ epilog="List all OEM meta packages in Ubuntu archive.",
2446+)
2447+
2448+subscribe = subparsers.add_parser(
2449+ "subscribe",
2450+ help="[-h]",
2451+ formatter_class=argparse.RawDescriptionHelpFormatter,
2452+ epilog="""
2453 Make all bugs of all oem meta packages be subscribed by oem-solutions-engineers.
2454 (search current apt source for package lists)
2455
2456@@ -205,56 +224,81 @@ Make all bugs of all oem meta packages be subscribed by oem-solutions-engineers.
2457 (search current apt source for package lists)
2458
2459 Check "To all bugs in oem-qemu-meta in Ubuntu:" on https://launchpad.net/ubuntu/+source/oem-qemu-meta/+subscribe for example.
2460-""")
2461-
2462-unsubscribe = subparsers.add_parser('unsubscribe', help='[-h] pkgName',
2463- formatter_class=argparse.RawDescriptionHelpFormatter,
2464- epilog="""
2465+""",
2466+)
2467+
2468+unsubscribe = subparsers.add_parser(
2469+ "unsubscribe",
2470+ help="[-h] pkgName",
2471+ formatter_class=argparse.RawDescriptionHelpFormatter,
2472+ epilog="""
2473 Unsubscribe oem-solutions-engineers from oem-qemu-meta
2474
2475 For example,
2476 oem-meta-packages unsubscribe oem-qemu-meta
2477-""")
2478-unsubscribe.add_argument("pkgName", type=str,
2479- help="Specify the package name to unsubscribe.")
2480-
2481-staging_copy = subparsers.add_parser('staging-copy', help='[-h] [--ignore-staging-lock] --json meta-info.json | --meta oem-qemu-meta',
2482- formatter_class=argparse.RawDescriptionHelpFormatter,
2483- epilog="""
2484+""",
2485+)
2486+unsubscribe.add_argument(
2487+ "pkgName", type=str, help="Specify the package name to unsubscribe."
2488+)
2489+
2490+staging_copy = subparsers.add_parser(
2491+ "staging-copy",
2492+ help="[-h] [--ignore-staging-lock] --json meta-info.json | --meta oem-qemu-meta",
2493+ formatter_class=argparse.RawDescriptionHelpFormatter,
2494+ epilog="""
2495 For example,
2496 oem-meta-packages --dry-run staging-copy --meta oem-qemu-meta
2497 or
2498 oem-meta-packages --dry-run staging-copy --json meta-info.json (generated by the 'collect' subcommand.)
2499
2500-Copy the meta package from the devel archive into the staging archive.""")
2501-
2502-staging_copy.add_argument("--json",
2503- help="Specify the json file to read the meta information.",
2504- type=argparse.FileType('r', encoding='UTF-8'))
2505-staging_copy.add_argument("--meta",
2506- help="Specify the meta package to copy.")
2507-staging_copy.add_argument("--ignore-staging-lock",
2508- help="Ignore the staging-lock tag.", action="store_true")
2509-
2510-update = subparsers.add_parser('update', help='[-h] [--autopkgtest] --json meta-info.json | --meta oem-qemu-meta',
2511- formatter_class=argparse.RawDescriptionHelpFormatter,
2512- epilog="""
2513+Copy the meta package from the devel archive into the staging archive.""",
2514+)
2515+
2516+staging_copy.add_argument(
2517+ "--json",
2518+ help="Specify the json file to read the meta information.",
2519+ type=argparse.FileType("r", encoding="UTF-8"),
2520+)
2521+staging_copy.add_argument("--meta", help="Specify the meta package to copy.")
2522+staging_copy.add_argument(
2523+ "--ignore-staging-lock", help="Ignore the staging-lock tag.", action="store_true"
2524+)
2525+
2526+update = subparsers.add_parser(
2527+ "update",
2528+ help="[-h] [--autopkgtest] --json meta-info.json | --meta oem-qemu-meta",
2529+ formatter_class=argparse.RawDescriptionHelpFormatter,
2530+ epilog="""
2531 For example,
2532 oem-meta-packages --dry-run update --meta oem-qemu-meta --kernel linux-oem-20.04
2533 or
2534 oem-meta-packages --dry-run update --json meta-info.json (generated by the 'collect' subcommand.)
2535
2536-Update the market name and the kernel flavour of the OEM meta package to the default kernel flavour, i.e. linux-generic-hwe-20.04.""")
2537-update.add_argument("--autopkgtest", action="store_true",
2538- help="Run autopkgtest when checking the git repository.")
2539-update.add_argument("--json", type=argparse.FileType('r', encoding='UTF-8'),
2540- help="Specify the json file to read the meta information.")
2541-update.add_argument("--meta", type=str,
2542- help="Specify the meta package to update.")
2543-update.add_argument("--kernel", type=str, default="linux-generic-hwe-20.04",
2544- help="Specify the kernel meta to update. linux-generic-hwe-20.04|linux-oem-20.04b|linux-oem-20.04")
2545-update.add_argument("--factory", action="store_true",
2546- help="Make the factory meta to depend on the kernel meta directly.")
2547+Update the market name and the kernel flavour of the OEM meta package to the default kernel flavour, i.e. linux-generic-hwe-20.04.""",
2548+)
2549+update.add_argument(
2550+ "--autopkgtest",
2551+ action="store_true",
2552+ help="Run autopkgtest when checking the git repository.",
2553+)
2554+update.add_argument(
2555+ "--json",
2556+ type=argparse.FileType("r", encoding="UTF-8"),
2557+ help="Specify the json file to read the meta information.",
2558+)
2559+update.add_argument("--meta", type=str, help="Specify the meta package to update.")
2560+update.add_argument(
2561+ "--kernel",
2562+ type=str,
2563+ default="linux-generic-hwe-20.04",
2564+ help="Specify the kernel meta to update. linux-generic-hwe-20.04|linux-oem-20.04b|linux-oem-20.04",
2565+)
2566+update.add_argument(
2567+ "--factory",
2568+ action="store_true",
2569+ help="Make the factory meta to depend on the kernel meta directly.",
2570+)
2571
2572 args = parser.parse_args()
2573
2574@@ -263,18 +307,20 @@ setup_logging(debug=args.debug, quiet=args.quiet)
2575 if args.subcommand:
2576 login = LaunchpadLogin()
2577 lp = login.lp
2578- oem_archive = lp.people['oem-archive']
2579+ oem_archive = lp.people["oem-archive"]
2580
2581 if args.apt_dir:
2582 apt_pkg.init_config()
2583 if args.debug:
2584 old = apt_pkg.config.dump()
2585 apt_pkg.config.set("Dir", args.apt_dir)
2586- apt_pkg.config.set("Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status"))
2587+ apt_pkg.config.set(
2588+ "Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status")
2589+ )
2590 if args.debug:
2591 new = apt_pkg.config.dump()
2592 d = difflib.Differ()
2593- diff = d.compare(old.split('\n'), new.split('\n'))
2594+ diff = d.compare(old.split("\n"), new.split("\n"))
2595 for line in diff:
2596 debug(line.strip())
2597 apt_pkg.init_system()
2598@@ -288,7 +334,7 @@ def yes_or_ask(yes: bool, message: str) -> bool:
2599 res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower()
2600 if res not in {"y", "n"}:
2601 continue
2602- if res == 'y':
2603+ if res == "y":
2604 return True
2605 else:
2606 return False
2607@@ -302,21 +348,25 @@ def _debug_obj(pkg) -> None:
2608 debug(dir(pkg))
2609
2610 for attr in dir(pkg):
2611- if not attr.startswith('__'):
2612+ if not attr.startswith("__"):
2613 if not isinstance(pkg.__getattribute__(attr), types.BuiltinFunctionType):
2614 debug(f"{attr}: {pkg.__getattribute__(attr)}")
2615
2616
2617-def _run_command(command: list or tuple, returncode=(0,), env=None, silent=False) -> (str, str, int):
2618+def _run_command(
2619+ command: list or tuple, returncode=(0,), env=None, silent=False
2620+) -> (str, str, int):
2621 if not silent:
2622 debug("$ " + " ".join(command))
2623- proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
2624+ proc = subprocess.Popen(
2625+ command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
2626+ )
2627 out, err = proc.communicate()
2628
2629 if out:
2630- out = out.decode('utf-8').strip()
2631+ out = out.decode("utf-8").strip()
2632 if err:
2633- err = err.decode('utf-8').strip()
2634+ err = err.decode("utf-8").strip()
2635
2636 if proc.returncode not in returncode:
2637 critical(f"return {proc.returncode}")
2638@@ -339,7 +389,11 @@ def get_oem_meta_packages(cache) -> list:
2639 pkg_list = []
2640 for pkg in cache.packages:
2641 name = pkg.name
2642- if not name.startswith('oem-') or not name.endswith('-meta') or '-factory-' in name:
2643+ if (
2644+ not name.startswith("oem-")
2645+ or not name.endswith("-meta")
2646+ or "-factory-" in name
2647+ ):
2648 continue
2649 pkg_list.append(name)
2650 return sorted(pkg_list)
2651@@ -347,8 +401,8 @@ def get_oem_meta_packages(cache) -> list:
2652
2653 def _grouping_market_names(market_names: list, maxsplit=1) -> str:
2654 # Remove empty item
2655- while '' in market_names:
2656- market_names.remove('')
2657+ while "" in market_names:
2658+ market_names.remove("")
2659 tmp = collections.defaultdict(list)
2660 space_in_model = False
2661 try:
2662@@ -356,28 +410,28 @@ def _grouping_market_names(market_names: list, maxsplit=1) -> str:
2663 if maxsplit == 1:
2664 name, model = market_name.split(maxsplit=maxsplit)
2665 tmp[name].append(model)
2666- if ' ' in model:
2667+ if " " in model:
2668 space_in_model = True
2669 elif maxsplit == 2:
2670 brand, name, model = market_name.split(maxsplit=maxsplit)
2671- tmp[brand + ' ' + name].append(model)
2672- if ' ' in model:
2673+ tmp[brand + " " + name].append(model)
2674+ if " " in model:
2675 space_in_model = True
2676 except ValueError:
2677- return ', '.join(sorted(market_names))
2678+ return ", ".join(sorted(market_names))
2679
2680 if space_in_model:
2681- return ', '.join(f"{name} {', '.join(models)}" for name, models in tmp.items())
2682+ return ", ".join(f"{name} {', '.join(models)}" for name, models in tmp.items())
2683 else:
2684- return ', '.join(f"{name} {'/'.join(models)}" for name, models in tmp.items())
2685+ return ", ".join(f"{name} {'/'.join(models)}" for name, models in tmp.items())
2686
2687
2688 def deal_with_description(git_dir, old, new) -> bool:
2689 if not old or not new:
2690 return False
2691 os.chdir(git_dir)
2692- file_path = os.path.join(git_dir, 'debian', 'control')
2693- with open(file_path, 'r') as control:
2694+ file_path = os.path.join(git_dir, "debian", "control")
2695+ with open(file_path, "r") as control:
2696 lines = control.readlines()
2697 changed = False
2698 for i, line in enumerate(lines):
2699@@ -385,7 +439,7 @@ def deal_with_description(git_dir, old, new) -> bool:
2700 changed = True
2701 lines[i] = line.replace(old, new)
2702 info(f'"{old}" will be replaced by "{new}".')
2703- with open(file_path, 'w') as control:
2704+ with open(file_path, "w") as control:
2705 control.writelines(lines)
2706 _run_command(["git", "add", "debian/control"])
2707 return changed
2708@@ -395,39 +449,41 @@ def deal_with_kernel_flavour(pkg_name, branch, git_dir) -> bool:
2709 os.chdir(git_dir)
2710 idx = -1
2711 kernel_flavour = None
2712- file_path = os.path.join(git_dir, 'debian', 'control')
2713- with open(file_path, 'r') as control:
2714+ file_path = os.path.join(git_dir, "debian", "control")
2715+ with open(file_path, "r") as control:
2716 lines = control.readlines()
2717 for i, line in enumerate(lines):
2718- if line.startswith('XB-Ubuntu-OEM-Kernel-Flavour:'):
2719- kernel_flavour = line[len('XB-Ubuntu-OEM-Kernel-Flavour:'):].strip()
2720+ if line.startswith("XB-Ubuntu-OEM-Kernel-Flavour:"):
2721+ kernel_flavour = line[len("XB-Ubuntu-OEM-Kernel-Flavour:") :].strip()
2722 idx = i
2723 break
2724
2725 if not kernel_flavour:
2726- critical(f"There is no XB-Ubuntu-OEM-Kernel-Flavour in debian/control of {branch} for {pkg_name}.")
2727+ critical(
2728+ f"There is no XB-Ubuntu-OEM-Kernel-Flavour in debian/control of {branch} for {pkg_name}."
2729+ )
2730 exit(1)
2731
2732 debug(f"XB-Ubuntu-OEM-Kernel-Flavour: {kernel_flavour}")
2733
2734- if args.kernel == 'linux-generic-hwe-20.04':
2735- if kernel_flavour == 'default':
2736+ if args.kernel == "linux-generic-hwe-20.04":
2737+ if kernel_flavour == "default":
2738 return False
2739- kernel_flavour = 'default'
2740- elif args.kernel == 'linux-oem-20.04':
2741- if kernel_flavour == 'oem':
2742+ kernel_flavour = "default"
2743+ elif args.kernel == "linux-oem-20.04":
2744+ if kernel_flavour == "oem":
2745 return False
2746- kernel_flavour = 'oem'
2747- elif args.kernel == 'linux-oem-20.04b':
2748- if kernel_flavour == 'oem':
2749+ kernel_flavour = "oem"
2750+ elif args.kernel == "linux-oem-20.04b":
2751+ if kernel_flavour == "oem":
2752 return False
2753- kernel_flavour = 'oem'
2754+ kernel_flavour = "oem"
2755 else:
2756 print(f"{args.kernel} is not supported.")
2757 exit(1)
2758
2759 lines[idx] = f"XB-Ubuntu-OEM-Kernel-Flavour: {kernel_flavour}\n"
2760- with open(file_path, 'w') as control:
2761+ with open(file_path, "w") as control:
2762 control.writelines(lines)
2763 _run_command(["git", "add", "debian/control"])
2764 return True
2765@@ -436,12 +492,12 @@ def deal_with_kernel_flavour(pkg_name, branch, git_dir) -> bool:
2766 def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:
2767 os.chdir(git_dir)
2768 idx = -1
2769- file_path = os.path.join(git_dir, 'debian', 'control')
2770+ file_path = os.path.join(git_dir, "debian", "control")
2771 changed = False
2772- with open(file_path, 'r') as control:
2773+ with open(file_path, "r") as control:
2774 lines = control.readlines()
2775 for i, line in enumerate(lines):
2776- if line.startswith('Depends:'):
2777+ if line.startswith("Depends:"):
2778 idx = i
2779 break
2780
2781@@ -452,24 +508,42 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:
2782 debug(lines[idx].strip())
2783
2784 # this only works for updating auto-generated code
2785- if args.kernel == 'linux-generic-hwe-20.04' and ', linux-generic-hwe-20.04,' not in lines[idx]:
2786- lines[idx] = lines[idx].replace(', linux-oem-20.04,', ', linux-generic-hwe-20.04,')
2787- lines[idx] = lines[idx].replace(', linux-oem-20.04b | linux-oem-20.04,', ', linux-generic-hwe-20.04,')
2788+ if (
2789+ args.kernel == "linux-generic-hwe-20.04"
2790+ and ", linux-generic-hwe-20.04," not in lines[idx]
2791+ ):
2792+ lines[idx] = lines[idx].replace(
2793+ ", linux-oem-20.04,", ", linux-generic-hwe-20.04,"
2794+ )
2795+ lines[idx] = lines[idx].replace(
2796+ ", linux-oem-20.04b | linux-oem-20.04,", ", linux-generic-hwe-20.04,"
2797+ )
2798 changed = True
2799- elif args.kernel == 'linux-oem-20.04' and ', linux-oem-20.04,' not in lines[idx]:
2800- lines[idx] = lines[idx].replace(', linux-generic-hwe-20.04,', ', linux-oem-20.04,')
2801- lines[idx] = lines[idx].replace(', linux-oem-20.04b | linux-oem-20.04,', ', linux-oem-20.04,')
2802+ elif args.kernel == "linux-oem-20.04" and ", linux-oem-20.04," not in lines[idx]:
2803+ lines[idx] = lines[idx].replace(
2804+ ", linux-generic-hwe-20.04,", ", linux-oem-20.04,"
2805+ )
2806+ lines[idx] = lines[idx].replace(
2807+ ", linux-oem-20.04b | linux-oem-20.04,", ", linux-oem-20.04,"
2808+ )
2809 changed = True
2810- elif args.kernel == 'linux-oem-20.04b' and ', linux-oem-20.04b | linux-oem-20.04,' not in lines[idx]:
2811- lines[idx] = lines[idx].replace(', linux-generic-hwe-20.04,', ', linux-oem-20.04b | linux-oem-20.04,')
2812- lines[idx] = lines[idx].replace(', linux-oem-20.04,', ', linux-oem-20.04b | linux-oem-20.04,')
2813+ elif (
2814+ args.kernel == "linux-oem-20.04b"
2815+ and ", linux-oem-20.04b | linux-oem-20.04," not in lines[idx]
2816+ ):
2817+ lines[idx] = lines[idx].replace(
2818+ ", linux-generic-hwe-20.04,", ", linux-oem-20.04b | linux-oem-20.04,"
2819+ )
2820+ lines[idx] = lines[idx].replace(
2821+ ", linux-oem-20.04,", ", linux-oem-20.04b | linux-oem-20.04,"
2822+ )
2823 changed = True
2824
2825 if args.factory:
2826 factory_idx = -1
2827 # Find the factory depends.
2828 for i, line in enumerate(lines):
2829- if i > idx and line.startswith('Depends:'):
2830+ if i > idx and line.startswith("Depends:"):
2831 factory_idx = i
2832 depends_line = lines[factory_idx].strip()
2833 break
2834@@ -478,7 +552,11 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:
2835 critical("It can not find factory 'Depends' in debian/control.")
2836 exit(1)
2837
2838- for kernel in ('linux-generic-hwe-20.04', 'linux-oem-20.04', 'linux-oem-20.04b'):
2839+ for kernel in (
2840+ "linux-generic-hwe-20.04",
2841+ "linux-oem-20.04",
2842+ "linux-oem-20.04b",
2843+ ):
2844 if depends_line.endswith(kernel) or f"{kernel}," in depends_line:
2845 if kernel != args.kernel:
2846 lines[factory_idx] = lines[factory_idx].replace(kernel, args.kernel)
2847@@ -488,7 +566,7 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:
2848 lines[factory_idx] = depends_line + f", {args.kernel}\n"
2849 changed = True
2850
2851- with open(file_path, 'w') as control:
2852+ with open(file_path, "w") as control:
2853 control.writelines(lines)
2854
2855 if changed:
2856@@ -497,12 +575,14 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:
2857 return changed
2858
2859
2860-def deal_with_debian_tests(pkg_name: str, git_dir: str, branch: str, bootstrap: bool) -> bool:
2861+def deal_with_debian_tests(
2862+ pkg_name: str, git_dir: str, branch: str, bootstrap: bool
2863+) -> bool:
2864 os.chdir(git_dir)
2865
2866 changed = False
2867
2868- tests_folder = os.path.join(git_dir, 'debian', 'tests')
2869+ tests_folder = os.path.join(git_dir, "debian", "tests")
2870 if not os.path.exists(tests_folder):
2871 os.mkdir(tests_folder)
2872
2873@@ -510,17 +590,17 @@ def deal_with_debian_tests(pkg_name: str, git_dir: str, branch: str, bootstrap:
2874 Depends: @
2875 Restrictions: needs-root
2876 """
2877- control = os.path.join(git_dir, 'debian', 'tests', 'control')
2878+ control = os.path.join(git_dir, "debian", "tests", "control")
2879
2880 if os.path.exists(control):
2881- with open(control, 'r') as f:
2882+ with open(control, "r") as f:
2883 if f.read() != control_content:
2884- with open(control, 'w') as fp:
2885+ with open(control, "w") as fp:
2886 fp.write(control_content)
2887 _run_command(["git", "add", "debian/tests/control"])
2888 changed = True
2889 else:
2890- with open(control, 'w') as fp:
2891+ with open(control, "w") as fp:
2892 fp.write(control_content)
2893 _run_command(["git", "add", "debian/tests/control"])
2894 changed = True
2895@@ -538,29 +618,37 @@ apt-get update
2896 apt-get full-upgrade --yes
2897 """
2898 if not bootstrap:
2899- if 'oem' in args.kernel:
2900- grub_flavour = 'oem'
2901+ if "oem" in args.kernel:
2902+ grub_flavour = "oem"
2903 else:
2904- grub_flavour = 'generic'
2905- meta_content += '\ngrep ^GRUB_FLAVOUR_ORDER=' + grub_flavour + '$ /etc/default/grub.d/oem-flavour.cfg\n'
2906- meta_content += '\ndpkg-query -W -f=\'${Status}\' ' + args.kernel + ' | grep "install ok installed"\n'
2907+ grub_flavour = "generic"
2908+ meta_content += (
2909+ "\ngrep ^GRUB_FLAVOUR_ORDER="
2910+ + grub_flavour
2911+ + "$ /etc/default/grub.d/oem-flavour.cfg\n"
2912+ )
2913+ meta_content += (
2914+ "\ndpkg-query -W -f='${Status}' "
2915+ + args.kernel
2916+ + ' | grep "install ok installed"\n'
2917+ )
2918 meta_content += f"\napt-get autoremove --purge --yes {pkg_name}\n"
2919- meta = os.path.join(git_dir, 'debian', 'tests', 'meta')
2920- old_meta = os.path.join(git_dir, 'debian', 'tests', pkg_name)
2921+ meta = os.path.join(git_dir, "debian", "tests", "meta")
2922+ old_meta = os.path.join(git_dir, "debian", "tests", pkg_name)
2923
2924 if os.path.exists(old_meta):
2925 _run_command(["git", "rm", "-f", f"debian/tests/{pkg_name}"])
2926 changed = True
2927
2928 if os.path.exists(meta):
2929- with open(meta, 'r') as f:
2930+ with open(meta, "r") as f:
2931 if f.read() != meta_content:
2932- with open(meta, 'w') as fp:
2933+ with open(meta, "w") as fp:
2934 fp.write(meta_content)
2935 _run_command(["git", "add", "debian/tests/meta"])
2936 changed = True
2937 else:
2938- with open(meta, 'w') as fp:
2939+ with open(meta, "w") as fp:
2940 fp.write(meta_content)
2941 _run_command(["git", "add", "debian/tests/meta"])
2942 changed = True
2943@@ -570,29 +658,30 @@ apt-get full-upgrade --yes
2944
2945 def deal_with_gbp_conf(git_dir, branch) -> bool:
2946 os.chdir(git_dir)
2947- file_path = os.path.join(git_dir, 'debian', 'gbp.conf')
2948+ file_path = os.path.join(git_dir, "debian", "gbp.conf")
2949 gbp_conf = f"""[DEFAULT]
2950 pristine-tar = False
2951 debian-branch = {branch}
2952 debian-tag = {branch}_%(version)s
2953 """
2954 if os.path.exists(file_path):
2955- with open(file_path, 'r') as f:
2956+ with open(file_path, "r") as f:
2957 if f.read() == gbp_conf:
2958 return False
2959- with open(file_path, 'w') as f:
2960+ with open(file_path, "w") as f:
2961 f.write(gbp_conf)
2962 _run_command(["git", "add", "debian/gbp.conf"])
2963 return True
2964
2965
2966 def deal_with_maintainer_scripts(pkg_name, branch, git_dir) -> bool:
2967- postinst_path = os.path.join(git_dir, 'debian', 'postinst')
2968- postrm_path = os.path.join(git_dir, 'debian', 'postrm')
2969+ postinst_path = os.path.join(git_dir, "debian", "postinst")
2970+ postrm_path = os.path.join(git_dir, "debian", "postrm")
2971 modified = False
2972
2973- with open(postinst_path, 'w') as f:
2974- f.write(f'''#!/bin/sh
2975+ with open(postinst_path, "w") as f:
2976+ f.write(
2977+ f"""#!/bin/sh
2978
2979 set -e
2980
2981@@ -609,14 +698,16 @@ case "$1" in
2982 esac
2983
2984 #DEBHELPER#
2985-''')
2986+"""
2987+ )
2988 _run_command(["git", "add", "debian/postinst"])
2989 output, _, _ = _run_command(["git", "status", "--porcelain", "debian/postinst"])
2990 if output:
2991 modified = True
2992
2993- with open(postrm_path, 'w') as f:
2994- f.write('''#!/bin/sh
2995+ with open(postrm_path, "w") as f:
2996+ f.write(
2997+ """#!/bin/sh
2998
2999 set -e
3000
3001@@ -632,7 +723,8 @@ case "$1" in
3002 esac
3003
3004 #DEBHELPER#
3005-''')
3006+"""
3007+ )
3008 _run_command(["git", "add", "debian/postrm"])
3009 output, _, _ = _run_command(["git", "status", "--porcelain", "debian/postrm"])
3010 if output:
3011@@ -644,39 +736,41 @@ esac
3012 def deal_with_grub_flavour(pkg_name, branch, git_dir) -> bool:
3013 os.chdir(git_dir)
3014 grub_flavour = None
3015- file_path = os.path.join(git_dir, 'oem-flavour.cfg')
3016+ file_path = os.path.join(git_dir, "oem-flavour.cfg")
3017 if os.path.exists(file_path):
3018- with open(file_path, 'r') as oem_flavour:
3019+ with open(file_path, "r") as oem_flavour:
3020 for line in oem_flavour:
3021- if line.startswith('GRUB_FLAVOUR_ORDER='):
3022- grub_flavour = line[len('GRUB_FLAVOUR_ORDER='):].strip()
3023+ if line.startswith("GRUB_FLAVOUR_ORDER="):
3024+ grub_flavour = line[len("GRUB_FLAVOUR_ORDER=") :].strip()
3025 break
3026
3027- if args.kernel == 'linux-generic-hwe-20.04':
3028- if grub_flavour == 'generic':
3029+ if args.kernel == "linux-generic-hwe-20.04":
3030+ if grub_flavour == "generic":
3031 return False
3032- grub_flavour = 'generic'
3033- elif args.kernel == 'linux-oem-20.04':
3034- if grub_flavour == 'oem':
3035+ grub_flavour = "generic"
3036+ elif args.kernel == "linux-oem-20.04":
3037+ if grub_flavour == "oem":
3038 return False
3039- grub_flavour = 'oem'
3040- elif args.kernel == 'linux-oem-20.04b':
3041- if grub_flavour == 'oem':
3042+ grub_flavour = "oem"
3043+ elif args.kernel == "linux-oem-20.04b":
3044+ if grub_flavour == "oem":
3045 return False
3046- grub_flavour = 'oem'
3047+ grub_flavour = "oem"
3048 else:
3049 print(f"{args.kernel} is not supported.")
3050 exit(1)
3051
3052 if not os.path.exists(file_path):
3053- with open(os.path.join(git_dir, 'debian', 'install'), 'a') as f:
3054+ with open(os.path.join(git_dir, "debian", "install"), "a") as f:
3055 f.write(f"oem-flavour.cfg /usr/share/{pkg_name}/\n")
3056 _run_command(["git", "add", "debian/install"])
3057
3058- with open(file_path, 'w') as f:
3059- f.write(f"""# This file is automatically generated by {pkg_name}, and changes will be overriden
3060+ with open(file_path, "w") as f:
3061+ f.write(
3062+ f"""# This file is automatically generated by {pkg_name}, and changes will be overriden
3063 GRUB_FLAVOUR_ORDER={grub_flavour}
3064-""")
3065+"""
3066+ )
3067 _run_command(["git", "add", "oem-flavour.cfg"])
3068
3069 return True
3070@@ -684,29 +778,31 @@ GRUB_FLAVOUR_ORDER={grub_flavour}
3071
3072 # Python 3.9 supports this.
3073 def remove_prefix(s, prefix):
3074- return s[len(prefix):] if s.startswith(prefix) else s
3075+ return s[len(prefix) :] if s.startswith(prefix) else s
3076
3077
3078 # Python 3.9 supports this.
3079 def remove_suffix(s, suffix):
3080- return s[:-len(suffix)] if s.endswith(suffix) else s
3081+ return s[: -len(suffix)] if s.endswith(suffix) else s
3082
3083
3084 def remove_prefix_suffix(s, prefix, suffix):
3085 return remove_suffix(remove_prefix(s, prefix), suffix)
3086
3087
3088-def search_ppa_and_version(project: str, group: str, platform: str, pkg_name: str, archive_name=None):
3089+def search_ppa_and_version(
3090+ project: str, group: str, platform: str, pkg_name: str, archive_name=None
3091+):
3092 if archive_name:
3093 archive = oem_archive.getPPAByName(name=archive_name)
3094- elif project == 'somerville':
3095+ elif project == "somerville":
3096 try:
3097 archive = oem_archive.getPPAByName(name=f"{project}-fossa-{platform}")
3098 except lazr.restfulclient.errors.NotFound:
3099 archive = oem_archive.getPPAByName(name=f"{project}-{platform}")
3100- elif project == 'stella':
3101+ elif project == "stella":
3102 archive = oem_archive.getPPAByName(name=f"{project}-{group}-ouagadougou")
3103- elif project == 'sutton':
3104+ elif project == "sutton":
3105 try:
3106 archive = oem_archive.getPPAByName(name=f"{project}-{group}-ouagadougou")
3107 except lazr.restfulclient.errors.NotFound:
3108@@ -718,7 +814,7 @@ def search_ppa_and_version(project: str, group: str, platform: str, pkg_name: st
3109 archive.newSubscription(subscriber=lp.me)
3110 archive.lp_save()
3111 except lazr.restfulclient.errors.BadRequest as e:
3112- if 'already has a current subscription for' not in str(e):
3113+ if "already has a current subscription for" not in str(e):
3114 raise e
3115 _run_command(["get-private-ppa", f"ppa:oem-archive/{archive.name}"])
3116 source_lists = "\n".join(lp.me.getArchiveSubscriptionURLs())
3117@@ -726,8 +822,10 @@ def search_ppa_and_version(project: str, group: str, platform: str, pkg_name: st
3118 fingerprint = archive.signing_key_fingerprint
3119 version = ""
3120 for source in sources:
3121- if source.source_package_name == pkg_name and \
3122- apt_pkg.version_compare(source.source_package_version, version) > 0:
3123+ if (
3124+ source.source_package_name == pkg_name
3125+ and apt_pkg.version_compare(source.source_package_version, version) > 0
3126+ ):
3127 version = source.source_package_version
3128 if version:
3129 return archive.name, version, fingerprint
3130@@ -741,8 +839,8 @@ def get_debian_version_from_git(pkg_name: str) -> str:
3131 if not result:
3132 return None
3133
3134- if '.' in result.group(1):
3135- project, group = result.group(1).split('.')
3136+ if "." in result.group(1):
3137+ project, group = result.group(1).split(".")
3138 else:
3139 project = result.group(1)
3140 group = None
3141@@ -756,137 +854,215 @@ def get_debian_version_from_git(pkg_name: str) -> str:
3142 ubuntu_branch = f"{platform}-focal-ubuntu"
3143 oem_branch = f"{platform}-focal-oem"
3144
3145- wget_changelog_command = ("wget", '-q', "-O", "changelog",
3146- f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={ubuntu_branch}")
3147+ wget_changelog_command = (
3148+ "wget",
3149+ "-q",
3150+ "-O",
3151+ "changelog",
3152+ f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={ubuntu_branch}",
3153+ )
3154
3155 bootstrap_version = ""
3156 with TemporaryDirectory() as tmpdir:
3157 os.chdir(tmpdir)
3158 _run_command(wget_changelog_command)
3159- bootstrap_version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"])
3160-
3161- wget_changelog_command = ("wget", '-q', "-O", "changelog",
3162- f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={oem_branch}")
3163+ bootstrap_version, _, _ = _run_command(
3164+ ["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"]
3165+ )
3166+
3167+ wget_changelog_command = (
3168+ "wget",
3169+ "-q",
3170+ "-O",
3171+ "changelog",
3172+ f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={oem_branch}",
3173+ )
3174
3175 oem_version = ""
3176 with TemporaryDirectory() as tmpdir:
3177 os.chdir(tmpdir)
3178 _run_command(wget_changelog_command)
3179- oem_version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"])
3180+ oem_version, _, _ = _run_command(
3181+ ["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"]
3182+ )
3183
3184 return bootstrap_version, oem_version
3185
3186
3187 def search_public_archive(pkg_name: str, project: str, codename: str) -> tuple:
3188- if project == 'somerville':
3189- source_line = 'http://dell.archive.canonical.com/'
3190+ if project == "somerville":
3191+ source_line = "http://dell.archive.canonical.com/"
3192 archive = f"somerville-{codename}"
3193- elif project == 'stella':
3194- source_line = 'http://hp.archive.canonical.com/'
3195+ elif project == "stella":
3196+ source_line = "http://hp.archive.canonical.com/"
3197 archive = f"stella.{codename}"
3198- elif project == 'sutton':
3199- source_line = 'http://lenovo.archive.canonical.com/'
3200+ elif project == "sutton":
3201+ source_line = "http://lenovo.archive.canonical.com/"
3202 archive = f"sutton.{codename}"
3203 oem_version = ""
3204 with TemporaryDirectory() as tmpdir:
3205 os.chdir(tmpdir)
3206- _run_command(['setup-apt-dir.sh',
3207- '-c', 'focal',
3208- '--disable-base',
3209- '--disable-updates',
3210- '--disable-backports',
3211- '--apt-dir', tmpdir,
3212- '--extra-key', '59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69',
3213- '--extra-repo', f"deb [signed-by={tmpdir}/59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69.pub arch=amd64] {source_line} focal {archive}"],
3214- silent=True)
3215- output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True)
3216- for line in output.split('\n'):
3217+ _run_command(
3218+ [
3219+ "setup-apt-dir.sh",
3220+ "-c",
3221+ "focal",
3222+ "--disable-base",
3223+ "--disable-updates",
3224+ "--disable-backports",
3225+ "--apt-dir",
3226+ tmpdir,
3227+ "--extra-key",
3228+ "59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69",
3229+ "--extra-repo",
3230+ f"deb [signed-by={tmpdir}/59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69.pub arch=amd64] {source_line} focal {archive}",
3231+ ],
3232+ silent=True,
3233+ )
3234+ output, _, _ = _run_command(
3235+ ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name],
3236+ returncode=(0, 1),
3237+ silent=True,
3238+ )
3239+ for line in output.split("\n"):
3240 if pkg_name in line and source_line in line:
3241- oem_version = line.split(' ')[1]
3242- info(f"{pkg_name} {oem_version} exists in 'deb {source_line} focal {archive}'.")
3243+ oem_version = line.split(" ")[1]
3244+ info(
3245+ f"{pkg_name} {oem_version} exists in 'deb {source_line} focal {archive}'."
3246+ )
3247 break
3248 ubuntu_version = ""
3249 with TemporaryDirectory() as tmpdir:
3250 os.chdir(tmpdir)
3251- _run_command(['setup-apt-dir.sh',
3252- '-c', 'focal',
3253- '--disable-backports',
3254- '--apt-dir', tmpdir],
3255- silent=True)
3256- output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True)
3257- for line in output.split('\n'):
3258- if pkg_name in line and 'http://archive.ubuntu.com/ubuntu' in line:
3259- ubuntu_version = line.split(' ')[1]
3260+ _run_command(
3261+ [
3262+ "setup-apt-dir.sh",
3263+ "-c",
3264+ "focal",
3265+ "--disable-backports",
3266+ "--apt-dir",
3267+ tmpdir,
3268+ ],
3269+ silent=True,
3270+ )
3271+ output, _, _ = _run_command(
3272+ ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name],
3273+ returncode=(0, 1),
3274+ silent=True,
3275+ )
3276+ for line in output.split("\n"):
3277+ if pkg_name in line and "http://archive.ubuntu.com/ubuntu" in line:
3278+ ubuntu_version = line.split(" ")[1]
3279 info(f"{pkg_name} {ubuntu_version} exists in Ubuntu archive.")
3280 break
3281 proposed_version = ""
3282 with TemporaryDirectory() as tmpdir:
3283 os.chdir(tmpdir)
3284- _run_command(['setup-apt-dir.sh',
3285- '-c', 'focal',
3286- '--proposed',
3287- '--disable-base',
3288- '--disable-updates',
3289- '--disable-backports',
3290- '--apt-dir', tmpdir],
3291- silent=True)
3292- output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True)
3293- for line in output.split('\n'):
3294- if pkg_name in line and 'http://archive.ubuntu.com/ubuntu' in line:
3295- proposed_version = line.split(' ')[1]
3296+ _run_command(
3297+ [
3298+ "setup-apt-dir.sh",
3299+ "-c",
3300+ "focal",
3301+ "--proposed",
3302+ "--disable-base",
3303+ "--disable-updates",
3304+ "--disable-backports",
3305+ "--apt-dir",
3306+ tmpdir,
3307+ ],
3308+ silent=True,
3309+ )
3310+ output, _, _ = _run_command(
3311+ ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name],
3312+ returncode=(0, 1),
3313+ silent=True,
3314+ )
3315+ for line in output.split("\n"):
3316+ if pkg_name in line and "http://archive.ubuntu.com/ubuntu" in line:
3317+ proposed_version = line.split(" ")[1]
3318 info(f"{pkg_name} {proposed_version} exists in focal-proposed.")
3319 break
3320 return ubuntu_version, proposed_version, oem_version, archive
3321
3322
3323-def search_private_archive(pkg_name: str, project: str, platform: str, index: str, config: str, branch: str) -> tuple:
3324- domain = config['archive'].split("://")[1].split("/")[0]
3325+def search_private_archive(
3326+ pkg_name: str, project: str, platform: str, index: str, config: str, branch: str
3327+) -> tuple:
3328+ domain = config["archive"].split("://")[1].split("/")[0]
3329 archive = None
3330 version = None
3331- for line in index.split('\n'):
3332- if project in line and platform in line and f'focal-{branch}' in line:
3333+ for line in index.split("\n"):
3334+ if project in line and platform in line and f"focal-{branch}" in line:
3335 result = staging_pattern.match(line)
3336 if result:
3337 archive = result.group(1)
3338 with TemporaryDirectory() as tmpdir:
3339 os.chdir(tmpdir)
3340- source_line = config['archive'].replace("https://", f"https://{config['username']}:{config['password']}@")
3341- _run_command(['setup-apt-dir.sh',
3342- '-c', 'focal',
3343- '--disable-updates',
3344- '--disable-backports',
3345- '--apt-dir', tmpdir,
3346- '--extra-key', config['fingerprint'],
3347- '--extra-repo', f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public"],
3348- silent=True)
3349- output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True)
3350- for line in output.split('\n'):
3351+ source_line = config["archive"].replace(
3352+ "https://", f"https://{config['username']}:{config['password']}@"
3353+ )
3354+ _run_command(
3355+ [
3356+ "setup-apt-dir.sh",
3357+ "-c",
3358+ "focal",
3359+ "--disable-updates",
3360+ "--disable-backports",
3361+ "--apt-dir",
3362+ tmpdir,
3363+ "--extra-key",
3364+ config["fingerprint"],
3365+ "--extra-repo",
3366+ f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public",
3367+ ],
3368+ silent=True,
3369+ )
3370+ output, _, _ = _run_command(
3371+ ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name],
3372+ returncode=(0, 1),
3373+ silent=True,
3374+ )
3375+ for line in output.split("\n"):
3376 if pkg_name in line and domain in line:
3377- version = line.split(' ')[1]
3378+ version = line.split(" ")[1]
3379 break
3380 if version is None and project == "somerville":
3381 archive = f"somerville-focal-{branch}"
3382 with TemporaryDirectory() as tmpdir:
3383 os.chdir(tmpdir)
3384- source_line = config['archive'].replace("https://", f"https://{config['username']}:{config['password']}@")
3385- _run_command(['setup-apt-dir.sh',
3386- '-c', 'focal',
3387- '--disable-updates',
3388- '--disable-backports',
3389- '--apt-dir', tmpdir,
3390- '--extra-key', config['fingerprint'],
3391- '--extra-repo', f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public"],
3392- silent=True)
3393- output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], silent=True)
3394- for line in output.split('\n'):
3395+ source_line = config["archive"].replace(
3396+ "https://", f"https://{config['username']}:{config['password']}@"
3397+ )
3398+ _run_command(
3399+ [
3400+ "setup-apt-dir.sh",
3401+ "-c",
3402+ "focal",
3403+ "--disable-updates",
3404+ "--disable-backports",
3405+ "--apt-dir",
3406+ tmpdir,
3407+ "--extra-key",
3408+ config["fingerprint"],
3409+ "--extra-repo",
3410+ f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public",
3411+ ],
3412+ silent=True,
3413+ )
3414+ output, _, _ = _run_command(
3415+ ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name], silent=True
3416+ )
3417+ for line in output.split("\n"):
3418 if pkg_name in line and domain in line:
3419- version = line.split(' ')[1]
3420+ version = line.split(" ")[1]
3421 break
3422
3423 return (archive, version)
3424
3425
3426-def collect_pkg_info(data, check_private: bool = False, index=None, config=None) -> dict:
3427+def collect_pkg_info(
3428+ data, check_private: bool = False, index=None, config=None
3429+) -> dict:
3430 if type(data) is str:
3431 result = pattern.match(data)
3432
3433@@ -894,8 +1070,8 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3434 print(f"{data} is not supported.")
3435 exit(1)
3436
3437- if '.' in result.group(1):
3438- project, group = result.group(1).split('.')
3439+ if "." in result.group(1):
3440+ project, group = result.group(1).split(".")
3441 else:
3442 project = result.group(1)
3443 group = "N/A"
3444@@ -917,14 +1093,16 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3445 print(f"{data} is not supported.")
3446 exit(1)
3447
3448- json_data = json.loads(f"""[{{
3449+ json_data = json.loads(
3450+ f"""[{{
3451 "Customer": "{customer}",
3452 "Group": "{group}",
3453 "Codename": "{codename}",
3454 "Platform": "",
3455 "MarketName": "",
3456 "PlatformLPTag": "{tag}"
3457-}}]""")
3458+}}]"""
3459+ )
3460 else:
3461 json_data = json.load(data)
3462
3463@@ -935,27 +1113,27 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3464 sutton = dict()
3465
3466 for item in json_data:
3467- customer = item['Customer'].lower()
3468- platform = item['Platform'].lower()
3469- codename = item['Codename'].lower()
3470- group = item['Group'].lower()
3471- market_name = item['MarketName']
3472- lp_tag = item['PlatformLPTag'].lower()
3473- if 'dell' in customer:
3474- if 'somerville' in args.skip:
3475+ customer = item["Customer"].lower()
3476+ platform = item["Platform"].lower()
3477+ codename = item["Codename"].lower()
3478+ group = item["Group"].lower()
3479+ market_name = item["MarketName"]
3480+ lp_tag = item["PlatformLPTag"].lower()
3481+ if "dell" in customer:
3482+ if "somerville" in args.skip:
3483 continue
3484- platform = remove_prefix(lp_tag, 'fossa-')
3485+ platform = remove_prefix(lp_tag, "fossa-")
3486 lst = somerville.get(platform, [])
3487 lst.append(market_name)
3488 somerville[platform] = lst
3489- elif 'hp' in customer:
3490- if 'stella' in args.skip:
3491+ elif "hp" in customer:
3492+ if "stella" in args.skip:
3493 continue
3494 lst = stella.get(f"{group}-{codename}", [])
3495 lst.append(market_name)
3496 stella[f"{group}-{codename}"] = lst
3497- elif 'lenovo' in customer:
3498- if 'sutton' in args.skip:
3499+ elif "lenovo" in customer:
3500+ if "sutton" in args.skip:
3501 continue
3502 lst = sutton.get(f"{group}-{codename}", [])
3503 lst.append(market_name)
3504@@ -975,37 +1153,58 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3505 info("Finding the corresponding PPAs...")
3506
3507 for codename, v in somerville.items():
3508- pkg_name = 'oem-somerville-' + codename + '-meta'
3509+ pkg_name = "oem-somerville-" + codename + "-meta"
3510 if args.only and pkg_name != args.only:
3511 warning(f"Skip {pkg_name}")
3512 continue
3513 if pkg_name in args.skip:
3514 warning(f"Skip {pkg_name}")
3515 continue
3516- ppa_archive, ppa_version, fingerprint = search_ppa_and_version("somerville", None, codename, pkg_name)
3517+ ppa_archive, ppa_version, fingerprint = search_ppa_and_version(
3518+ "somerville", None, codename, pkg_name
3519+ )
3520 if ppa_archive is None:
3521- ppa_archive, ppa_version, fingerprint = search_ppa_and_version("somerville", None, codename, pkg_name, "somerville")
3522+ ppa_archive, ppa_version, fingerprint = search_ppa_and_version(
3523+ "somerville", None, codename, pkg_name, "somerville"
3524+ )
3525 if ppa_archive is None:
3526 critical(f"It can not find any private PPA that contains {pkg_name}.")
3527 exit(1)
3528
3529 bootstrap_version, real_version = get_debian_version_from_git(pkg_name)
3530 if ppa_version != real_version:
3531- warning(f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}.")
3532+ warning(
3533+ f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}."
3534+ )
3535 info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}.")
3536
3537- ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(pkg_name, "somerville", codename)
3538-
3539- pkgInfo[pkg_name] = PkgInfo(ppa_archive=ppa_archive, ppa_version=ppa_version,
3540- bootstrap_version=bootstrap_version, real_version=real_version, git_version=real_version,
3541- old_desc="", new_desc="", fingerprint=fingerprint,
3542- staging_archive="", staging_version="",
3543- devel_archive="", devel_version="",
3544- oem_archive=oem_archive, oem_version=oem_version,
3545- ubuntu_version=ubuntu_version, proposed_version=proposed_version)
3546+ ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(
3547+ pkg_name, "somerville", codename
3548+ )
3549+
3550+ pkgInfo[pkg_name] = PkgInfo(
3551+ ppa_archive=ppa_archive,
3552+ ppa_version=ppa_version,
3553+ bootstrap_version=bootstrap_version,
3554+ real_version=real_version,
3555+ git_version=real_version,
3556+ old_desc="",
3557+ new_desc="",
3558+ fingerprint=fingerprint,
3559+ staging_archive="",
3560+ staging_version="",
3561+ devel_archive="",
3562+ devel_version="",
3563+ oem_archive=oem_archive,
3564+ oem_version=oem_version,
3565+ ubuntu_version=ubuntu_version,
3566+ proposed_version=proposed_version,
3567+ )
3568
3569 if check_private:
3570- staging_archive, staging_version = search_private_archive(pkg_name, "somerville", codename, index, config, branch="staging")
3571+ staging_archive, staging_version = search_private_archive(
3572+ pkg_name, "somerville", codename, index, config, branch="staging"
3573+ )
3574 pkgInfo[pkg_name].staging_archive = staging_archive
3575 if staging_version:
3576 info(f"{pkg_name} {staging_version} exists in {staging_archive}.")
3577@@ -1013,7 +1212,9 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3578 else:
3579 debug(f"{pkg_name} doesn't exist in {staging_archive} yet.")
3580
3581- devel_archive, devel_version = search_private_archive(pkg_name, "somerville", codename, index, config, branch="devel")
3582+ devel_archive, devel_version = search_private_archive(
3583+ pkg_name, "somerville", codename, index, config, branch="devel"
3584+ )
3585 pkgInfo[pkg_name].devel_archive = devel_archive
3586 if devel_version:
3587 info(f"{pkg_name} {devel_version} exists in {devel_archive}.")
3588@@ -1021,7 +1222,7 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3589 else:
3590 debug(f"{pkg_name} doesn't exist in {devel_archive} yet.")
3591
3592- if pkg_name in pkgNamesInArchive and ''.join(v):
3593+ if pkg_name in pkgNamesInArchive and "".join(v):
3594 new_desc = _grouping_market_names(v)
3595 if "Dell" not in new_desc:
3596 new_desc = "Dell " + new_desc
3597@@ -1029,37 +1230,56 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3598 pkgInfo[pkg_name].new_desc = new_desc
3599
3600 for k, v in stella.items():
3601- pkg_name = 'oem-stella.' + k + '-meta'
3602+ pkg_name = "oem-stella." + k + "-meta"
3603 if args.only and pkg_name != args.only:
3604 warning(f"Skip {pkg_name}")
3605 continue
3606 if pkg_name in args.skip:
3607 warning(f"Skip {pkg_name}")
3608 continue
3609- group, codename = k.split('-', 1)
3610+ group, codename = k.split("-", 1)
3611
3612- ppa_archive, ppa_version, fingerprint = search_ppa_and_version("stella", group, codename, pkg_name)
3613+ ppa_archive, ppa_version, fingerprint = search_ppa_and_version(
3614+ "stella", group, codename, pkg_name
3615+ )
3616 if ppa_archive is None:
3617 critical(f"It can not find any private PPA that contains {pkg_name}.")
3618 exit(1)
3619
3620 bootstrap_version, real_version = get_debian_version_from_git(pkg_name)
3621 if ppa_version != real_version:
3622- warning(f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}.")
3623+ warning(
3624+ f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}."
3625+ )
3626 info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}")
3627
3628- ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(pkg_name, "stella", group)
3629-
3630- pkgInfo[pkg_name] = PkgInfo(ppa_archive=ppa_archive, ppa_version=ppa_version,
3631- bootstrap_version=bootstrap_version, real_version=real_version, git_version=real_version,
3632- old_desc="", new_desc="", fingerprint=fingerprint,
3633- staging_archive="", staging_version="",
3634- devel_archive="", devel_version="",
3635- oem_archive=oem_archive, oem_version=oem_version,
3636- ubuntu_version=ubuntu_version, proposed_version=proposed_version)
3637+ ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(
3638+ pkg_name, "stella", group
3639+ )
3640+
3641+ pkgInfo[pkg_name] = PkgInfo(
3642+ ppa_archive=ppa_archive,
3643+ ppa_version=ppa_version,
3644+ bootstrap_version=bootstrap_version,
3645+ real_version=real_version,
3646+ git_version=real_version,
3647+ old_desc="",
3648+ new_desc="",
3649+ fingerprint=fingerprint,
3650+ staging_archive="",
3651+ staging_version="",
3652+ devel_archive="",
3653+ devel_version="",
3654+ oem_archive=oem_archive,
3655+ oem_version=oem_version,
3656+ ubuntu_version=ubuntu_version,
3657+ proposed_version=proposed_version,
3658+ )
3659
3660 if check_private:
3661- staging_archive, staging_version = search_private_archive(pkg_name, "stella", group, index, config, branch="staging")
3662+ staging_archive, staging_version = search_private_archive(
3663+ pkg_name, "stella", group, index, config, branch="staging"
3664+ )
3665 pkgInfo[pkg_name].staging_archive = staging_archive
3666 if staging_version:
3667 info(f"{pkg_name} {staging_version} exists in {staging_archive}.")
3668@@ -1067,7 +1287,9 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3669 else:
3670 debug(f"{pkg_name} doesn't exist in {staging_archive} yet.")
3671
3672- devel_archive, devel_version = search_private_archive(pkg_name, "stella", group, index, config, branch="devel")
3673+ devel_archive, devel_version = search_private_archive(
3674+ pkg_name, "stella", group, index, config, branch="devel"
3675+ )
3676 pkgInfo[pkg_name].devel_archive = devel_archive
3677 if devel_version:
3678 info(f"{pkg_name} {devel_version} exists in {devel_archive}.")
3679@@ -1075,45 +1297,66 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3680 else:
3681 debug(f"{pkg_name} doesn't exist in {devel_archive} yet.")
3682
3683- if pkg_name in pkgNamesInArchive and ''.join(v):
3684+ if pkg_name in pkgNamesInArchive and "".join(v):
3685 new_desc = _grouping_market_names(v, maxsplit=2)
3686 if "HP" not in new_desc:
3687 new_desc = "HP " + new_desc
3688- pkgInfo[pkg_name].old_desc = f"Stella {group.title()} {codename.title()} platform"
3689+ pkgInfo[
3690+ pkg_name
3691+ ].old_desc = f"Stella {group.title()} {codename.title()} platform"
3692 pkgInfo[pkg_name].new_desc = new_desc
3693
3694 for k, v in sutton.items():
3695- pkg_name = 'oem-sutton.' + k + '-meta'
3696+ pkg_name = "oem-sutton." + k + "-meta"
3697 if args.only and pkg_name != args.only:
3698 warning(f"Skip {pkg_name}")
3699 continue
3700 if pkg_name in args.skip:
3701 warning(f"Skip {pkg_name}")
3702 continue
3703- group, codename = k.split('-', 1)
3704+ group, codename = k.split("-", 1)
3705
3706- ppa_archive, ppa_version, fingerprint = search_ppa_and_version("sutton", group, codename, pkg_name)
3707+ ppa_archive, ppa_version, fingerprint = search_ppa_and_version(
3708+ "sutton", group, codename, pkg_name
3709+ )
3710 if ppa_archive is None:
3711 critical(f"It can not find any private PPA that contains {pkg_name}.")
3712 exit(1)
3713
3714 bootstrap_version, real_version = get_debian_version_from_git(pkg_name)
3715 if ppa_version != real_version:
3716- warning(f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}.")
3717+ warning(
3718+ f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}."
3719+ )
3720 info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}")
3721
3722- ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(pkg_name, "sutton", group)
3723-
3724- pkgInfo[pkg_name] = PkgInfo(ppa_archive=ppa_archive, ppa_version=ppa_version,
3725- bootstrap_version=bootstrap_version, real_version=real_version, git_version=real_version,
3726- old_desc="", new_desc="", fingerprint=fingerprint,
3727- staging_archive="", staging_version="",
3728- devel_archive="", devel_version="",
3729- oem_archive=oem_archive, oem_version=oem_version,
3730- ubuntu_version=ubuntu_version, proposed_version=proposed_version)
3731+ ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(
3732+ pkg_name, "sutton", group
3733+ )
3734+
3735+ pkgInfo[pkg_name] = PkgInfo(
3736+ ppa_archive=ppa_archive,
3737+ ppa_version=ppa_version,
3738+ bootstrap_version=bootstrap_version,
3739+ real_version=real_version,
3740+ git_version=real_version,
3741+ old_desc="",
3742+ new_desc="",
3743+ fingerprint=fingerprint,
3744+ staging_archive="",
3745+ staging_version="",
3746+ devel_archive="",
3747+ devel_version="",
3748+ oem_archive=oem_archive,
3749+ oem_version=oem_version,
3750+ ubuntu_version=ubuntu_version,
3751+ proposed_version=proposed_version,
3752+ )
3753
3754 if check_private:
3755- staging_archive, staging_version = search_private_archive(pkg_name, "sutton", group, index, config, branch="staging")
3756+ staging_archive, staging_version = search_private_archive(
3757+ pkg_name, "sutton", group, index, config, branch="staging"
3758+ )
3759 pkgInfo[pkg_name].staging_archive = staging_archive
3760 if staging_version:
3761 info(f"{pkg_name} {staging_version} exists in {staging_archive}.")
3762@@ -1121,7 +1364,9 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3763 else:
3764 debug(f"{pkg_name} doesn't exist in {staging_archive} yet.")
3765
3766- devel_archive, devel_version = search_private_archive(pkg_name, "sutton", group, index, config, branch="devel")
3767+ devel_archive, devel_version = search_private_archive(
3768+ pkg_name, "sutton", group, index, config, branch="devel"
3769+ )
3770 pkgInfo[pkg_name].devel_archive = devel_archive
3771 if devel_version:
3772 info(f"{pkg_name} {devel_version} exists in {devel_archive}.")
3773@@ -1129,11 +1374,13 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
3774 else:
3775 debug(f"{pkg_name} doesn't exist in {devel_archive} yet.")
3776
3777- if pkg_name in pkgNamesInArchive and ''.join(v):
3778+ if pkg_name in pkgNamesInArchive and "".join(v):
3779 new_desc = _grouping_market_names(v)
3780 if "Lenovo" not in new_desc:
3781 new_desc = "Lenovo " + new_desc
3782- pkgInfo[pkg_name].old_desc = f"Sutton {group.title()} {codename.title()} platform"
3783+ pkgInfo[
3784+ pkg_name
3785+ ].old_desc = f"Sutton {group.title()} {codename.title()} platform"
3786 pkgInfo[pkg_name].new_desc = new_desc
3787
3788 debug(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder))
3789@@ -1145,22 +1392,24 @@ def load_pkg_info(data) -> dict:
3790 pkgInfo = dict()
3791 data = json.load(data)
3792 for meta in data.keys():
3793- pkgInfo[meta] = PkgInfo(ppa_archive=data[meta]['ppa_archive'],
3794- ppa_version=data[meta]['ppa_version'],
3795- git_version=data[meta]['git_version'],
3796- bootstrap_version=data[meta]['bootstrap_version'],
3797- real_version=data[meta]['real_version'],
3798- old_desc=data[meta]['old_desc'],
3799- new_desc=data[meta]['new_desc'],
3800- fingerprint=data[meta]['fingerprint'],
3801- staging_archive=data[meta]['staging_archive'],
3802- staging_version=data[meta]['staging_version'],
3803- devel_archive=data[meta]['devel_archive'],
3804- devel_version=data[meta]['devel_version'],
3805- oem_archive=data[meta]['oem_archive'],
3806- oem_version=data[meta]['oem_version'],
3807- ubuntu_version=data[meta]['ubuntu_version'],
3808- proposed_version=data[meta]['proposed_version'])
3809+ pkgInfo[meta] = PkgInfo(
3810+ ppa_archive=data[meta]["ppa_archive"],
3811+ ppa_version=data[meta]["ppa_version"],
3812+ git_version=data[meta]["git_version"],
3813+ bootstrap_version=data[meta]["bootstrap_version"],
3814+ real_version=data[meta]["real_version"],
3815+ old_desc=data[meta]["old_desc"],
3816+ new_desc=data[meta]["new_desc"],
3817+ fingerprint=data[meta]["fingerprint"],
3818+ staging_archive=data[meta]["staging_archive"],
3819+ staging_version=data[meta]["staging_version"],
3820+ devel_archive=data[meta]["devel_archive"],
3821+ devel_version=data[meta]["devel_version"],
3822+ oem_archive=data[meta]["oem_archive"],
3823+ oem_version=data[meta]["oem_version"],
3824+ ubuntu_version=data[meta]["ubuntu_version"],
3825+ proposed_version=data[meta]["proposed_version"],
3826+ )
3827
3828 debug(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder))
3829 return pkgInfo
3830@@ -1184,8 +1433,8 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo
3831 if not result:
3832 return
3833
3834- if '.' in result.group(1):
3835- project, group = result.group(1).split('.')
3836+ if "." in result.group(1):
3837+ project, group = result.group(1).split(".")
3838 else:
3839 project = result.group(1)
3840 group = None
3841@@ -1203,15 +1452,34 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo
3842 else:
3843 branch = f"{platform}-focal-oem"
3844
3845- git_command = ("git", "clone", "--depth", "1", "-b", branch, f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", pkg_name)
3846+ git_command = (
3847+ "git",
3848+ "clone",
3849+ "--depth",
3850+ "1",
3851+ "-b",
3852+ branch,
3853+ f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta",
3854+ pkg_name,
3855+ )
3856
3857 with TemporaryDirectory() as tmpdir:
3858 messages = list()
3859 os.chdir(tmpdir)
3860 _run_command(git_command)
3861- git_version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"{pkg_name}/debian/changelog"])
3862+ git_version, _, _ = _run_command(
3863+ [
3864+ "dpkg-parsechangelog",
3865+ "--show-field",
3866+ "Version",
3867+ "-l",
3868+ f"{pkg_name}/debian/changelog",
3869+ ]
3870+ )
3871 if git_version != pkg_info.ppa_version:
3872- critical(f"{pkg_name}'s version is {pkg_info.ppa_version} in ppa:oem-archive/{pkg_info.ppa_archive} but the version in Git repository is {git_version}.")
3873+ critical(
3874+ f"{pkg_name}'s version is {pkg_info.ppa_version} in ppa:oem-archive/{pkg_info.ppa_archive} but the version in Git repository is {git_version}."
3875+ )
3876 exit(1)
3877 git_dir = os.path.join(tmpdir, pkg_name)
3878
3879@@ -1240,29 +1508,41 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo
3880 return False
3881
3882 # Prepare the changelog and commit the changes.
3883- commit_message = 'Update the ' + ' and'.join(', '.join(messages).rsplit(',', 1)) + f' for {args.kernel}.'
3884- _run_command(['dch', '--increment', commit_message])
3885+ commit_message = (
3886+ "Update the "
3887+ + " and".join(", ".join(messages).rsplit(",", 1))
3888+ + f" for {args.kernel}."
3889+ )
3890+ _run_command(["dch", "--increment", commit_message])
3891 _run_command(["git", "add", "debian/changelog"])
3892- _run_command(['git', 'commit', '-a', '-m', f"{commit_message}\n\nUpdated by oem-scripts {oem_scripts.__version__:.2f}."])
3893-
3894- out, _, _ = _run_command(['git', 'show', '--color=always'])
3895- if out != b'':
3896+ _run_command(
3897+ [
3898+ "git",
3899+ "commit",
3900+ "-a",
3901+ "-m",
3902+ f"{commit_message}\n\nUpdated by oem-scripts {oem_scripts.__version__:.2f}.",
3903+ ]
3904+ )
3905+
3906+ out, _, _ = _run_command(["git", "show", "--color=always"])
3907+ if out != b"":
3908 debug(f"({pkg_name}:{branch}) $ git show")
3909 debug(out)
3910
3911 # Run autopkgtest
3912 if args.autopkgtest:
3913- with open(f'{pkg_name}.list', 'r') as f:
3914+ with open(f"{pkg_name}.list", "r") as f:
3915 source_list = f.read().strip()
3916
3917 archives = set()
3918 archives.add(pkg_info.ppa_archive)
3919
3920- if project == 'somerville':
3921+ if project == "somerville":
3922 common_archive = oem_archive.getPPAByName(name=project)
3923 fingerprint = common_archive.signing_key_fingerprint
3924 archives.add(f"{project}")
3925- elif project == 'stella' or project == 'sutton':
3926+ elif project == "stella" or project == "sutton":
3927 common_archive = oem_archive.getPPAByName(name=f"{project}-ouagadougou")
3928 fingerprint = common_archive.signing_key_fingerprint
3929 archives.add(f"{project}-ouagadougou")
3930@@ -1286,8 +1566,9 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo
3931 if f"oem-archive/{ppa}/ubuntu" in url:
3932 source_list += "\ndeb " + url + " focal main"
3933
3934- with open(f'autopkgtest-{pkg_name}-auto', 'w') as f:
3935- f.write(f'''#!/bin/bash
3936+ with open(f"autopkgtest-{pkg_name}-auto", "w") as f:
3937+ f.write(
3938+ f"""#!/bin/bash
3939
3940 set -euo pipefail
3941 IFS=$'\n\t'
3942@@ -1319,54 +1600,83 @@ true
3943 ENDLINE
3944 chmod 755 "\\$root/usr/sbin/update-grub"
3945 END
3946-''')
3947+"""
3948+ )
3949 if args.debug:
3950- _run_command(['cat', f'autopkgtest-{pkg_name}-auto'])
3951- os.chmod(f'autopkgtest-{pkg_name}-auto', 0o755)
3952+ _run_command(["cat", f"autopkgtest-{pkg_name}-auto"])
3953+ os.chmod(f"autopkgtest-{pkg_name}-auto", 0o755)
3954 info(f"({pkg_name}:{branch}) $ run-autopkgtest lxc focal -C")
3955- _run_command(['run-autopkgtest', 'lxc', 'focal', '-C'])
3956- _run_command(['git', 'reset', '--hard', 'HEAD'])
3957- _run_command(['git', 'clean', '-x', '-d', '-f'])
3958+ _run_command(["run-autopkgtest", "lxc", "focal", "-C"])
3959+ _run_command(["git", "reset", "--hard", "HEAD"])
3960+ _run_command(["git", "clean", "-x", "-d", "-f"])
3961
3962 # Don't use UNRELEASED in the real meta.
3963 if not bootstrap:
3964- _run_command(['sed', '-i', 's/UNRELEASED/focal/', 'debian/changelog'])
3965- _run_command(['git', 'commit', '-a', '--amend', '--no-edit'])
3966+ _run_command(["sed", "-i", "s/UNRELEASED/focal/", "debian/changelog"])
3967+ _run_command(["git", "commit", "-a", "--amend", "--no-edit"])
3968
3969 # Tag and find it out.
3970- out, _, _ = _run_command(['gbp', 'tag'])
3971- if out != b'':
3972+ out, _, _ = _run_command(["gbp", "tag"])
3973+ if out != b"":
3974 info(out)
3975- out, _, _ = _run_command(['git', 'describe'])
3976- if out != b'':
3977+ out, _, _ = _run_command(["git", "describe"])
3978+ if out != b"":
3979 tag = out.strip()
3980 info(tag)
3981
3982 # Build Debian binary packages
3983- _run_command(['gbp', 'buildpackage', '-us', '-uc'])
3984- _run_command(['git', 'reset', '--hard', 'HEAD'])
3985- _run_command(['git', 'clean', '-x', '-d', '-f'])
3986+ _run_command(["gbp", "buildpackage", "-us", "-uc"])
3987+ _run_command(["git", "reset", "--hard", "HEAD"])
3988+ _run_command(["git", "clean", "-x", "-d", "-f"])
3989
3990 # Build Debian source packages
3991- _run_command(['gbp', 'buildpackage', '-S', '-us', '-uc'])
3992- _run_command(['git', 'reset', '--hard', 'HEAD'])
3993- _run_command(['git', 'clean', '-x', '-d', '-f'])
3994+ _run_command(["gbp", "buildpackage", "-S", "-us", "-uc"])
3995+ _run_command(["git", "reset", "--hard", "HEAD"])
3996+ _run_command(["git", "clean", "-x", "-d", "-f"])
3997
3998 # Show the commit
3999- out, _, _ = _run_command(['git', 'show', '--color=always'])
4000- if out != b'':
4001+ out, _, _ = _run_command(["git", "show", "--color=always"])
4002+ if out != b"":
4003 warning(f"({pkg_name}:{branch}) $ git show")
4004 print(out)
4005- version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"debian/changelog"])
4006- if not args.dry_run and yes_or_ask(args.yes, f"Would you like to commit and push the changes of {version} into {pkg_name}'s git {branch} branch?"):
4007+ version, _, _ = _run_command(
4008+ [
4009+ "dpkg-parsechangelog",
4010+ "--show-field",
4011+ "Version",
4012+ "-l",
4013+ f"debian/changelog",
4014+ ]
4015+ )
4016+ if not args.dry_run and yes_or_ask(
4017+ args.yes,
4018+ f"Would you like to commit and push the changes of {version} into {pkg_name}'s git {branch} branch?",
4019+ ):
4020 os.chdir(git_dir)
4021- _run_command(['git', 'remote', 'add', 'oem-solutions-engineers', f"git+ssh://{lp.me.name}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta"])
4022- _run_command(['git', 'push', 'oem-solutions-engineers'])
4023- _run_command(['git', 'push', 'oem-solutions-engineers', tag])
4024- if not args.dry_run and yes_or_ask(args.yes, f"Would you like to dput Debian source package into ppa:oem-archive/{pkg_info.ppa_archive}?"):
4025- os.chdir(os.path.join(git_dir, '..'))
4026- _run_command(['debsign', f'{pkg_name}_{version}_source.changes'])
4027- _run_command(['dput', f'ppa:oem-archive/{pkg_info.ppa_archive}', f'{pkg_name}_{version}_source.changes'])
4028+ _run_command(
4029+ [
4030+ "git",
4031+ "remote",
4032+ "add",
4033+ "oem-solutions-engineers",
4034+ f"git+ssh://{lp.me.name}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta",
4035+ ]
4036+ )
4037+ _run_command(["git", "push", "oem-solutions-engineers"])
4038+ _run_command(["git", "push", "oem-solutions-engineers", tag])
4039+ if not args.dry_run and yes_or_ask(
4040+ args.yes,
4041+ f"Would you like to dput Debian source package into ppa:oem-archive/{pkg_info.ppa_archive}?",
4042+ ):
4043+ os.chdir(os.path.join(git_dir, ".."))
4044+ _run_command(["debsign", f"{pkg_name}_{version}_source.changes"])
4045+ _run_command(
4046+ [
4047+ "dput",
4048+ f"ppa:oem-archive/{pkg_info.ppa_archive}",
4049+ f"{pkg_name}_{version}_source.changes",
4050+ ]
4051+ )
4052
4053
4054 def check_meta_git(pkg_name: str, pkg_info: PkgInfo, skip_bootstrap: False) -> None:
4055@@ -1383,84 +1693,119 @@ def check_meta_git(pkg_name: str, pkg_info: PkgInfo, skip_bootstrap: False) -> N
4056
4057 cache = apt_pkg.Cache(progress=None)
4058
4059-if args.subcommand == 'list':
4060+if args.subcommand == "list":
4061 for name in get_oem_meta_packages(cache):
4062 print(name)
4063-elif args.subcommand == 'subscribe':
4064+elif args.subcommand == "subscribe":
4065 for name in get_oem_meta_packages(cache):
4066 info(f"Checking the subscriptions for {name}...")
4067- source = lp.distributions['ubuntu'].getSourcePackage(name=name)
4068- if 'oem-solutions-engineers' in map(lambda x: x.subscriber.name, source.getSubscriptions()):
4069+ source = lp.distributions["ubuntu"].getSourcePackage(name=name)
4070+ if "oem-solutions-engineers" in map(
4071+ lambda x: x.subscriber.name, source.getSubscriptions()
4072+ ):
4073 info(f"ubuntu/{name} has subscribed oem-solutions-engineers.")
4074 continue
4075 warning(f"ubuntu/{name} didn't subscribe oem-solutions-engineers yet.")
4076- if yes_or_ask(args.yes, f"Would you like to subscribe 'oem-solutions-engineers' for ubuntu/{name}?"):
4077+ if yes_or_ask(
4078+ args.yes,
4079+ f"Would you like to subscribe 'oem-solutions-engineers' for ubuntu/{name}?",
4080+ ):
4081 try:
4082 # When a person is subscribed to a source package, one actually subscribe all bugs for it.
4083- source.addBugSubscription(subscriber=lp.people['oem-solutions-engineers'])
4084+ source.addBugSubscription(
4085+ subscriber=lp.people["oem-solutions-engineers"]
4086+ )
4087 except lazr.restfulclient.errors.Unauthorized as e:
4088- error(f"{lp.me.name} does not have permission to subscribe oem-solutions-engineers.")
4089+ error(
4090+ f"{lp.me.name} does not have permission to subscribe oem-solutions-engineers."
4091+ )
4092 if args.verbose:
4093 print(e)
4094 exit(1)
4095-elif args.subcommand == 'unsubscribe':
4096- source = lp.distributions['ubuntu'].getSourcePackage(name=args.pkgName)
4097+elif args.subcommand == "unsubscribe":
4098+ source = lp.distributions["ubuntu"].getSourcePackage(name=args.pkgName)
4099 subscriptions = source.getSubscriptions()
4100 for subscription in subscriptions:
4101- if subscription.subscriber.name == 'oem-solutions-engineers':
4102+ if subscription.subscriber.name == "oem-solutions-engineers":
4103 info(f"ubuntu/{args.pkgName} has subscribed oem-solutions-engineers.")
4104- if yes_or_ask(args.yes, f"Would you like to unsubscribe 'oem-solutions-engineers' for ubuntu/{args.pkgName}?"):
4105+ if yes_or_ask(
4106+ args.yes,
4107+ f"Would you like to unsubscribe 'oem-solutions-engineers' for ubuntu/{args.pkgName}?",
4108+ ):
4109 try:
4110- source.removeBugSubscription(subscriber=lp.people['oem-solutions-engineers'])
4111+ source.removeBugSubscription(
4112+ subscriber=lp.people["oem-solutions-engineers"]
4113+ )
4114 except lazr.restfulclient.errors.Unauthorized as e:
4115- error(f"{lp.me.name} does not have permission to unsubscribe oem-solutions-engineers.")
4116+ error(
4117+ f"{lp.me.name} does not have permission to unsubscribe oem-solutions-engineers."
4118+ )
4119 if args.verbose:
4120 print(e)
4121 exit(1)
4122 exit(0)
4123-elif args.subcommand == 'update':
4124- oem_scripts_config_ini = os.path.join(os.environ["HOME"],
4125- ".config/oem-scripts/config.ini")
4126+elif args.subcommand == "update":
4127+ oem_scripts_config_ini = os.path.join(
4128+ os.environ["HOME"], ".config/oem-scripts/config.ini"
4129+ )
4130 oem_scripts_config = ConfigParser()
4131 oem_scripts_config.read(oem_scripts_config_ini)
4132- config = oem_scripts_config['private']
4133+ config = oem_scripts_config["private"]
4134 if args.json:
4135 pkgInfo = load_pkg_info(args.json)
4136 elif args.meta:
4137- r = requests.get(config['archive'] + "/dists/", auth=(config['username'], config['password']))
4138- pkgInfo = collect_pkg_info(args.meta, check_private=True, index=r.text, config=config)
4139+ r = requests.get(
4140+ config["archive"] + "/dists/", auth=(config["username"], config["password"])
4141+ )
4142+ pkgInfo = collect_pkg_info(
4143+ args.meta, check_private=True, index=r.text, config=config
4144+ )
4145 else:
4146         print("You need to use --json or --meta.")
4147 exit(1)
4148 process_update_task(pkgInfo)
4149-elif args.subcommand == 'collect':
4150- oem_scripts_config_ini = os.path.join(os.environ["HOME"],
4151- ".config/oem-scripts/config.ini")
4152+elif args.subcommand == "collect":
4153+ oem_scripts_config_ini = os.path.join(
4154+ os.environ["HOME"], ".config/oem-scripts/config.ini"
4155+ )
4156 oem_scripts_config = ConfigParser()
4157 oem_scripts_config.read(oem_scripts_config_ini)
4158- config = oem_scripts_config['private']
4159- r = requests.get(config['archive'] + "/dists/", auth=(config['username'], config['password']))
4160+ config = oem_scripts_config["private"]
4161+ r = requests.get(
4162+ config["archive"] + "/dists/", auth=(config["username"], config["password"])
4163+ )
4164 if args.json:
4165- pkgInfo = collect_pkg_info(args.json, check_private=True, index=r.text, config=config)
4166+ pkgInfo = collect_pkg_info(
4167+ args.json, check_private=True, index=r.text, config=config
4168+ )
4169 elif args.meta:
4170- pkgInfo = collect_pkg_info(args.meta, check_private=True, index=r.text, config=config)
4171+ pkgInfo = collect_pkg_info(
4172+ args.meta, check_private=True, index=r.text, config=config
4173+ )
4174 else:
4175 print("You need to use --json or --meta.")
4176 exit(1)
4177- args.output.write(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder))
4178+ args.output.write(
4179+ json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder)
4180+ )
4181 args.output.write("\n")
4182-elif args.subcommand == 'staging-copy':
4183- oem_scripts_config_ini = os.path.join(os.environ["HOME"],
4184- ".config/oem-scripts/config.ini")
4185+elif args.subcommand == "staging-copy":
4186+ oem_scripts_config_ini = os.path.join(
4187+ os.environ["HOME"], ".config/oem-scripts/config.ini"
4188+ )
4189 oem_scripts_config = ConfigParser()
4190 oem_scripts_config.read(oem_scripts_config_ini)
4191- config = oem_scripts_config['private']
4192+ config = oem_scripts_config["private"]
4193
4194 if args.json:
4195 pkgInfo = load_pkg_info(args.json)
4196 elif args.meta:
4197- r = requests.get(config['archive'] + "/dists/", auth=(config['username'], config['password']))
4198- pkgInfo = collect_pkg_info(args.meta, check_private=True, index=r.text, config=config)
4199+ r = requests.get(
4200+ config["archive"] + "/dists/", auth=(config["username"], config["password"])
4201+ )
4202+ pkgInfo = collect_pkg_info(
4203+ args.meta, check_private=True, index=r.text, config=config
4204+ )
4205 else:
4206 print("You need to use --json or --meta.")
4207 exit(1)
4208@@ -1470,12 +1815,21 @@ elif args.subcommand == 'staging-copy':
4209 staging_locked = set()
4210 for pkg_name in sorted(pkgInfo.keys()):
4211 pkg_info = pkgInfo[pkg_name]
4212- debug(f"{pkg_name} ppa: {pkg_info.ppa_version}, devel: {pkg_info.devel_version}, staging: {pkg_info.staging_version}.")
4213+ debug(
4214+ f"{pkg_name} ppa: {pkg_info.ppa_version}, devel: {pkg_info.devel_version}, staging: {pkg_info.staging_version}."
4215+ )
4216 if pkg_info.ppa_version != pkg_info.devel_version:
4217- warning(f"{pkg_name} versions are not synced between ppa:oem-archive/{pkg_info.ppa_archive} and {pkg_info.devel_archive}.")
4218+ warning(
4219+ f"{pkg_name} versions are not synced between ppa:oem-archive/{pkg_info.ppa_archive} and {pkg_info.devel_archive}."
4220+ )
4221 elif pkg_info.staging_version == pkg_info.devel_version:
4222- info(f"{pkg_name} {pkg_info.devel_version} (devel) == {pkg_info.staging_version} (staging) so it doesn't need to copy.")
4223- elif apt_pkg.version_compare(pkg_info.staging_version, pkg_info.devel_version) > 0:
4224+ info(
4225+ f"{pkg_name} {pkg_info.devel_version} (devel) == {pkg_info.staging_version} (staging) so it doesn't need to copy."
4226+ )
4227+ elif (
4228+ apt_pkg.version_compare(pkg_info.staging_version, pkg_info.devel_version)
4229+ > 0
4230+ ):
4231 critical(f"This should never happen.")
4232 exit(1)
4233 else:
4234@@ -1485,65 +1839,90 @@ elif args.subcommand == 'staging-copy':
4235 jobs[identity] = list()
4236 jobs[identity].append(pkg_name)
4237 debug(json.dumps(jobs, indent=4, sort_keys=True))
4238- cloudberry = lp.projects['cloudberry']
4239- assignee = lp.people['oem-archive']
4240+ cloudberry = lp.projects["cloudberry"]
4241+ assignee = lp.people["oem-archive"]
4242 tasks = cloudberry.searchTasks(
4243- status=['New', 'Triaged', 'Confirmed', 'In Progress', 'Fix Committed'],
4244- search_text='request of')
4245+ status=["New", "Triaged", "Confirmed", "In Progress", "Fix Committed"],
4246+ search_text="request of",
4247+ )
4248 for task in tasks:
4249 bug = task.bug
4250 for staging in sorted(dest):
4251- if staging in bug.description and 'staging-lock' in bug.tags and 'cqa-verified-staging' not in bug.tags:
4252+ if (
4253+ staging in bug.description
4254+ and "staging-lock" in bug.tags
4255+ and "cqa-verified-staging" not in bug.tags
4256+ ):
4257 debug(bug.description)
4258 tags = ",".join(bug.tags)
4259- for line in bug.description.split('\n'):
4260- if line.startswith('Package: '):
4261+ for line in bug.description.split("\n"):
4262+ if line.startswith("Package: "):
4263 package = line
4264- warning(f"https://bugs.launchpad.net/bugs/{bug.id}\n\t({staging})\n\t[{tags}]\n\t{bug.title}\n\t{package}")
4265+ warning(
4266+ f"https://bugs.launchpad.net/bugs/{bug.id}\n\t({staging})\n\t[{tags}]\n\t{bug.title}\n\t{package}"
4267+ )
4268 staging_locked.add(staging)
4269 for job in jobs:
4270- source, dest = job.split(':')
4271+ source, dest = job.split(":")
4272 if dest and dest in staging_locked and not args.ignore_staging_lock:
4273- warning(f"The following OEM metapackages will be skipped due to the staging-lock of {dest}.\n" + '\n'.join(jobs[job]))
4274+ warning(
4275+ f"The following OEM metapackages will be skipped due to the staging-lock of {dest}.\n"
4276+ + "\n".join(jobs[job])
4277+ )
4278 else:
4279 title = f"request of copy_package [{source}]"
4280- tags = ["archive-request", "via-request-script", f"oem-scripts-{oem_scripts.__version__:.2f}", "oem-metapackages"]
4281+ tags = [
4282+ "archive-request",
4283+ "via-request-script",
4284+ f"oem-scripts-{oem_scripts.__version__:.2f}",
4285+ "oem-metapackages",
4286+ ]
4287 if args.dry_run:
4288 info(f"TITLE: {title}")
4289- info("TAGS: " + ','.join(tags))
4290+ info("TAGS: " + ",".join(tags))
4291 else:
4292 debug(f"TITLE: {title}")
4293- debug("TAGS: " + ','.join(tags))
4294+ debug("TAGS: " + ",".join(tags))
4295 packages = list()
4296 for pkg_name in jobs[job]:
4297 pkg_info = pkgInfo[pkg_name]
4298 packages.append(f"{pkg_name} (=={pkg_info.devel_version})")
4299- packages = ', '.join(packages)
4300+ packages = ", ".join(packages)
4301 distribution = "focal"
4302- if dest.startswith('somerville'):
4303- component = remove_suffix(dest, f"-{distribution}-staging").replace('-fossa', '')
4304+ if dest.startswith("somerville"):
4305+ component = remove_suffix(dest, f"-{distribution}-staging").replace(
4306+ "-fossa", ""
4307+ )
4308 else:
4309 debug(dest)
4310- project, group, _ = dest.split('-', 2)
4311+ project, group, _ = dest.split("-", 2)
4312 component = f"{project}.{group}"
4313- production = distribution + '-' + component
4314+ production = distribution + "-" + component
4315 description = staging_copy_template.substitute(
4316 source=source,
4317 destination=dest,
4318 packages=packages,
4319 production=production,
4320- username=config['username'],
4321- url=config['url'],
4322+ username=config["username"],
4323+ url=config["url"],
4324 distribution=distribution,
4325- component=component)
4326+ component=component,
4327+ )
4328 print(description)
4329- if not args.dry_run and yes_or_ask(args.yes, f"Would you like to create a cloudberry bug to copy the {packages} from {source} to {dest}?"):
4330- bug = lp.bugs.createBug(description=description, target=cloudberry, title=title, tags=tags)
4331+ if not args.dry_run and yes_or_ask(
4332+ args.yes,
4333+ f"Would you like to create a cloudberry bug to copy the {packages} from {source} to {dest}?",
4334+ ):
4335+ bug = lp.bugs.createBug(
4336+ description=description, target=cloudberry, title=title, tags=tags
4337+ )
4338 for task in bug.bug_tasks:
4339- task.importance = 'High'
4340+ task.importance = "High"
4341 task.assignee = assignee
4342 task.lp_save()
4343 bug.lp_save()
4344- print(f"The cloudberry staging copy bug has been created on {bug.web_link}.\n")
4345+ print(
4346+ f"The cloudberry staging copy bug has been created on {bug.web_link}.\n"
4347+ )
4348 else:
4349 parser.print_help()
4350diff --git a/oem_scripts/LaunchpadLogin.py b/oem_scripts/LaunchpadLogin.py
4351index 0583ad2..01bae8b 100644
4352--- a/oem_scripts/LaunchpadLogin.py
4353+++ b/oem_scripts/LaunchpadLogin.py
4354@@ -8,20 +8,28 @@ import logging
4355 import os
4356
4357
4358-class ShutUpAndTakeMyTokenAuthorizationEngine(credentials.RequestTokenAuthorizationEngine):
4359+class ShutUpAndTakeMyTokenAuthorizationEngine(
4360+ credentials.RequestTokenAuthorizationEngine
4361+):
4362 """This stub class prevents launchpadlib from nulling out consumer_name
4363 in its demented campaign to force the use of desktop integration. """
4364
4365- def __init__(self, service_root, application_name=None, consumer_name=None,
4366- credential_save_failed=None, allow_access_levels=None):
4367+ def __init__(
4368+ self,
4369+ service_root,
4370+ application_name=None,
4371+ consumer_name=None,
4372+ credential_save_failed=None,
4373+ allow_access_levels=None,
4374+ ):
4375 super(ShutUpAndTakeMyTokenAuthorizationEngine, self).__init__(
4376- service_root, application_name, consumer_name,
4377- credential_save_failed)
4378+ service_root, application_name, consumer_name, credential_save_failed
4379+ )
4380
4381
4382-def launchpad_login(pkg, service_root='production', version='devel'):
4383+def launchpad_login(pkg, service_root="production", version="devel"):
4384 """Log into Launchpad API with stored credentials."""
4385- creds_dir = os.path.expanduser(os.path.join('~', '.' + pkg))
4386+ creds_dir = os.path.expanduser(os.path.join("~", "." + pkg))
4387 if not os.path.exists(creds_dir):
4388 os.makedirs(creds_dir, 0o700)
4389 os.chmod(creds_dir, 0o700)
4390@@ -29,69 +37,83 @@ def launchpad_login(pkg, service_root='production', version='devel'):
4391 consumer_name = pkg
4392 return Launchpad.login_with(
4393 consumer_name=consumer_name,
4394- credentials_file=os.path.join(creds_dir, 'launchpad.credentials'),
4395+ credentials_file=os.path.join(creds_dir, "launchpad.credentials"),
4396 service_root=api_endpoint,
4397 version=version,
4398 authorization_engine=ShutUpAndTakeMyTokenAuthorizationEngine(
4399- service_root=api_endpoint,
4400- consumer_name=consumer_name,
4401+ service_root=api_endpoint, consumer_name=consumer_name
4402 ),
4403 )
4404
4405
4406-class LaunchpadLogin():
4407+class LaunchpadLogin:
4408 """Try to unify all Launchpad login"""
4409- def __init__(self, application_name='oem-scripts',
4410- service_root=None, launchpadlib_dir=None,
4411- version="devel", bot=False):
4412+
4413+ def __init__(
4414+ self,
4415+ application_name="oem-scripts",
4416+ service_root=None,
4417+ launchpadlib_dir=None,
4418+ version="devel",
4419+ bot=False,
4420+ ):
4421
4422 if launchpadlib_dir is None:
4423 launchpadlib_dir = os.path.join(os.environ["HOME"], ".launchpadlib/cache")
4424
4425 if service_root is None:
4426- if os.environ.get('LAUNCHPAD_API') == lookup_service_root('staging'):
4427- service_root = 'staging'
4428+ if os.environ.get("LAUNCHPAD_API") == lookup_service_root("staging"):
4429+ service_root = "staging"
4430 else:
4431- service_root = 'production'
4432+ service_root = "production"
4433
4434- oem_scripts_config_ini = os.path.join(os.environ["HOME"],
4435- ".config/oem-scripts/config.ini")
4436+ oem_scripts_config_ini = os.path.join(
4437+ os.environ["HOME"], ".config/oem-scripts/config.ini"
4438+ )
4439 launchpad_token = os.environ.get("LAUNCHPAD_TOKEN")
4440
4441 if bot:
4442 logging.info("Using oem-taipei-bot credentials")
4443- self.lp = launchpad_login('/', service_root)
4444+ self.lp = launchpad_login("/", service_root)
4445
4446 elif launchpad_token:
4447 if launchpad_token == "::":
4448 logging.info("Using anonymously login")
4449 self.lp = Launchpad.login_anonymously(application_name, service_root)
4450 elif ":" in launchpad_token:
4451- oauth_token, oauth_token_secret, oauth_consumer_key = launchpad_token.split(":", maxsplit=2)
4452- self.lp = Launchpad.login(oauth_consumer_key,
4453- oauth_token,
4454- oauth_token_secret,
4455- service_root=service_root,
4456- cache=launchpadlib_dir,
4457- version=version)
4458+ oauth_token, oauth_token_secret, oauth_consumer_key = launchpad_token.split(
4459+ ":", maxsplit=2
4460+ )
4461+ self.lp = Launchpad.login(
4462+ oauth_consumer_key,
4463+ oauth_token,
4464+ oauth_token_secret,
4465+ service_root=service_root,
4466+ cache=launchpadlib_dir,
4467+ version=version,
4468+ )
4469 else:
4470 logging.error(f"invalid LAUNCHPAD_TOKEN '{launchpad_token}'")
4471 exit(1)
4472
4473- elif os.environ.get('LAUNCHPAD_API') and os.path.exists(oem_scripts_config_ini):
4474+ elif os.environ.get("LAUNCHPAD_API") and os.path.exists(oem_scripts_config_ini):
4475 logging.info("Using oem-scripts oauth token")
4476 oem_scripts_config = ConfigParser()
4477 oem_scripts_config.read(oem_scripts_config_ini)
4478- config = oem_scripts_config['oem-scripts']
4479- self.lp = Launchpad.login(config['oauth_consumer_key'],
4480- config['oauth_token'],
4481- config['oauth_token_secret'],
4482- service_root=service_root,
4483- cache=launchpadlib_dir,
4484- version=version)
4485+ config = oem_scripts_config["oem-scripts"]
4486+ self.lp = Launchpad.login(
4487+ config["oauth_consumer_key"],
4488+ config["oauth_token"],
4489+ config["oauth_token_secret"],
4490+ service_root=service_root,
4491+ cache=launchpadlib_dir,
4492+ version=version,
4493+ )
4494 else:
4495 logging.info("Using oem-scripts login")
4496- self.lp = Launchpad.login_with(application_name=application_name,
4497- service_root=service_root,
4498- launchpadlib_dir=launchpadlib_dir,
4499- version=version)
4500+ self.lp = Launchpad.login_with(
4501+ application_name=application_name,
4502+ service_root=service_root,
4503+ launchpadlib_dir=launchpadlib_dir,
4504+ version=version,
4505+ )
4506diff --git a/oem_scripts/logging.py b/oem_scripts/logging.py
4507index 5351f86..0571d3c 100644
4508--- a/oem_scripts/logging.py
4509+++ b/oem_scripts/logging.py
4510@@ -22,29 +22,36 @@ import sys
4511
4512
4513 def setup_logging(debug=False, quiet=False):
4514- logging.addLevelName(logging.DEBUG,
4515- "\033[1;96m%s\033[1;0m" %
4516- logging.getLevelName(logging.DEBUG))
4517- logging.addLevelName(logging.INFO,
4518- "\033[1;32m%s\033[1;0m" %
4519- logging.getLevelName(logging.INFO))
4520- logging.addLevelName(logging.WARNING,
4521- "\033[1;33m%s\033[1;0m" %
4522- logging.getLevelName(logging.WARNING))
4523- logging.addLevelName(logging.ERROR,
4524- "\033[1;31m%s\033[1;0m" %
4525- logging.getLevelName(logging.ERROR))
4526- logging.addLevelName(logging.CRITICAL,
4527- "\033[1;41m%s\033[1;0m" %
4528- logging.getLevelName(logging.CRITICAL))
4529+ logging.addLevelName(
4530+ logging.DEBUG, "\033[1;96m%s\033[1;0m" % logging.getLevelName(logging.DEBUG)
4531+ )
4532+ logging.addLevelName(
4533+ logging.INFO, "\033[1;32m%s\033[1;0m" % logging.getLevelName(logging.INFO)
4534+ )
4535+ logging.addLevelName(
4536+ logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING)
4537+ )
4538+ logging.addLevelName(
4539+ logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR)
4540+ )
4541+ logging.addLevelName(
4542+ logging.CRITICAL,
4543+ "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.CRITICAL),
4544+ )
4545 if debug:
4546- logging.basicConfig(format='<%(levelname)s> %(message)s',
4547- level=logging.DEBUG,
4548- handlers=[logging.StreamHandler(sys.stdout)])
4549+ logging.basicConfig(
4550+ format="<%(levelname)s> %(message)s",
4551+ level=logging.DEBUG,
4552+ handlers=[logging.StreamHandler(sys.stdout)],
4553+ )
4554 elif not quiet:
4555- logging.basicConfig(format='<%(levelname)s> %(message)s',
4556- level=logging.INFO,
4557- handlers=[logging.StreamHandler(sys.stdout)])
4558+ logging.basicConfig(
4559+ format="<%(levelname)s> %(message)s",
4560+ level=logging.INFO,
4561+ handlers=[logging.StreamHandler(sys.stdout)],
4562+ )
4563 else:
4564- logging.basicConfig(format='<%(levelname)s> %(message)s',
4565- handlers=[logging.StreamHandler(sys.stdout)])
4566+ logging.basicConfig(
4567+ format="<%(levelname)s> %(message)s",
4568+ handlers=[logging.StreamHandler(sys.stdout)],
4569+ )
4570diff --git a/pkg-list b/pkg-list
4571index e5700ac..c5eeca0 100755
4572--- a/pkg-list
4573+++ b/pkg-list
4574@@ -29,8 +29,9 @@ from logging import debug, error, critical, info, warning
4575 from urllib.parse import urljoin
4576
4577
4578-parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
4579- epilog="""
4580+parser = argparse.ArgumentParser(
4581+ formatter_class=argparse.RawDescriptionHelpFormatter,
4582+ epilog="""
4583 examples:
4584 pkg-list ubuntu-desktop --recommends > ubuntu-desktop.list
4585 pkg-list dkms --exclude ubuntu-desktop.list > dkms.list
4586@@ -39,63 +40,74 @@ examples:
4587 pkg-list linux-generic --exclude all.list
4588 pkg-list linux-generic-hwe-20.04 --exclude all.list
4589 pkg-list linux-oem-20.04 --exclude all.list
4590- pkg-list linux-oem-20.04-edge --exclude all.list""")
4591-
4592-parser.add_argument("-d", "--debug",
4593- action="store_true",
4594- help="print debug messages")
4595-parser.add_argument("-l", "--long",
4596- action="store_true",
4597- help="print long list including the URL, MD5, SHA1 and SHA256.")
4598-parser.add_argument("--apt-dir",
4599- type=str,
4600- help="specify the dir for apt")
4601-parser.add_argument("--recommends",
4602- action="store_true",
4603- help="include recommends packages")
4604-parser.add_argument("--suggests",
4605- action="store_true",
4606- help="include suggests packages")
4607-parser.add_argument("--non-installed",
4608- action="store_true",
4609- help="only get non-installed packages per check current running environments")
4610-parser.add_argument("--fail-unavailable",
4611- action="store_true",
4612- help="Return error when any package is unavailable.")
4613-parser.add_argument("--exclude",
4614- metavar='pkg.list',
4615- type=argparse.FileType('r', encoding='UTF-8'),
4616- help="package names and versions to exclude.")
4617-parser.add_argument('pkgs',
4618- metavar='PKG_NAME',
4619- type=str, nargs='+',
4620- help='the names of Debian binary packages')
4621+ pkg-list linux-oem-20.04-edge --exclude all.list""",
4622+)
4623+
4624+parser.add_argument("-d", "--debug", action="store_true", help="print debug messages")
4625+parser.add_argument(
4626+ "-l",
4627+ "--long",
4628+ action="store_true",
4629+ help="print long list including the URL, MD5, SHA1 and SHA256.",
4630+)
4631+parser.add_argument("--apt-dir", type=str, help="specify the dir for apt")
4632+parser.add_argument(
4633+ "--recommends", action="store_true", help="include recommends packages"
4634+)
4635+parser.add_argument("--suggests", action="store_true", help="include suggests packages")
4636+parser.add_argument(
4637+ "--non-installed",
4638+ action="store_true",
4639+ help="only get non-installed packages per check current running environments",
4640+)
4641+parser.add_argument(
4642+ "--fail-unavailable",
4643+ action="store_true",
4644+ help="Return error when any package is unavailable.",
4645+)
4646+parser.add_argument(
4647+ "--exclude",
4648+ metavar="pkg.list",
4649+ type=argparse.FileType("r", encoding="UTF-8"),
4650+ help="package names and versions to exclude.",
4651+)
4652+parser.add_argument(
4653+ "pkgs",
4654+ metavar="PKG_NAME",
4655+ type=str,
4656+ nargs="+",
4657+ help="the names of Debian binary packages",
4658+)
4659
4660 args = parser.parse_args()
4661
4662-logging.addLevelName(logging.DEBUG,
4663- "\033[1;96m%s\033[1;0m" %
4664- logging.getLevelName(logging.DEBUG))
4665-logging.addLevelName(logging.INFO,
4666- "\033[1;32m%s\033[1;0m" %
4667- logging.getLevelName(logging.INFO))
4668-logging.addLevelName(logging.WARNING,
4669- "\033[1;33m%s\033[1;0m" %
4670- logging.getLevelName(logging.WARNING))
4671-logging.addLevelName(logging.ERROR,
4672- "\033[1;31m%s\033[1;0m" %
4673- logging.getLevelName(logging.ERROR))
4674-logging.addLevelName(logging.CRITICAL,
4675- "\033[1;41m%s\033[1;0m" %
4676- logging.getLevelName(logging.CRITICAL))
4677+logging.addLevelName(
4678+ logging.DEBUG, "\033[1;96m%s\033[1;0m" % logging.getLevelName(logging.DEBUG)
4679+)
4680+logging.addLevelName(
4681+ logging.INFO, "\033[1;32m%s\033[1;0m" % logging.getLevelName(logging.INFO)
4682+)
4683+logging.addLevelName(
4684+ logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING)
4685+)
4686+logging.addLevelName(
4687+ logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR)
4688+)
4689+logging.addLevelName(
4690+ logging.CRITICAL, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.CRITICAL)
4691+)
4692
4693 if args.debug:
4694- logging.basicConfig(format='<%(levelname)s> %(message)s',
4695- level=logging.DEBUG,
4696- handlers=[logging.StreamHandler(sys.stdout)])
4697+ logging.basicConfig(
4698+ format="<%(levelname)s> %(message)s",
4699+ level=logging.DEBUG,
4700+ handlers=[logging.StreamHandler(sys.stdout)],
4701+ )
4702 else:
4703- logging.basicConfig(format='<%(levelname)s> %(message)s',
4704- handlers=[logging.StreamHandler(sys.stdout)])
4705+ logging.basicConfig(
4706+ format="<%(levelname)s> %(message)s",
4707+ handlers=[logging.StreamHandler(sys.stdout)],
4708+ )
4709
4710
4711 def _debug_pkg(pkg: str) -> None:
4712@@ -106,12 +118,18 @@ def _debug_pkg(pkg: str) -> None:
4713 debug(dir(pkg))
4714
4715 for attr in dir(pkg):
4716- if not attr.startswith('__'):
4717+ if not attr.startswith("__"):
4718 if not isinstance(pkg.__getattribute__(attr), types.BuiltinFunctionType):
4719 debug(f"{attr}: {pkg.__getattribute__(attr)}")
4720
4721
4722-def get_depends(pkg_name: str, depends_list: list, recommends: bool, suggests: bool, non_installed: bool) -> bool:
4723+def get_depends(
4724+ pkg_name: str,
4725+ depends_list: list,
4726+ recommends: bool,
4727+ suggests: bool,
4728+ non_installed: bool,
4729+) -> bool:
4730 """Recursively get all dependencies.
4731
4732 Args:
4733@@ -158,25 +176,34 @@ def get_depends(pkg_name: str, depends_list: list, recommends: bool, suggests: b
4734 if pkg_name in map(lambda x: x[0], depends_list):
4735 continue
4736
4737- if any(pkg_name == name and version.ver_str == ver for name, ver in exclude_list):
4738+ if any(
4739+ pkg_name == name and version.ver_str == ver for name, ver in exclude_list
4740+ ):
4741 break
4742
4743 if non_installed and pkg.current_ver == version:
4744 break
4745
4746 for pfile in version.file_list:
4747- if pfile[0].filename != '/var/lib/dpkg/status' and record.lookup(pfile):
4748- url = urljoin("http://" + pfile[0].site, 'ubuntu/' + record.filename)
4749+ if pfile[0].filename != "/var/lib/dpkg/status" and record.lookup(pfile):
4750+ url = urljoin("http://" + pfile[0].site, "ubuntu/" + record.filename)
4751 break
4752
4753 debug(f"{pkg_name} {version.ver_str} {pkg.architecture} {url}")
4754- item = (pkg_name, version.ver_str, url, record.hashes.find("MD5Sum"), record.hashes.find("SHA1"), record.hashes.find("SHA256"))
4755+ item = (
4756+ pkg_name,
4757+ version.ver_str,
4758+ url,
4759+ record.hashes.find("MD5Sum"),
4760+ record.hashes.find("SHA1"),
4761+ record.hashes.find("SHA256"),
4762+ )
4763 depends_list.append(item)
4764
4765- for target in ('PreDepends', 'Depends', 'Recommends', 'Suggests'):
4766- if target == 'Recommends' and not recommends:
4767+ for target in ("PreDepends", "Depends", "Recommends", "Suggests"):
4768+ if target == "Recommends" and not recommends:
4769 continue
4770- if target == 'Suggests' and not suggests:
4771+ if target == "Suggests" and not suggests:
4772 continue
4773 if target not in version.depends_list_str:
4774 continue
4775@@ -188,10 +215,16 @@ def get_depends(pkg_name: str, depends_list: list, recommends: bool, suggests: b
4776 if name in map(lambda x: x[0], depends_list):
4777 found = True
4778 break
4779- if arch == 'i386' and pkg.architecture == 'amd64':
4780+ if arch == "i386" and pkg.architecture == "amd64":
4781 name = name + ":i386"
4782 pkg = cache[name]
4783- found = get_depends(name, depends_list, recommends=recommends, suggests=suggests, non_installed=non_installed)
4784+ found = get_depends(
4785+ name,
4786+ depends_list,
4787+ recommends=recommends,
4788+ suggests=suggests,
4789+ non_installed=non_installed,
4790+ )
4791 if found:
4792 break
4793 if not found and args.fail_unavailable:
4794@@ -205,11 +238,13 @@ if args.apt_dir:
4795 if args.debug:
4796 old = apt_pkg.config.dump()
4797 apt_pkg.config.set("Dir", args.apt_dir)
4798- apt_pkg.config.set("Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status"))
4799+ apt_pkg.config.set(
4800+ "Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status")
4801+ )
4802 if args.debug:
4803 new = apt_pkg.config.dump()
4804 d = difflib.Differ()
4805- diff = d.compare(old.split('\n'), new.split('\n'))
4806+ diff = d.compare(old.split("\n"), new.split("\n"))
4807 for line in diff:
4808 debug(line.strip())
4809 apt_pkg.init_system()
4810@@ -225,12 +260,18 @@ exclude_list = []
4811
4812 if args.exclude:
4813 for line in args.exclude.readlines():
4814- (name, ver) = line.strip().split(' ')
4815+ (name, ver) = line.strip().split(" ")
4816 if not any(name == _name and ver == _ver for _name, _ver in exclude_list):
4817 exclude_list.append((name, ver))
4818
4819 for pkg in args.pkgs:
4820- get_depends(pkg, pkg_list, recommends=args.recommends, suggests=args.suggests, non_installed=args.non_installed)
4821+ get_depends(
4822+ pkg,
4823+ pkg_list,
4824+ recommends=args.recommends,
4825+ suggests=args.suggests,
4826+ non_installed=args.non_installed,
4827+ )
4828
4829 for pkg, ver, url, md5, sha1, sha256 in sorted(pkg_list):
4830 if args.long:
4831diff --git a/pkg-oem-meta b/pkg-oem-meta
4832index e5e3cf5..c924bfa 100755
4833--- a/pkg-oem-meta
4834+++ b/pkg-oem-meta
4835@@ -16,155 +16,164 @@ program_name = os.path.basename(sys.argv[0])
4836
4837 setup_logging()
4838
4839-if program_name == 'pkg-somerville-meta':
4840- codename = 'somerville'
4841+if program_name == "pkg-somerville-meta":
4842+ codename = "somerville"
4843 Codename = codename.title()
4844- brand = 'dell'
4845+ brand = "dell"
4846 parser = argparse.ArgumentParser(
4847 formatter_class=argparse.RawTextHelpFormatter,
4848 description=f"{Codename} platform meta package generator.",
4849 epilog=f"Ex. {program_name} --public-bug 1868254 -s focal -k"
4850- " oem -p three-eyed-raven 0962")
4851-elif program_name == 'pkg-stella-meta':
4852- codename = 'stella'
4853+ " oem -p three-eyed-raven 0962",
4854+ )
4855+elif program_name == "pkg-stella-meta":
4856+ codename = "stella"
4857 Codename = codename.title()
4858- brand = 'hp'
4859+ brand = "hp"
4860 parser = argparse.ArgumentParser(
4861 formatter_class=argparse.RawTextHelpFormatter,
4862 description=f"{Codename} platform meta package generator.",
4863- epilog=f"Ex. {program_name} -s focal -k oem"
4864- " -g cmit -p beedrill 8594")
4865-elif program_name == 'pkg-sutton-meta':
4866- codename = 'sutton'
4867+ epilog=f"Ex. {program_name} -s focal -k oem" " -g cmit -p beedrill 8594",
4868+ )
4869+elif program_name == "pkg-sutton-meta":
4870+ codename = "sutton"
4871 Codename = codename.title()
4872- brand = 'lenovo'
4873+ brand = "lenovo"
4874 parser = argparse.ArgumentParser(
4875 formatter_class=argparse.RawTextHelpFormatter,
4876 description=f"{Codename} platform meta package generator.",
4877- epilog=f"Ex. {program_name} -s focal -k oem"
4878- " -g bachman -p banaing S08")
4879+ epilog=f"Ex. {program_name} -s focal -k oem" " -g bachman -p banaing S08",
4880+ )
4881 else:
4882- error('This program can not be executed.')
4883+ error("This program can not be executed.")
4884 exit(1)
4885
4886-parser.add_argument('-k', '--kernel',
4887- choices=['linux-oem-20.04', 'linux-oem-20.04b', 'linux-oem-20.04c', 'linux-generic-hwe-20.04'],
4888- default='linux-oem-20.04',
4889- help="Specify the kernel meta. [linux-oem-20.04|linux-oem-20.04b|linux-oem-20.04c|linux-generic-hwe-20.04]")
4890+parser.add_argument(
4891+ "-k",
4892+ "--kernel",
4893+ choices=[
4894+ "linux-oem-20.04",
4895+ "linux-oem-20.04b",
4896+ "linux-oem-20.04c",
4897+ "linux-generic-hwe-20.04",
4898+ ],
4899+ default="linux-oem-20.04",
4900+ help="Specify the kernel meta. [linux-oem-20.04|linux-oem-20.04b|linux-oem-20.04c|linux-generic-hwe-20.04]",
4901+)
4902 info = UbuntuDistroInfo()
4903 try:
4904 # for bionic
4905- series = lsb_release.get_lsb_information()['CODENAME']
4906+ series = lsb_release.get_lsb_information()["CODENAME"]
4907 except AttributeError:
4908 # for focal
4909- series = lsb_release.get_os_release()['CODENAME']
4910-
4911-parser.add_argument('-s', '--series', choices=info.supported(), default=series,
4912- help=f"Ubuntu series, such as {series} by default.")
4913-parser.add_argument('--public-bug',
4914- help="Launchpad public bug number.",
4915- type=int)
4916-parser.add_argument('--private-bug',
4917- help="Launchpad private bug number.",
4918- type=int)
4919-
4920-if codename == 'somerville':
4921- parser.add_argument('bios_id', nargs='+', help="BIOS ID")
4922- parser.add_argument('-p', '--platform', help="platform tag", required=True)
4923-elif codename == 'stella':
4924- parser.add_argument('-g', '--group', help="OEM-group", required=True)
4925- parser.add_argument('-p', '--platform', help="platform-codename",
4926- required=True)
4927- parser.add_argument('sd_id', nargs='+',
4928- help="subsystem device ID, such as 0962")
4929-elif codename == 'sutton':
4930- parser.add_argument('-g', '--group', help="OEM-group", required=True)
4931- parser.add_argument('-p', '--platform', help="platform-codename",
4932- required=True)
4933- parser.add_argument('bios_ver', nargs='+',
4934- help="First three chars in bios version, "
4935- + "such as S08 or bvnLENOVO:bvrS08")
4936+ series = lsb_release.get_os_release()["CODENAME"]
4937+
4938+parser.add_argument(
4939+ "-s",
4940+ "--series",
4941+ choices=info.supported(),
4942+ default=series,
4943+ help=f"Ubuntu series, such as {series} by default.",
4944+)
4945+parser.add_argument("--public-bug", help="Launchpad public bug number.", type=int)
4946+parser.add_argument("--private-bug", help="Launchpad private bug number.", type=int)
4947+
4948+if codename == "somerville":
4949+ parser.add_argument("bios_id", nargs="+", help="BIOS ID")
4950+ parser.add_argument("-p", "--platform", help="platform tag", required=True)
4951+elif codename == "stella":
4952+ parser.add_argument("-g", "--group", help="OEM-group", required=True)
4953+ parser.add_argument("-p", "--platform", help="platform-codename", required=True)
4954+ parser.add_argument("sd_id", nargs="+", help="subsystem device ID, such as 0962")
4955+elif codename == "sutton":
4956+ parser.add_argument("-g", "--group", help="OEM-group", required=True)
4957+ parser.add_argument("-p", "--platform", help="platform-codename", required=True)
4958+ parser.add_argument(
4959+ "bios_ver",
4960+ nargs="+",
4961+ help="First three chars in bios version, " + "such as S08 or bvnLENOVO:bvrS08",
4962+ )
4963 args = parser.parse_args()
4964
4965 platform = args.platform.lower()
4966 Platform = platform.title()
4967 series = args.series
4968-versions = dict(zip(info.get_all(result='codename'),
4969- info.get_all(result='release')))
4970-version = versions[series].split(' ')[0]
4971+versions = dict(zip(info.get_all(result="codename"), info.get_all(result="release")))
4972+version = versions[series].split(" ")[0]
4973
4974 # Sanity check
4975-if codename == 'somerville':
4976- group = ''
4977+if codename == "somerville":
4978+ group = ""
4979 if args.platform.startswith("fossa-"):
4980- error('Please remove fossa- prefix from the platform name.')
4981+ error("Please remove fossa- prefix from the platform name.")
4982 exit(1)
4983 for bios_id in args.bios_id:
4984- if not re.match('[0-9a-fA-F]{4}$', bios_id):
4985- error('Invalid BIOS ID: {%s}' % bios_id)
4986+ if not re.match("[0-9a-fA-F]{4}$", bios_id):
4987+ error("Invalid BIOS ID: {%s}" % bios_id)
4988 exit(1)
4989- meta = 'oem-' + codename + '-' + platform + '-meta'
4990-elif codename == 'stella':
4991+ meta = "oem-" + codename + "-" + platform + "-meta"
4992+elif codename == "stella":
4993 group = args.group.lower()
4994 for sd_id in args.sd_id:
4995- if not re.match('[0-9a-fA-F]{4}$', sd_id):
4996- error('Invalid subsystem device ID: {%s}' % sd_id)
4997+ if not re.match("[0-9a-fA-F]{4}$", sd_id):
4998+ error("Invalid subsystem device ID: {%s}" % sd_id)
4999 exit(1)
5000- meta = 'oem-' + codename + '.' + group + '-' + platform + '-meta'
The diff has been truncated for viewing.

Subscribers

People subscribed via source and target branches