Merge ~fourdollars/pc-enablement/+git/oem-scripts:master into ~oem-solutions-engineers/pc-enablement/+git/oem-scripts:master

Proposed by Shih-Yuan Lee
Status: Merged
Approved by: Shih-Yuan Lee
Approved revision: 633af21b9daeea13ce683c10089d4db0efcda4bd
Merged at revision: f08dc80657cb9ef6f23ee18eea7cdc3076a7cc44
Proposed branch: ~fourdollars/pc-enablement/+git/oem-scripts:master
Merge into: ~oem-solutions-engineers/pc-enablement/+git/oem-scripts:master
Diff against target: 5933 lines (+2255/-1371)
22 files modified
bug-bind.py (+53/-30)
copyPackage.py (+23/-18)
debian/changelog (+19/-0)
debian/control (+1/-1)
debian/rules (+1/-1)
dev/null (+0/-16)
get-oem-auth-token (+16/-14)
get-oemshare-auth-token (+2/-2)
lp-bug (+143/-99)
mir-bug (+455/-224)
oem-getiso (+96/-73)
oem-meta-packages (+836/-457)
oem_scripts/LaunchpadLogin.py (+61/-39)
oem_scripts/logging.py (+30/-23)
pkg-list (+109/-68)
pkg-oem-meta (+235/-189)
rename-everything.py (+89/-60)
review-merge-proposal (+20/-6)
setup.py (+43/-42)
tests/test_black (+3/-0)
tests/test_bugbind.py (+12/-7)
tests/test_flake8 (+8/-2)
Reviewer: Bin Li
Status: Approve
Review via email: mp+404053@code.launchpad.net

Description of the change

There is no functional change. This only reformats the Python files with black and reworks the Python style checks to use flake8 and black.
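As a minimal illustration of the kind of change black makes here (lifted from the bug-bind.py hunk in the diff below), only the quote style and argument layout change; runtime behaviour stays the same:

import logging

# Old style in this tree: single quotes and hand-wrapped arguments.
#   logging.basicConfig(format='%(levelname)s %(asctime)s - %(message)s',
#                       datefmt='%m/%d/%Y %I:%M:%S %p')

# After black: double quotes, with the argument layout decided by the formatter.
logging.basicConfig(
    format="%(levelname)s %(asctime)s - %(message)s", datefmt="%m/%d/%Y %I:%M:%S %p"
)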

Revision history for this message
Bin Li (binli) wrote :

LGTM.

review: Approve
Revision history for this message
Shih-Yuan Lee (fourdollars) wrote :

[BOT]
$ cat oem-scripts-0.98-633af21-in-docker-focal-summary.log
autopkgtest-collect-credentials PASS
autopkgtest-oem-scripts-auto PASS
pkg-somerville-meta PASS
pkg-stella-meta PASS
pkg-sutton-meta PASS
bug-bind PASS
get-private-ppa PASS
jq-lp PASS
launchpad-api PASS
lp-bug PASS
oem-meta-packages PASS
pkg-list PASS
review-merge-proposal PASS
run-autopkgtest PASS
setup-apt-dir PASS
mir-bug SKIP exit status 77 and marked as skippable
git-url-insteadof-setting PASS
recovery-from-iso.sh PASS
mir-bug-verification PASS
https://paste.ubuntu.com/p/g3wxnW9kXF/ oem-scripts-0.98-633af21-in-docker-focal-complete.log

Preview Diff

diff --git a/bug-bind.py b/bug-bind.py
index 16056b6..60a9db0 100755
--- a/bug-bind.py
+++ b/bug-bind.py
@@ -9,18 +9,19 @@ import re
 import lazr.restfulclient.resource
 from oem_scripts.LaunchpadLogin import LaunchpadLogin

-HWE_PUBLIC_PROJECT = 'hwe-next'
-OEM_PUBLIC_PROJECT = 'oem-priority'
+HWE_PUBLIC_PROJECT = "hwe-next"
+OEM_PUBLIC_PROJECT = "oem-priority"

 lp = None
-log = logging.getLogger('bug-bind-logger')
+log = logging.getLogger("bug-bind-logger")
 log.setLevel(logging.DEBUG)
-logging.basicConfig(format='%(levelname)s %(asctime)s - %(message)s',
-                    datefmt='%m/%d/%Y %I:%M:%S %p')
+logging.basicConfig(
+    format="%(levelname)s %(asctime)s - %(message)s", datefmt="%m/%d/%Y %I:%M:%S %p"
+)


 def link_bugs(public_bugnum, privates, ihv):
-    assert(public_bugnum.isdigit())
+    assert public_bugnum.isdigit()
     login = LaunchpadLogin()
     lp = login.lp
     pub_bug = lp.bugs[public_bugnum]
@@ -29,7 +30,7 @@ def link_bugs(public_bugnum, privates, ihv):

     # Add X-HWE-Bug: tag to description.
     for priv in privates:
-        assert(priv.isdigit())
+        assert priv.isdigit()
         bug = lp.bugs[priv]

         if re.search(tag, bug.description) is None:
@@ -41,30 +42,35 @@ def link_bugs(public_bugnum, privates, ihv):

     if ihv == "hwe":
         hwe_next = lp.projects[HWE_PUBLIC_PROJECT]
-        sub_url = "%s~%s" % (lp._root_uri, 'canonical-hwe-team')
+        sub_url = "%s~%s" % (lp._root_uri, "canonical-hwe-team")
         pub_bug.subscribe(person=sub_url)
-        remote_bug_tag(pub_bug, 'hwe-needs-public-bug')
+        remote_bug_tag(pub_bug, "hwe-needs-public-bug")
     elif ihv == "swe":
         hwe_next = lp.projects[OEM_PUBLIC_PROJECT]
-        sub_url = "%s~%s" % (lp._root_uri, 'oem-solutions-engineers')
+        sub_url = "%s~%s" % (lp._root_uri, "oem-solutions-engineers")
         pub_bug.subscribe(person=sub_url)
-        remote_bug_tag(pub_bug, 'swe-needs-public-bug')
+        remote_bug_tag(pub_bug, "swe-needs-public-bug")
     else:
         if lp.projects[ihv]:
             hwe_next = lp.projects[ihv]
-            remote_bug_tag(pub_bug, 'hwe-needs-public-bug')
+            remote_bug_tag(pub_bug, "hwe-needs-public-bug")
         else:
-            log.error('Project ' + ihv + ' not defined')
+            log.error("Project " + ihv + " not defined")

-    add_bug_tags(pub_bug, ['originate-from-' + str(bug.id),
-                           bug.bug_tasks_collection[0].bug_target_name,  # OEM codename
-                           'oem-priority'])
+    add_bug_tags(
+        pub_bug,
+        [
+            "originate-from-" + str(bug.id),
+            bug.bug_tasks_collection[0].bug_target_name,  # OEM codename
+            "oem-priority",
+        ],
+    )

     add_bug_task(pub_bug, hwe_next)


 def link_priv_bugs(main_bugnum, privates, ihv):
-    assert(main_bugnum.isdigit())
+    assert main_bugnum.isdigit()
     login = LaunchpadLogin()
     lp = login.lp
     main_bug = lp.bugs[main_bugnum]
@@ -73,7 +79,7 @@ def link_priv_bugs(main_bugnum, privates, ihv):

     # Add X-HWE-Bug: tag to description.
     for priv in privates:
-        assert(priv.isdigit())
+        assert priv.isdigit()
         bug = lp.bugs[priv]

         if re.search(tag, bug.description) is None:
@@ -83,20 +89,20 @@ def link_priv_bugs(main_bugnum, privates, ihv):
         else:
             log.warning("Bug already linked to main bug " + tag)

-    add_bug_tags(main_bug, ['originate-from-' + str(bug.id)])
+    add_bug_tags(main_bug, ["originate-from-" + str(bug.id)])


 def add_bug_task(bug, bug_task):
-    assert(type(bug_task) == lazr.restfulclient.resource.Entry)
+    assert type(bug_task) == lazr.restfulclient.resource.Entry

     # Check if already have the requested
     for i in bug.bug_tasks:
         if bug_task.name == i.bug_target_name:
-            log.warning('Also-affects on {} already complete.'.format(bug_task))
+            log.warning("Also-affects on {} already complete.".format(bug_task))
             return
     bug.addTask(target=bug_task)
     bug.lp_save()
-    log.info('Also-affects on {} successful.'.format(bug_task))
+    log.info("Also-affects on {} successful.".format(bug_task))


 def remote_bug_tag(bug, tag):
@@ -110,7 +116,7 @@ def remote_bug_tag(bug, tag):

 def add_bug_tags(bug, tags):
     """ add tags to the bug. """
-    log.info('Add tags {} to bug {}'.format(tags, bug.web_link))
+    log.info("Add tags {} to bug {}".format(tags, bug.web_link))
     new_tags = []
     for tag_to_add in tags:
         if tag_to_add not in bug.tags:
@@ -119,7 +125,7 @@ def add_bug_tags(bug, tags):
     bug.lp_save()


-if __name__ == '__main__':
+if __name__ == "__main__":
     description = """bind private bugs with pubilc bug
 bud-bind -p bugnumber private_bugnumber1 private_bugnumber2"""
     help = """The expected live cycle of an oem-priority bug is:
@@ -127,11 +133,26 @@ bud-bind -p bugnumber private_bugnumber1 private_bugnumber2"""
 2. SWE/HWE manually create a public bug.
 3. Use bug-bind to bind public and private bug."""

-    parser = argparse.ArgumentParser(description=description, epilog=help, formatter_class=argparse.RawDescriptionHelpFormatter)
-    parser.add_argument('-m', '--main', help='main bug for private bugs')
-    parser.add_argument('-p', '--public', help='The public bug number')
-    parser.add_argument('-i', '--ihv', help='Launchpad project name for IHV\nExpecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex"', default='swe')
-    parser.add_argument('-v', '--vebose', help='shows debug messages', action='store_true', default=False)
+    parser = argparse.ArgumentParser(
+        description=description,
+        epilog=help,
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+    parser.add_argument("-m", "--main", help="main bug for private bugs")
+    parser.add_argument("-p", "--public", help="The public bug number")
+    parser.add_argument(
+        "-i",
+        "--ihv",
+        help='Launchpad project name for IHV\nExpecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex"',
+        default="swe",
+    )
+    parser.add_argument(
+        "-v",
+        "--vebose",
+        help="shows debug messages",
+        action="store_true",
+        default=False,
+    )
     # TODO
     # parser.add_argument('-c', '--clean', help='unlnk the bug between public and private', action='store_true', default=False)

@@ -139,7 +160,9 @@ bud-bind -p bugnumber private_bugnumber1 private_bugnumber2"""
     if args.vebose:
         log.setLevel(logging.DEBUG)
     if args.ihv not in ["swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex"]:
-        raise Exception('Expecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex" for ihv')
+        raise Exception(
+            'Expecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex" for ihv'
+        )
     if len(private_bugs) == 0:
         parser.error("must provide private bug numbers.")
     if args.main:
diff --git a/copyPackage.py b/copyPackage.py
index 5358674..c6c0b5c 100755
--- a/copyPackage.py
+++ b/copyPackage.py
@@ -1,7 +1,8 @@
 #!/usr/bin/python
 import sys
 from launchpadlib.launchpad import Launchpad
-launchpad = Launchpad.login_with('test', "production")
+
+launchpad = Launchpad.login_with("test", "production")


 def getValueWithDefault(prompt, default):
@@ -10,8 +11,9 @@ def getValueWithDefault(prompt, default):
     return result and result or default


-from_pocket = getValueWithDefault("From Pocket (Proposed|Updates|Release...)?",
-                                  "Proposed")
+from_pocket = getValueWithDefault(
+    "From Pocket (Proposed|Updates|Release...)?", "Proposed"
+)

 team = None
 while not team:
@@ -29,8 +31,7 @@ while not ppa:
     except e:
         print("Invalid ppa name")

-to_pocket = getValueWithDefault("To Pocket (Proposed|Updates|Release...)?",
-                                "Release")
+to_pocket = getValueWithDefault("To Pocket (Proposed|Updates|Release...)?", "Release")
 to_series = getValueWithDefault("To Series?", "precise")

 # Get link to ubuntu archive
@@ -42,33 +43,37 @@ while True:

     # View packages in ubuntu archive
     pkgs = archive.getPublishedSources(
-        source_name=package_name, pocket=from_pocket, status="Published")
+        source_name=package_name, pocket=from_pocket, status="Published"
+    )

     while True:
         print("\n----")
         names = [p.display_name for p in pkgs]
         for i, name in enumerate(names):
-            print " %d: %s" % (i, name)
+            print(" %d: %s" % (i, name))
         print("----\n")
-        i = raw_input("Enter pkg to transfer (0..%d/[Q]uit/[a]nother)> "
-                      % (len(names) - 1))
+        i = raw_input(
+            "Enter pkg to transfer (0..%d/[Q]uit/[a]nother)> " % (len(names) - 1)
+        )
         try:
             pkg = pkgs[int(i)]

             print("Ready to copy package %s" % pkg.display_name)
-            if raw_input("Confirm: [Y/n]").lower()[:1] != 'n':
+            if raw_input("Confirm: [Y/n]").lower()[:1] != "n":
                 pass
-            ppa.syncSource(from_archive=archive,
-                           include_binaries=True,
-                           source_name=pkg.display_name.split()[0],
-                           to_pocket=to_pocket,
-                           to_series=to_series,
-                           version=pkg.source_package_version)
+            ppa.syncSource(
+                from_archive=archive,
+                include_binaries=True,
+                source_name=pkg.display_name.split()[0],
+                to_pocket=to_pocket,
+                to_series=to_series,
+                version=pkg.source_package_version,
+            )

         except (ValueError, IndexError):
-            if i.lower()[:1] == 'q':
+            if i.lower()[:1] == "q":
                 print("Quitting")
                 sys.exit(0)
-            if i.lower()[:1] == 'a':
+            if i.lower()[:1] == "a":
                 break
             print("invalid input\n")
diff --git a/debian/changelog b/debian/changelog
index da72513..02a4499 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,22 @@
+oem-scripts (0.98) UNRELEASED; urgency=medium
+
+  * Black all Python files.
+  * debian/control,
+    debian/rules: Use black to check all Python files.
+  * tests/test_flake8: Ignore W503 because it is incompatible with PEP 8.
+  * tests/test_flake8: Ignore E203 because flake8 doesn't deal with it well
+    and black will cover it.
+  * debian/rules,
+    tests/test_black: Move the black check script out of debian/rules so
+    people can use it to test Python files directly.
+  * debian/control,
+    debian/rules,
+    tests/test_flake8,
+    tests/test_pep8: Remove the pep8 check because flake8 will use
+    pycodestyle (formerly called pep8) to check.
+
+ -- Shih-Yuan Lee (FourDollars) <sylee@canonical.com>  Wed, 09 Jun 2021 20:55:21 +0800
+
 oem-scripts (0.97) focal; urgency=medium

   * pkg-oem-meta: Add linux-oem-20.04c parameter support
diff --git a/debian/control b/debian/control
index f9e9d76..b805787 100644
--- a/debian/control
+++ b/debian/control
@@ -2,10 +2,10 @@ Source: oem-scripts
 Section: admin
 Priority: optional
 Build-Depends:
+ black,
  debhelper (>=11),
  dh-python,
  flake8,
- pep8,
  python3-all,
  python3-debian,
  python3-launchpadlib,
diff --git a/debian/rules b/debian/rules
index a739670..98284ec 100755
--- a/debian/rules
+++ b/debian/rules
@@ -5,6 +5,6 @@
 	dh $@ --with python3 --buildsystem=pybuild

 override_dh_auto_test:
+	./tests/test_black
 	./tests/test_flake8
-	./tests/test_pep8
 	./tests/test_shellcheck
diff --git a/get-oem-auth-token b/get-oem-auth-token
index d3e8428..2e8c679 100755
--- a/get-oem-auth-token
+++ b/get-oem-auth-token
@@ -23,7 +23,7 @@ def prompt_for_credentials():
     """
     Return username and password collected from stdin.
     """
-    print("\rEmail: ", file=sys.stderr, end='')
+    print("\rEmail: ", file=sys.stderr, end="")
     username = input()
     password = getpass.getpass(stream=sys.stderr)
     return username, password
@@ -41,7 +41,7 @@ def prompt_for_code():
     """
     Return code collected from stdin.
     """
-    print("\r2FA Code: ", file=sys.stderr, end='')
+    print("\r2FA Code: ", file=sys.stderr, end="")
     return input()


@@ -52,14 +52,16 @@ def get_session_cookie(browser=get_browser(), args=None):
     """

     prop = {
-        'oem-ibs': {
+        "oem-ibs": {
             "name": "oem-ibs.canonical.com",
             "url": "https://oem-ibs.canonical.com/builds/",
-            "session": "sessionid"},
-        'oem-share': {
+            "session": "sessionid",
+        },
+        "oem-share": {
             "name": "oem-share.canonical.com",
             "url": "https://oem-share.canonical.com/oem/cesg-builds/",
-            "session": "pysid"}
+            "session": "pysid",
+        },
     }

     bad_creds = False
@@ -75,8 +77,7 @@ def get_session_cookie(browser=get_browser(), args=None):
         return
     server = prop[args.server]

-    print("Authenticating %s with Ubuntu SSO..." % args.server,
-          file=sys.stderr)
+    print("Authenticating %s with Ubuntu SSO..." % args.server, file=sys.stderr)
     browser.open(server["url"])
     # apachd2-openid
     try:
@@ -144,15 +145,16 @@ def get_session_cookie(browser=get_browser(), args=None):

 def main():
     parser = argparse.ArgumentParser(
-        description='Retrieve Canonical internal websites session ID',
-        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
-    parser.add_argument("server",
-                        help="Specify server identifier [oem-ibs|oem-share]")
+        description="Retrieve Canonical internal websites session ID",
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+    )
+    parser.add_argument("server", help="Specify server identifier [oem-ibs|oem-share]")
     parser.add_argument("-u", "--username", help="Specify user's email")
     parser.add_argument("-p", "--password", help="Specify password")
     parser.add_argument("-c", "--code", help="Specify 2-factor code")
-    parser.add_argument("-r", "--retry", help="Sepcify authentication retry",
-                        type=int, default=3)
+    parser.add_argument(
+        "-r", "--retry", help="Sepcify authentication retry", type=int, default=3
+    )
     args = parser.parse_args()
     result = 1
     try:
diff --git a/get-oemshare-auth-token b/get-oemshare-auth-token
index 7e1379b..182649f 100755
--- a/get-oemshare-auth-token
+++ b/get-oemshare-auth-token
@@ -18,7 +18,7 @@ def prompt_for_credentials():
     """
     Return username and password collected from stdin.
     """
-    print("\rEmail: ", file=sys.stderr, end='')
+    print("\rEmail: ", file=sys.stderr, end="")
     username = input()
     password = getpass.getpass(stream=sys.stderr)
     return username, password
@@ -28,7 +28,7 @@ def prompt_for_code():
     """
     Return code collected from stdin.
     """
-    print("\r2FA Code: ", file=sys.stderr, end='')
+    print("\r2FA Code: ", file=sys.stderr, end="")
     return input()


diff --git a/lp-bug b/lp-bug
index 6bfb087..e76abb2 100755
--- a/lp-bug
+++ b/lp-bug
@@ -30,77 +30,94 @@ from logging import debug, warning, info, critical
 from oem_scripts.LaunchpadLogin import LaunchpadLogin
 from tempfile import TemporaryDirectory

-parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
-                                 epilog="""
+parser = argparse.ArgumentParser(
+    formatter_class=argparse.RawDescriptionHelpFormatter,
+    epilog="""
 examples:
  lp-bug copy --output=target_bug_id SOURCE_BUG_ID
  lp-bug cleanup BUG_ID
- lp-bug cqa-verify [BUG_ID]""")
+ lp-bug cqa-verify [BUG_ID]""",
+)

-parser.add_argument("-d", "--debug",
-                    help="print debug messages", action="store_true")
-parser.add_argument("-q", "--quiet",
-                    help="Don't print info messages", action="store_true")
+parser.add_argument("-d", "--debug", help="print debug messages", action="store_true")
+parser.add_argument(
+    "-q", "--quiet", help="Don't print info messages", action="store_true"
+)

 subparsers = parser.add_subparsers(dest="subcommand")

-copy = subparsers.add_parser('copy', help='[-h] [-o=targetBugID|--output=targetBugID] sourceBugID')
-copy.add_argument("-o", "--output",
-                  help="Specify a file name to write the bug number.",
-                  type=argparse.FileType('w', encoding='UTF-8'))
-copy.add_argument("-t", "--target",
-                  help="Specify the target project, 'oem-priority' by default.",
-                  type=str)
-copy.add_argument("bugID",
-                  help="Specify the bug number on Launchpad to copy from.", type=int)
-copy.add_argument("--public",
-                  help="Make the bug public.", action="store_true")
-
-cleanup = subparsers.add_parser('cleanup', help='[-h] [--yes] bugID')
-cleanup.add_argument("bugID",
-                     help="Specify the bug number on Launchpad to clean up.", type=int)
-cleanup.add_argument("--yes",
-                     help="Say yes for all prompts.", action="store_true")
-
-cqa_verify = subparsers.add_parser('cqa-verify', help='[-h] [--yes] [--dry-run] [bugID]',
-                                   formatter_class=argparse.RawDescriptionHelpFormatter,
-                                   epilog="""
-The 'cqa-verify' subcommand will check the versions in the production archive automatically.""")
-cqa_verify.add_argument("--yes",
-                        help="Say yes for all prompts.", action="store_true")
-cqa_verify.add_argument("--dry-run",
-                        help="Dry run the process.", action="store_true")
-cqa_verify.add_argument('bugID', nargs='?', type=int)
+copy = subparsers.add_parser(
+    "copy", help="[-h] [-o=targetBugID|--output=targetBugID] sourceBugID"
+)
+copy.add_argument(
+    "-o",
+    "--output",
+    help="Specify a file name to write the bug number.",
+    type=argparse.FileType("w", encoding="UTF-8"),
+)
+copy.add_argument(
+    "-t",
+    "--target",
+    help="Specify the target project, 'oem-priority' by default.",
+    type=str,
+)
+copy.add_argument(
+    "bugID", help="Specify the bug number on Launchpad to copy from.", type=int
+)
+copy.add_argument("--public", help="Make the bug public.", action="store_true")
+
+cleanup = subparsers.add_parser("cleanup", help="[-h] [--yes] bugID")
+cleanup.add_argument(
+    "bugID", help="Specify the bug number on Launchpad to clean up.", type=int
+)
+cleanup.add_argument("--yes", help="Say yes for all prompts.", action="store_true")
+
+cqa_verify = subparsers.add_parser(
+    "cqa-verify",
+    help="[-h] [--yes] [--dry-run] [bugID]",
+    formatter_class=argparse.RawDescriptionHelpFormatter,
+    epilog="""
+The 'cqa-verify' subcommand will check the versions in the production archive automatically.""",
+)
+cqa_verify.add_argument("--yes", help="Say yes for all prompts.", action="store_true")
+cqa_verify.add_argument("--dry-run", help="Dry run the process.", action="store_true")
+cqa_verify.add_argument("bugID", nargs="?", type=int)

 args = parser.parse_args()

-logging.addLevelName(logging.DEBUG,
-                     "\033[1;96m%s\033[1;0m" %
-                     logging.getLevelName(logging.DEBUG))
-logging.addLevelName(logging.INFO,
-                     "\033[1;32m%s\033[1;0m" %
-                     logging.getLevelName(logging.INFO))
-logging.addLevelName(logging.WARNING,
-                     "\033[1;33m%s\033[1;0m" %
-                     logging.getLevelName(logging.WARNING))
-logging.addLevelName(logging.ERROR,
-                     "\033[1;31m%s\033[1;0m" %
-                     logging.getLevelName(logging.ERROR))
-logging.addLevelName(logging.CRITICAL,
-                     "\033[1;41m%s\033[1;0m" %
-                     logging.getLevelName(logging.CRITICAL))
+logging.addLevelName(
+    logging.DEBUG, "\033[1;96m%s\033[1;0m" % logging.getLevelName(logging.DEBUG)
+)
+logging.addLevelName(
+    logging.INFO, "\033[1;32m%s\033[1;0m" % logging.getLevelName(logging.INFO)
+)
+logging.addLevelName(
+    logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING)
+)
+logging.addLevelName(
+    logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR)
+)
+logging.addLevelName(
+    logging.CRITICAL, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.CRITICAL)
+)

 if args.debug:
-    logging.basicConfig(format='<%(levelname)s> %(message)s',
-                        level=logging.DEBUG,
-                        handlers=[logging.StreamHandler(sys.stdout)])
+    logging.basicConfig(
+        format="<%(levelname)s> %(message)s",
+        level=logging.DEBUG,
+        handlers=[logging.StreamHandler(sys.stdout)],
+    )
 elif not args.quiet:
-    logging.basicConfig(format='<%(levelname)s> %(message)s',
-                        level=logging.INFO,
-                        handlers=[logging.StreamHandler(sys.stdout)])
+    logging.basicConfig(
+        format="<%(levelname)s> %(message)s",
+        level=logging.INFO,
+        handlers=[logging.StreamHandler(sys.stdout)],
+    )
 else:
-    logging.basicConfig(format='<%(levelname)s> %(message)s',
-                        handlers=[logging.StreamHandler(sys.stdout)])
+    logging.basicConfig(
+        format="<%(levelname)s> %(message)s",
+        handlers=[logging.StreamHandler(sys.stdout)],
+    )


 def _yes_or_ask(yes: bool, message: str) -> bool:
@@ -111,7 +128,7 @@ def _yes_or_ask(yes: bool, message: str) -> bool:
         res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower()
         if res not in {"y", "n"}:
             continue
-        if res == 'y':
+        if res == "y":
             return True
         else:
             return False
@@ -125,7 +142,7 @@ def copy_bug(lp, bug_number: int, target: str, public: bool, output=None) -> Non
     project = lp.projects["oem-priority"]

     if public:
-        information_type = 'Public'
+        information_type = "Public"
     else:
         information_type = bug.information_type

@@ -134,7 +151,8 @@ def copy_bug(lp, bug_number: int, target: str, public: bool, output=None) -> Non
         target=project,
         title=bug.title,
         information_type=information_type,
-        tags=bug.tags)
+        tags=bug.tags,
+    )
     info(f'LP: #{new_bug.id} - "{new_bug.title}" is created. {new_bug.web_link}')
     if output:
         output.write(f"{new_bug.id}\n")
@@ -142,7 +160,10 @@ def copy_bug(lp, bug_number: int, target: str, public: bool, output=None) -> Non

 def cleanup_bug(lp, bug_number: int, yes: bool) -> None:
     bug = lp.bugs[bug_number]
-    if not _yes_or_ask(yes, f'Do you want to cleanup all information on LP: #{bug_number} - "{bug.title}"? {bug.web_link}'):
+    if not _yes_or_ask(
+        yes,
+        f'Do you want to cleanup all information on LP: #{bug_number} - "{bug.title}"? {bug.web_link}',
+    ):
         return

     if bug.title != "null":
@@ -159,14 +180,17 @@ def cleanup_bug(lp, bug_number: int, yes: bool) -> None:
     found = False

     for bug_task in bug.bug_tasks:
-        if bug_task.bug_target_name == 'null-and-void':
+        if bug_task.bug_target_name == "null-and-void":
             found = True

-    if not found and bug.information_type == 'Public':
+    if not found and bug.information_type == "Public":
         bug.addTask(target=lp.projects["null-and-void"])

     for bug_task in bug.bug_tasks:
-        if bug_task.bug_target_name != 'null-and-void' and bug.information_type == 'Public':
+        if (
+            bug_task.bug_target_name != "null-and-void"
+            and bug.information_type == "Public"
+        ):
             try:
                 bug_task.lp_delete()
             except lazr.restfulclient.errors.BadRequest as e:
@@ -183,21 +207,27 @@ def cleanup_bug(lp, bug_number: int, yes: bool) -> None:
         if subscription.canBeUnsubscribedByUser():
             bug.unsubscribe(person=lp.people[subscription.person.name])
         else:
-            warning(f"{lp.me.name} doesn't have the permission to unsubscribe {subscription.person.name}.")
+            warning(
+                f"{lp.me.name} doesn't have the permission to unsubscribe {subscription.person.name}."
+            )

-    info(f'LP: #{bug.id} has been cleaned. {bug.web_link}')
+    info(f"LP: #{bug.id} has been cleaned. {bug.web_link}")


-def _run_command(command: list or tuple, returncode=(0,), env=None, silent=False) -> (str, str, int):
+def _run_command(
+    command: list or tuple, returncode=(0,), env=None, silent=False
+) -> (str, str, int):
     if not silent:
         debug("$ " + " ".join(command))
-    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
+    proc = subprocess.Popen(
+        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
+    )
     out, err = proc.communicate()

     if out:
-        out = out.decode('utf-8').strip()
+        out = out.decode("utf-8").strip()
     if err:
-        err = err.decode('utf-8').strip()
+        err = err.decode("utf-8").strip()

     if proc.returncode not in returncode:
         critical(f"return {proc.returncode}")
@@ -216,44 +246,46 @@ def _run_command(command: list or tuple, returncode=(0,), env=None, silent=False
     return (out, err, proc.returncode)


-pattern = re.compile(r'(.*) \(==(.*)\)')
+pattern = re.compile(r"(.*) \(==(.*)\)")


 def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None:
-    cloudberry = lp.projects['cloudberry']
+    cloudberry = lp.projects["cloudberry"]
     # Only deal with those bugs with 'Fix Committed' and 'request of publish_package' in the title.
-    tasks = cloudberry.searchTasks(status=['Fix Committed'], search_text='request of publish_package')
+    tasks = cloudberry.searchTasks(
+        status=["Fix Committed"], search_text="request of publish_package"
+    )
     for task in tasks:
         bug = task.bug
         # Only deal with one bug id when it is provided.
         if bugID and bug.id != bugID:
             continue
         # Only deal with those bugs with this tag.
-        if 'cqa-verified-staging' not in bug.tags:
+        if "cqa-verified-staging" not in bug.tags:
             continue
         info(f'LP: #{bug.id} "{bug.title}"\n{bug.description}')
         debug(bug.tags)
         multiple = False
         packages = []
         prod_archive_line = ""
-        lines = bug.description.split('\n')
+        lines = bug.description.split("\n")
         # Parse the package list and the production archive in the bug description.
         for idx, line in enumerate(lines):
-            if line.startswith('Package: '):
+            if line.startswith("Package: "):
                 debug(line)
-                if line.endswith(','):
+                if line.endswith(","):
                     multiple = True
                     packages.append(line[9:-1])
                 else:
-                    packages = line[9:].split(',')
+                    packages = line[9:].split(",")
             elif multiple is True:
                 debug(line)
-                if not line.endswith(','):
+                if not line.endswith(","):
                     multiple = False
                     packages.append(line.strip())
                 else:
                     packages.append(line.strip()[:-1])
-            elif 'production archive' in line:
+            elif "production archive" in line:
                 prod_archive_line = lines[idx + 2]
         # Skip the bug when it found no production archive.
         if not prod_archive_line:
@@ -272,9 +304,9 @@ def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None:
         # Check if the production archive provided the packages and versions.
         with TemporaryDirectory() as tmpdir:
             failed = False
-            fingerprint = 'F9FDA6BED73CDC22'
-            series = ['focal', 'bionic', 'xenial']
-            codename = ''
+            fingerprint = "F9FDA6BED73CDC22"
+            series = ["focal", "bionic", "xenial"]
+            codename = ""
             for item in series:
                 if item in prod_archive_line:
                     codename = item
@@ -284,23 +316,32 @@ def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None:
                 continue
             # Setup the temporary apt dir to include the production archive.
             output, _, returncode = _run_command(
-                ['setup-apt-dir.sh',
-                 '-c', codename,
-                 '--disable-updates',
-                 '--disable-backports',
-                 '--apt-dir', tmpdir,
-                 '--extra-key', fingerprint,
-                 '--extra-repo', prod_archive_line.replace("deb ", f"deb [signed-by={tmpdir}/{fingerprint}.pub] ")
-                ], returncode=(0, 100))
+                [
+                    "setup-apt-dir.sh",
+                    "-c",
+                    codename,
+                    "--disable-updates",
+                    "--disable-backports",
+                    "--apt-dir",
+                    tmpdir,
+                    "--extra-key",
+                    fingerprint,
+                    "--extra-repo",
+                    prod_archive_line.replace(
+                        "deb ", f"deb [signed-by={tmpdir}/{fingerprint}.pub] "
+                    ),
+                ],
+                returncode=(0, 100),
+            )
             # Skip the bug when it found some error in the production archive.
             if returncode == 100:
                 warning(output)
                 continue
             # Use the temporary apt dir to compare the package versions.
             for pkg, ver in packages:
-                output, _, _ = _run_command(['pkg-list', '--apt-dir', tmpdir, pkg])
-                for line in output.split('\n'):
-                    archive_pkg, archive_ver = line.split(' ')
+                output, _, _ = _run_command(["pkg-list", "--apt-dir", tmpdir, pkg])
+                for line in output.split("\n"):
+                    archive_pkg, archive_ver = line.split(" ")
                     if pkg == archive_pkg:
                         if apt_pkg.version_compare(archive_ver, ver) >= 0:
                             print(f"{line} >= {ver}")
@@ -309,9 +350,12 @@ def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None:
                             failed = True
         # Tag "cqa-verified" if no failure.
         if not failed:
-            if not args.dry_run and _yes_or_ask(yes, f'Would you like to tag "cqa-verified" for LP: #{bug.id} "{bug.title}"?'):
+            if not args.dry_run and _yes_or_ask(
+                yes,
+                f'Would you like to tag "cqa-verified" for LP: #{bug.id} "{bug.title}"?',
+            ):
                 tags = bug.tags.copy()
-                tags.append('cqa-verified')
+                tags.append("cqa-verified")
                 if f"oem-scripts-{oem_scripts.__version__}" not in tags:
                     tags.append(f"oem-scripts-{oem_scripts.__version__}")
                 bug.tags = tags
@@ -322,11 +366,11 @@ if args.subcommand:
     login = LaunchpadLogin()
     lp = login.lp

-if args.subcommand == 'copy':
+if args.subcommand == "copy":
     copy_bug(lp, args.bugID, output=args.output, target=args.target, public=args.public)
-elif args.subcommand == 'cleanup':
+elif args.subcommand == "cleanup":
     cleanup_bug(lp, args.bugID, args.yes)
-elif args.subcommand == 'cqa-verify':
+elif args.subcommand == "cqa-verify":
     cloudberry_cqa_verified(lp, args.yes, args.bugID)
 else:
     parser.print_help()
diff --git a/mir-bug b/mir-bug
index dee3a25..9683a3b 100755
--- a/mir-bug
+++ b/mir-bug
@@ -36,75 +36,108 @@ from oem_scripts.logging import setup_logging
36from string import Template36from string import Template
37from tempfile import TemporaryDirectory37from tempfile import TemporaryDirectory
3838
39SUBSCRIBER_LIST = ('oem-solutions-engineers', 'ubuntu-sponsors', 'ubuntu-desktop')39SUBSCRIBER_LIST = ("oem-solutions-engineers", "ubuntu-sponsors", "ubuntu-desktop")
40TAG_LIST = ('oem-meta-packages', 'oem-priority', f'oem-scripts-{oem_scripts.__version__:.2f}')40TAG_LIST = (
4141 "oem-meta-packages",
42parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,42 "oem-priority",
43 epilog="""43 f"oem-scripts-{oem_scripts.__version__:.2f}",
44)
45
46parser = argparse.ArgumentParser(
47 formatter_class=argparse.RawDescriptionHelpFormatter,
48 epilog="""
44examples:49examples:
45 mir-bug create sutton.newell ace \"ThinkPad X1 Carbon Gen 8\"50 mir-bug create sutton.newell ace \"ThinkPad X1 Carbon Gen 8\"
46 mir-bug check BUG_NUMBER51 mir-bug check BUG_NUMBER
47 mir-bug update BUG_NUMBER52 mir-bug update BUG_NUMBER
48 mir-bug collect oem-meta-mir-bugs.json""")53 mir-bug collect oem-meta-mir-bugs.json""",
54)
4955
50parser.add_argument("-d", "--debug",56parser.add_argument("-d", "--debug", help="print debug messages", action="store_true")
51 help="print debug messages", action="store_true")57parser.add_argument(
52parser.add_argument("-q", "--quiet",58 "-q", "--quiet", help="Don't print info messages", action="store_true"
53 help="Don't print info messages", action="store_true")59)
5460
55subparsers = parser.add_subparsers(dest="subcommand")61subparsers = parser.add_subparsers(dest="subcommand")
5662
57create = subparsers.add_parser('create', help='[-h] [-o=bugID|--output=bugID] oemCodename platformCodename deviceName')63create = subparsers.add_parser(
58create.add_argument("oemCodename",64 "create",
59 help="Such as somerville, stella, or sutton.simon")65 help="[-h] [-o=bugID|--output=bugID] oemCodename platformCodename deviceName",
60create.add_argument("platformCodename",66)
61 help="Name deined by PM, like ace.")67create.add_argument("oemCodename", help="Such as somerville, stella, or sutton.simon")
62create.add_argument("deviceName",68create.add_argument("platformCodename", help="Name deined by PM, like ace.")
63 help="ThinkPad X1 Carbon Gen 8")69create.add_argument("deviceName", help="ThinkPad X1 Carbon Gen 8")
64create.add_argument("-o", "--output",70create.add_argument(
65 help="Specify a file name to write the bug number.",71 "-o",
66 type=argparse.FileType('w', encoding='UTF-8'))72 "--output",
6773 help="Specify a file name to write the bug number.",
68update = subparsers.add_parser('update', help='[-h] [--ready] [--skip] [--tz=UTC-8] [--yes] bugNumber')74 type=argparse.FileType("w", encoding="UTF-8"),
69update.add_argument("bugNumber",75)
70 help="Specify the bug number on Launchpad to update.", type=int)76
71update.add_argument("--yes",77update = subparsers.add_parser(
72 help="Say yes for all prompts.", action="store_true")78 "update", help="[-h] [--ready] [--skip] [--tz=UTC-8] [--yes] bugNumber"
73update.add_argument("--skip",79)
74 help="Skip updating bootstrap branch of Git repository.", action="store_true")80update.add_argument(
75update.add_argument("--tz",81 "bugNumber", help="Specify the bug number on Launchpad to update.", type=int
76 help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8")82)
77update.add_argument("--ready",83update.add_argument("--yes", help="Say yes for all prompts.", action="store_true")
78 action="store_true",84update.add_argument(
79 help="Update the bug to Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.")85 "--skip",
8086 help="Skip updating bootstrap branch of Git repository.",
81check = subparsers.add_parser('check', help='[-h] [--ready] [--skip] [--tz=UTC-8] bugNumber')87 action="store_true",
82check.add_argument("bugNumber",88)
83 help="Specify the bug number on Launchpad to do some sanity checks.", type=int)89update.add_argument(
84check.add_argument("--skip",90 "--tz", help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8"
85 help="Skip checking oem branch of Git repository.", action="store_true")91)
86check.add_argument("--tz",92update.add_argument(
87 help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8")93 "--ready",
88check.add_argument("--ready",94 action="store_true",
89 action="store_true",95 help="Update the bug to Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.",
90 help="Check if the bug is Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.")96)
9197
92collect = subparsers.add_parser('collect', help='[-h] [--ubuntu-certified] jsonFile')98check = subparsers.add_parser(
93collect.add_argument("json",99 "check", help="[-h] [--ready] [--skip] [--tz=UTC-8] bugNumber"
94 help="Specify the json file name to write.",100)
95 type=argparse.FileType('w', encoding='UTF-8'))101check.add_argument(
96collect.add_argument("--ubuntu-certified",102 "bugNumber",
97 action="store_true",103 help="Specify the bug number on Launchpad to do some sanity checks.",
98 help="Only collect those bugs with the 'ubuntu-certified' tag.")104 type=int,
99collect.add_argument("--verification-needed",105)
100 action="store_true",106check.add_argument(
101 help="Only collect those bugs with the 'verification-needed' tag.")107 "--skip", help="Skip checking oem branch of Git repository.", action="store_true"
108)
109check.add_argument(
110 "--tz", help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8"
111)
112check.add_argument(
113 "--ready",
114 action="store_true",
115 help="Check if the bug is Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.",
116)
117
118collect = subparsers.add_parser("collect", help="[-h] [--ubuntu-certified] jsonFile")
119collect.add_argument(
120 "json",
121 help="Specify the json file name to write.",
122 type=argparse.FileType("w", encoding="UTF-8"),
123)
124collect.add_argument(
125 "--ubuntu-certified",
126 action="store_true",
127 help="Only collect those bugs with the 'ubuntu-certified' tag.",
128)
129collect.add_argument(
130 "--verification-needed",
131 action="store_true",
132 help="Only collect those bugs with the 'verification-needed' tag.",
133)
102134
103args = parser.parse_args()135args = parser.parse_args()
104136
105setup_logging(debug=args.debug, quiet=args.quiet)137setup_logging(debug=args.debug, quiet=args.quiet)
106138
107mir_bug_description_template = Template(f"""[Availability]139mir_bug_description_template = Template(
140 f"""[Availability]
108This is a meta package for https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM that means the package doesn't exist in Debian or Ubuntu archive yet.141This is a meta package for https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM that means the package doesn't exist in Debian or Ubuntu archive yet.
109The source code of the $metaPkgName for focal:142The source code of the $metaPkgName for focal:
110 git clone -b $branchName https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-$oemCodenameNogroup-projects-meta143 git clone -b $branchName https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-$oemCodenameNogroup-projects-meta
@@ -131,26 +164,27 @@ Canonical OEM Enablement Team will take care of the maintenance.
131[Background information]164[Background information]
132Please check https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM for details.165Please check https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM for details.
133166
134Please use "oem-metapackage-mir-check" in lp:ubuntu-archive-tools to verify this MIR against the reference package in the archive.""")167Please use "oem-metapackage-mir-check" in lp:ubuntu-archive-tools to verify this MIR against the reference package in the archive."""
168)
135169
136pattern = re.compile(r'.*\[MIR\]\W*oem-([^-]*)-(.*)-meta\W*')170pattern = re.compile(r".*\[MIR\]\W*oem-([^-]*)-(.*)-meta\W*")
137171
138172
139def create_bug(lp, oemCodename, platformCodename, deviceName):173def create_bug(lp, oemCodename, platformCodename, deviceName):
140 info("Creating bug...")174 info("Creating bug...")
141 tempList = oemCodename.split('.')175 tempList = oemCodename.split(".")
142 oemCodenameNogroup = tempList[0]176 oemCodenameNogroup = tempList[0]
143 if len(tempList) == 2:177 if len(tempList) == 2:
144 oemGroupName = tempList[1]178 oemGroupName = tempList[1]
145 else:179 else:
146 oemGroupName = ''180 oemGroupName = ""
147181
148 # metaPkgName's examples182 # metaPkgName's examples
149 # oem-somerville-metapod-meta183 # oem-somerville-metapod-meta
150 # oem-sutton.newell-ace-meta184 # oem-sutton.newell-ace-meta
151 metaPkgName = "oem-" + oemCodename + "-" + platformCodename + "-meta"185 metaPkgName = "oem-" + oemCodename + "-" + platformCodename + "-meta"
152186
153 if oemGroupName.strip() != '':187 if oemGroupName.strip() != "":
154 branchName = oemGroupName + "." + platformCodename + "-focal-ubuntu"188 branchName = oemGroupName + "." + platformCodename + "-focal-ubuntu"
155 else:189 else:
156 branchName = platformCodename + "-focal-ubuntu"190 branchName = platformCodename + "-focal-ubuntu"
@@ -162,8 +196,15 @@ def create_bug(lp, oemCodename, platformCodename, deviceName):
162 metaPkgName=metaPkgName,196 metaPkgName=metaPkgName,
163 branchName=branchName,197 branchName=branchName,
164 oemCodenameNogroup=oemCodenameNogroup,198 oemCodenameNogroup=oemCodenameNogroup,
165 deviceName=deviceName)199 deviceName=deviceName,
166 bug = lp.bugs.createBug(description=bd, target=project, title=bt, information_type='Public', tags=TAG_LIST)200 )
201 bug = lp.bugs.createBug(
202 description=bd,
203 target=project,
204 title=bt,
205 information_type="Public",
206 tags=TAG_LIST,
207 )
167208
168 info("meta package public bug: " + bug.web_link)209 info("meta package public bug: " + bug.web_link)
169210
@@ -171,23 +212,32 @@ def create_bug(lp, oemCodename, platformCodename, deviceName):
171 args.output.write(f"{bug.id}\n")212 args.output.write(f"{bug.id}\n")
172213
173 for task in bug.bug_tasks:214 for task in bug.bug_tasks:
174 task.status = 'Confirmed'215 task.status = "Confirmed"
175 task.importance = 'Critical'216 task.importance = "Critical"
176 # Assign to reporter by default217 # Assign to reporter by default
177 task.assignee = lp.me218 task.assignee = lp.me
178 task.lp_save()219 task.lp_save()
179220
180 # Subscribe the oem-solutions-engineers221 # Subscribe the oem-solutions-engineers
181 bug.subscribe(person=lp.people['oem-solutions-engineers'])222 bug.subscribe(person=lp.people["oem-solutions-engineers"])
182 bug.lp_save()223 bug.lp_save()
183224
184225
185def collect_bugs(lp, output):226def collect_bugs(lp, output):
186 info("Collecting bugs...")227 info("Collecting bugs...")
187 project = lp.projects['oem-priority']228 project = lp.projects["oem-priority"]
188 tasks = project.searchTasks(229 tasks = project.searchTasks(
189 status=['New', 'Incomplete', 'Triaged', 'Opinion', 'Confirmed', 'In Progress', 'Fix Committed'],230 status=[
190 search_text='[MIR]')231 "New",
232 "Incomplete",
233 "Triaged",
234 "Opinion",
235 "Confirmed",
236 "In Progress",
237 "Fix Committed",
238 ],
239 search_text="[MIR]",
240 )
191 try:241 try:
192 total = int(tasks.total_size)242 total = int(tasks.total_size)
193 except TypeError: # When the total size becomes more than 50, it won't return 'int' but 'ScalarValue' instead.243 except TypeError: # When the total size becomes more than 50, it won't return 'int' but 'ScalarValue' instead.
@@ -197,28 +247,38 @@ def collect_bugs(lp, output):
197 for counter, task in enumerate(tasks, 1):247 for counter, task in enumerate(tasks, 1):
198 bug = task.bug248 bug = task.bug
199249
200 if '[MIR]' not in bug.title or 'oem' not in bug.title or 'meta' not in bug.title:250 if (
201 info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT MATCHED**")251 "[MIR]" not in bug.title
252 or "oem" not in bug.title
253 or "meta" not in bug.title
254 ):
255 info(
256 f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT MATCHED**"
257 )
202 continue258 continue
203259
204 if args.ubuntu_certified and 'ubuntu-certified' not in bug.tags:260 if args.ubuntu_certified and "ubuntu-certified" not in bug.tags:
205 info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT CERTIFIED**")261 info(
262 f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT CERTIFIED**"
263 )
206 continue264 continue
207265
208 if args.verification_needed:266 if args.verification_needed:
209 verification_needed = False267 verification_needed = False
210 for tag in bug.tags:268 for tag in bug.tags:
211 if tag.startswith('verification-needed'):269 if tag.startswith("verification-needed"):
212 verification_needed = True270 verification_needed = True
213 if not verification_needed:271 if not verification_needed:
214 info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT VERIFICATION NEEDED**")272 info(
273 f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT VERIFICATION NEEDED**"
274 )
215 continue275 continue
216276
217 result = pattern.match(bug.title)277 result = pattern.match(bug.title)
218 git = None278 git = None
219 if result:279 if result:
220 if '.' in result.group(1):280 if "." in result.group(1):
221 project, group = result.group(1).split('.')281 project, group = result.group(1).split(".")
222 else:282 else:
223 project = result.group(1)283 project = result.group(1)
224 group = None284 group = None
@@ -234,32 +294,38 @@ def collect_bugs(lp, output):
234294
235 ubuntu_status = None295 ubuntu_status = None
236 for bug_task in bug.bug_tasks:296 for bug_task in bug.bug_tasks:
237 if bug_task.bug_target_name == 'ubuntu':297 if bug_task.bug_target_name == "ubuntu":
238 ubuntu_status = bug_task.status298 ubuntu_status = bug_task.status
239299
240 attachments = []300 attachments = []
241 for attachment in bug.attachments:301 for attachment in bug.attachments:
242 attachments.append({'title': attachment.title, 'data_link': attachment.data_link, 'type': attachment.type})302 attachments.append(
303 {
304 "title": attachment.title,
305 "data_link": attachment.data_link,
306 "type": attachment.type,
307 }
308 )
243 clip = {309 clip = {
244 'bug': "https://bugs.launchpad.net/bugs/%s" % bug.id,310 "bug": "https://bugs.launchpad.net/bugs/%s" % bug.id,
245 'link': bug.self_link,311 "link": bug.self_link,
246 'title': bug.title,312 "title": bug.title,
247 'importance': task.importance,313 "importance": task.importance,
248 'tag': bug.tags,314 "tag": bug.tags,
249 'description': bug.description,315 "description": bug.description,
250 'status': task.status,316 "status": task.status,
251 'ubuntu_status': ubuntu_status,317 "ubuntu_status": ubuntu_status,
252 'owner': task.owner.name,318 "owner": task.owner.name,
253 'assignee': task.assignee.name if task.assignee else 'none',319 "assignee": task.assignee.name if task.assignee else "none",
254 'subscriptions': subscriptions,320 "subscriptions": subscriptions,
255 'attachments': attachments,321 "attachments": attachments,
256 'git': git322 "git": git,
257 }323 }
258 bugs.append(clip)324 bugs.append(clip)
259 info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status})")325 info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status})")
260326
261 info("total: %d matched" % len(bugs))327 info("total: %d matched" % len(bugs))
262 output.write(json.dumps(bugs, sort_keys=True, separators=(',', ':')))328 output.write(json.dumps(bugs, sort_keys=True, separators=(",", ":")))
263 output.write("\n")329 output.write("\n")
264330
265331
@@ -271,7 +337,7 @@ def yes_or_ask(yes: bool, message: str) -> bool:
271 res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower()337 res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower()
272 if res not in {"y", "n"}:338 if res not in {"y", "n"}:
273 continue339 continue
274 if res == 'y':340 if res == "y":
275 return True341 return True
276 else:342 else:
277 return False343 return False
@@ -288,7 +354,9 @@ def update_bug(lp, bug_number: int, yes: bool) -> None:
288 bug_modified = True354 bug_modified = True
289 bug.description = desc355 bug.description = desc
290356
291 if check_bug_title(bug, pkg_name) is False and yes_or_ask(yes, "Do you want to update the bug title?"):357 if check_bug_title(bug, pkg_name) is False and yes_or_ask(
358 yes, "Do you want to update the bug title?"
359 ):
292 bug_modified = True360 bug_modified = True
293 bug.title = f"[MIR] {pkg_name}"361 bug.title = f"[MIR] {pkg_name}"
294362
@@ -298,11 +366,13 @@ def update_bug(lp, bug_number: int, yes: bool) -> None:
298366
299 if check_bug_importance(bug) is False:367 if check_bug_importance(bug) is False:
300 for task in bug.bug_tasks:368 for task in bug.bug_tasks:
301 if task.importance != 'Critical':369 if task.importance != "Critical":
302 if task.bug_target_name == 'oem-priority' and \370 if task.bug_target_name == "oem-priority" and yes_or_ask(
303 yes_or_ask(yes, f"Do you want to update the importance of {task.bug_target_name} from {task.importance} to Critical?"):371 yes,
372 f"Do you want to update the importance of {task.bug_target_name} from {task.importance} to Critical?",
373 ):
304 task_modified = True374 task_modified = True
305 task.importance = 'Critical'375 task.importance = "Critical"
306 task.lp_save()376 task.lp_save()
307377
308 update_bug_status(bug, yes)378 update_bug_status(bug, yes)
@@ -312,12 +382,14 @@ def update_bug(lp, bug_number: int, yes: bool) -> None:
312 if check_bug_tags(bug) is False:382 if check_bug_tags(bug) is False:
313 tags = copy(bug.tags)383 tags = copy(bug.tags)
314 for tag in TAG_LIST:384 for tag in TAG_LIST:
315 if tag not in bug.tags and yes_or_ask(yes, f"Do you want to add '{tag}' tag?"):385 if tag not in bug.tags and yes_or_ask(
386 yes, f"Do you want to add '{tag}' tag?"
387 ):
316 bug_modified = True388 bug_modified = True
317 tags.append(tag)389 tags.append(tag)
318 for tag in bug.tags:390 for tag in bug.tags:
319 if tag.startswith('oem-scripts-'):391 if tag.startswith("oem-scripts-"):
320 if tag[len("oem-scripts-"):] != f'{oem_scripts.__version__:.2f}':392 if tag[len("oem-scripts-") :] != f"{oem_scripts.__version__:.2f}":
321 if yes_or_ask(yes, f"Do you want to remove '{tag}' tag?"):393 if yes_or_ask(yes, f"Do you want to remove '{tag}' tag?"):
322 tags.remove(tag)394 tags.remove(tag)
323 if tags != bug.tags:395 if tags != bug.tags:
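
The tag handling above compares the suffix of each oem-scripts-* tag against the running script's version, formatted with ':.2f'. A standalone sketch of just that comparison, using an invented version number and tag list:

    # The version-tag comparison in isolation; 1.00 and the tag names are
    # illustrative stand-ins, not values taken from the repository.
    version = 1.00
    tags = ["oem-scripts-0.97", "staging-lock"]
    stale = [
        tag
        for tag in tags
        if tag.startswith("oem-scripts-") and tag[len("oem-scripts-"):] != f"{version:.2f}"
    ]
    print(stale)  # -> ['oem-scripts-0.97']
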
@@ -329,11 +401,11 @@ def update_bug(lp, bug_number: int, yes: bool) -> None:
329401
330 if bug_modified:402 if bug_modified:
331 bug.lp_save()403 bug.lp_save()
332 info(f'LP: #{bug_number} is updated.')404 info(f"LP: #{bug_number} is updated.")
333 elif task_modified:405 elif task_modified:
334 info(f'LP: #{bug_number} is updated.')406 info(f"LP: #{bug_number} is updated.")
335 elif yes:407 elif yes:
336 info('Everything looks OK.')408 info("Everything looks OK.")
337409
338410
339def check_bug(lp, bug_number: int) -> None:411def check_bug(lp, bug_number: int) -> None:
@@ -357,11 +429,14 @@ def check_bug(lp, bug_number: int) -> None:
357 need_fixing = True429 need_fixing = True
358 if check_and_update_bug_attachments(bug, pkg_name) is False:430 if check_and_update_bug_attachments(bug, pkg_name) is False:
359 need_fixing = True431 need_fixing = True
360 if not args.skip and check_and_update_git_repo(bug, pkg_name, bootstrap=False) is False:432 if (
433 not args.skip
434 and check_and_update_git_repo(bug, pkg_name, bootstrap=False) is False
435 ):
361 need_fixing = True436 need_fixing = True
362437
363 if need_fixing is False:438 if need_fixing is False:
364 info('Everything looks OK.')439 info("Everything looks OK.")
365 else:440 else:
366 exit(1)441 exit(1)
367442
@@ -373,8 +448,8 @@ def check_bug_description(bug) -> (str, str):
373 critical(f"LP: #{bug.id} '{bug.title}' is NOT MATCHED")448 critical(f"LP: #{bug.id} '{bug.title}' is NOT MATCHED")
374 exit(1)449 exit(1)
375450
376 if '.' in result.group(1):451 if "." in result.group(1):
377 project, group = result.group(1).split('.')452 project, group = result.group(1).split(".")
378 platform = result.group(2)453 platform = result.group(2)
379 branchName = group + "." + platform + "-focal-ubuntu"454 branchName = group + "." + platform + "-focal-ubuntu"
380 else:455 else:
@@ -384,10 +459,10 @@ def check_bug_description(bug) -> (str, str):
384 branchName = platform + "-focal-ubuntu"459 branchName = platform + "-focal-ubuntu"
385460
386 metaPkgName = f"oem-{result.group(1)}-{result.group(2)}-meta"461 metaPkgName = f"oem-{result.group(1)}-{result.group(2)}-meta"
387 prog = re.compile(r'\W*We want to improve the hardware support for ([^.]*).\W*')462 prog = re.compile(r"\W*We want to improve the hardware support for ([^.]*).\W*")
388 deviceName = None463 deviceName = None
389464
390 for line in bug.description.split('\n'):465 for line in bug.description.split("\n"):
391 result = prog.match(line)466 result = prog.match(line)
392 if not result:467 if not result:
393 continue468 continue
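
The description check above scans each line of the bug description for the "We want to improve the hardware support for ..." sentence and captures the device name. Run in isolation against an invented description line:

    import re

    # The device-name extraction on its own; the description text is a
    # made-up example, only the regex comes from the diff.
    prog = re.compile(r"\W*We want to improve the hardware support for ([^.]*).\W*")
    description = "We want to improve the hardware support for Acme Laptop 14."
    for line in description.split("\n"):
        result = prog.match(line)
        if not result:
            continue
        print(result.group(1))  # -> Acme Laptop 14
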
@@ -403,11 +478,12 @@ def check_bug_description(bug) -> (str, str):
403 metaPkgName=metaPkgName,478 metaPkgName=metaPkgName,
404 branchName=branchName,479 branchName=branchName,
405 oemCodenameNogroup=project,480 oemCodenameNogroup=project,
406 deviceName=deviceName)481 deviceName=deviceName,
482 )
407483
408 if bug.description != desc:484 if bug.description != desc:
409 d = difflib.Differ()485 d = difflib.Differ()
410 diff = d.compare(bug.description.split('\n'), desc.split('\n'))486 diff = d.compare(bug.description.split("\n"), desc.split("\n"))
411 error("The description needs to update.")487 error("The description needs to update.")
412 if not args.quiet:488 if not args.quiet:
413 for i, line in enumerate(diff):489 for i, line in enumerate(diff):
@@ -431,15 +507,19 @@ def check_bug_importance(bug) -> bool:
431 info("Checking bug importance...")507 info("Checking bug importance...")
432 result = True508 result = True
433 for task in bug.bug_tasks:509 for task in bug.bug_tasks:
434 if task.bug_target_name == 'oem-priority' and task.importance != 'Critical':510 if task.bug_target_name == "oem-priority" and task.importance != "Critical":
435 error(f"The '{task.bug_target_name}' importance is expected to be 'Critical' instead of '{task.importance}'.")511 error(
512 f"The '{task.bug_target_name}' importance is expected to be 'Critical' instead of '{task.importance}'."
513 )
436 result = False514 result = False
437 return result515 return result
438516
439517
440def _expected_status(target_name: str, status: str, expected: str) -> bool:518def _expected_status(target_name: str, status: str, expected: str) -> bool:
441 if status != expected:519 if status != expected:
442 error(f"The '{target_name}' status is expected to be '{expected}' instead of '{status}'.")520 error(
521 f"The '{target_name}' status is expected to be '{expected}' instead of '{status}'."
522 )
443 return False523 return False
444 return True524 return True
445525
@@ -449,20 +529,32 @@ def check_bug_status(bug, pkg_name: str) -> bool:
449 result = True529 result = True
450 saw_ubuntu_task = False530 saw_ubuntu_task = False
451 for task in bug.bug_tasks:531 for task in bug.bug_tasks:
452 if task.bug_target_name == 'oem-priority':532 if task.bug_target_name == "oem-priority":
453 if args.ready:533 if args.ready:
454 if _expected_status(task.bug_target_name, task.status, 'Fix Committed') is False:534 if (
535 _expected_status(task.bug_target_name, task.status, "Fix Committed")
536 is False
537 ):
455 result = False538 result = False
456 else:539 else:
457 if _expected_status(task.bug_target_name, task.status, 'In Progress') is False:540 if (
541 _expected_status(task.bug_target_name, task.status, "In Progress")
542 is False
543 ):
458 result = False544 result = False
459 elif task.bug_target_name == 'ubuntu':545 elif task.bug_target_name == "ubuntu":
460 saw_ubuntu_task = True546 saw_ubuntu_task = True
461 if args.ready:547 if args.ready:
462 if _expected_status(task.bug_target_name, task.status, 'Confirmed') is False:548 if (
549 _expected_status(task.bug_target_name, task.status, "Confirmed")
550 is False
551 ):
463 result = False552 result = False
464 else:553 else:
465 if _expected_status(task.bug_target_name, task.status, 'Incomplete') is False:554 if (
555 _expected_status(task.bug_target_name, task.status, "Incomplete")
556 is False
557 ):
466 result = False558 result = False
467 elif f"{pkg_name} (Ubuntu)" not in task.bug_target_name:559 elif f"{pkg_name} (Ubuntu)" not in task.bug_target_name:
468 critical(f"It is unexpected to have '{task.bug_target_name}' task")560 critical(f"It is unexpected to have '{task.bug_target_name}' task")
@@ -472,17 +564,25 @@ def check_bug_status(bug, pkg_name: str) -> bool:
472 return result564 return result
473565
474566
475def _ok_to_change_status(target_name: str, orig_status: str, new_status: str, yes: bool) -> bool:567def _ok_to_change_status(
568 target_name: str, orig_status: str, new_status: str, yes: bool
569) -> bool:
476 if orig_status == new_status:570 if orig_status == new_status:
477 return False571 return False
478 if yes_or_ask(yes, f"Would you like to change the '{target_name}' status from '{orig_status}' to '{new_status}'?"):572 if yes_or_ask(
573 yes,
574 f"Would you like to change the '{target_name}' status from '{orig_status}' to '{new_status}'?",
575 ):
479 return True576 return True
480 return False577 return False
481578
482579
483def _change_task_status(task, new_status: str, yes: bool) -> bool:580def _change_task_status(task, new_status: str, yes: bool) -> bool:
484 if _expected_status(task.bug_target_name, task.status, new_status) is False and \581 if _expected_status(
485 _ok_to_change_status(task.bug_target_name, task.status, new_status, yes):582 task.bug_target_name, task.status, new_status
583 ) is False and _ok_to_change_status(
584 task.bug_target_name, task.status, new_status, yes
585 ):
486 task.status = new_status586 task.status = new_status
487 task.lp_save()587 task.lp_save()
488588
@@ -491,35 +591,42 @@ def update_bug_status(bug, yes: bool) -> None:
491 info("Updating bug status...")591 info("Updating bug status...")
492 saw_ubuntu_task = False592 saw_ubuntu_task = False
493 for bug_task in bug.bug_tasks:593 for bug_task in bug.bug_tasks:
494 if bug_task.bug_target_name == 'oem-priority':594 if bug_task.bug_target_name == "oem-priority":
495 if args.ready:595 if args.ready:
496 _change_task_status(bug_task, 'Fix Committed', yes)596 _change_task_status(bug_task, "Fix Committed", yes)
497 else:597 else:
498 _change_task_status(bug_task, 'In Progress', yes)598 _change_task_status(bug_task, "In Progress", yes)
499 elif bug_task.bug_target_name == 'ubuntu':599 elif bug_task.bug_target_name == "ubuntu":
500 saw_ubuntu_task = True600 saw_ubuntu_task = True
501 if args.ready:601 if args.ready:
502 _change_task_status(bug_task, 'Confirmed', yes)602 _change_task_status(bug_task, "Confirmed", yes)
503 elif yes_or_ask(yes, f"Would you like to delete the '{bug_task.bug_target_name}' bug_task? (Don't affect '{bug_task.bug_target_display_name}')"):603 elif yes_or_ask(
604 yes,
605 f"Would you like to delete the '{bug_task.bug_target_name}' bug_task? (Don't affect '{bug_task.bug_target_display_name}')",
606 ):
504 try:607 try:
505 bug_task.lp_delete()608 bug_task.lp_delete()
506 except lazr.restfulclient.errors.BadRequest as e:609 except lazr.restfulclient.errors.BadRequest as e:
507 warning(f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead.")610 warning(
611 f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead."
612 )
508 debug(e)613 debug(e)
509 _change_task_status(bug_task, 'Incomplete', yes)614 _change_task_status(bug_task, "Incomplete", yes)
510 except lazr.restfulclient.errors.Unauthorized as e:615 except lazr.restfulclient.errors.Unauthorized as e:
511 warning(f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead.")616 warning(
617 f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead."
618 )
512 debug(e)619 debug(e)
513 _change_task_status(bug_task, 'Incomplete', yes)620 _change_task_status(bug_task, "Incomplete", yes)
514 else:621 else:
515 _change_task_status(bug_task, 'Incomplete', yes)622 _change_task_status(bug_task, "Incomplete", yes)
516 else:623 else:
517 warning(f"{bug_task.bug_target_name} {bug_task.status}")624 warning(f"{bug_task.bug_target_name} {bug_task.status}")
518 if args.ready and saw_ubuntu_task is False:625 if args.ready and saw_ubuntu_task is False:
519 bug.addTask(target=lp.projects["Ubuntu"])626 bug.addTask(target=lp.projects["Ubuntu"])
520 for bug_task in bug.bug_tasks:627 for bug_task in bug.bug_tasks:
521 if bug_task.bug_target_name == 'ubuntu':628 if bug_task.bug_target_name == "ubuntu":
522 _change_task_status(bug_task, 'Confirmed', yes)629 _change_task_status(bug_task, "Confirmed", yes)
523630
524631
525def check_and_update_bug_subscriptions(lp, bug, update=False, yes=False) -> bool:632def check_and_update_bug_subscriptions(lp, bug, update=False, yes=False) -> bool:
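
The reformatted error handling in update_bug_status above keeps the original fallback: when Launchpad refuses to delete a bug task, the task is marked Incomplete instead. A condensed sketch of that pattern, where change_status stands in for the _change_task_status helper:

    import lazr.restfulclient.errors

    # Delete-or-downgrade fallback; bug_task is a launchpadlib bug task entry.
    def drop_or_mark_incomplete(bug_task, yes, change_status):
        try:
            bug_task.lp_delete()
        except (
            lazr.restfulclient.errors.BadRequest,
            lazr.restfulclient.errors.Unauthorized,
        ) as e:
            # Some task types cannot be deleted; fall back as the original does.
            print(f"{bug_task.bug_target_name} cannot be deleted ({e}).")
            change_status(bug_task, "Incomplete", yes)
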
@@ -532,29 +639,41 @@ def check_and_update_bug_subscriptions(lp, bug, update=False, yes=False) -> bool
532 for subscription in bug.subscriptions:639 for subscription in bug.subscriptions:
533 subscriptions.append(subscription.person.name)640 subscriptions.append(subscription.person.name)
534 if not args.ready:641 if not args.ready:
535 for subscriber in ('ubuntu-sponsors', 'ubuntu-desktop'):642 for subscriber in ("ubuntu-sponsors", "ubuntu-desktop"):
536 if subscriber == subscription.person.name:643 if subscriber == subscription.person.name:
537 error(f"'{subscriber}' should not be in the subscriptions.")644 error(f"'{subscriber}' should not be in the subscriptions.")
538 if update and yes_or_ask(yes, f"Do you want to unsubscribe '{subscriber}'?"):645 if update and yes_or_ask(
646 yes, f"Do you want to unsubscribe '{subscriber}'?"
647 ):
539 if subscription.canBeUnsubscribedByUser():648 if subscription.canBeUnsubscribedByUser():
540 bug.unsubscribe(person=lp.people[subscriber])649 bug.unsubscribe(person=lp.people[subscriber])
541 else:650 else:
542 warning(f"{lp.me.name} doesn't have the permission to unsubscribe {subscriber}.")651 warning(
652 f"{lp.me.name} doesn't have the permission to unsubscribe {subscriber}."
653 )
543 result = False654 result = False
544 else:655 else:
545 result = False656 result = False
546 if args.ready:657 if args.ready:
547 for subscriber in ('oem-solutions-engineers', 'ubuntu-sponsors', 'ubuntu-desktop'):658 for subscriber in (
659 "oem-solutions-engineers",
660 "ubuntu-sponsors",
661 "ubuntu-desktop",
662 ):
548 if subscriber not in subscriptions:663 if subscriber not in subscriptions:
549 error(f"'{subscriber}' is not in the subscriptions.")664 error(f"'{subscriber}' is not in the subscriptions.")
550 if update and yes_or_ask(yes, f"Do you want to subscribe '{subscriber}'?"):665 if update and yes_or_ask(
666 yes, f"Do you want to subscribe '{subscriber}'?"
667 ):
551 bug.subscribe(person=lp.people[subscriber])668 bug.subscribe(person=lp.people[subscriber])
552 else:669 else:
553 result = False670 result = False
554 else:671 else:
555 if 'oem-solutions-engineers' not in subscriptions:672 if "oem-solutions-engineers" not in subscriptions:
556 error(f"'oem-solutions-engineers' is not in the subscriptions.")673 error(f"'oem-solutions-engineers' is not in the subscriptions.")
557 if update and yes_or_ask(yes, f"Do you want to subscribe 'oem-solutions-engineers'?"):674 if update and yes_or_ask(
675 yes, f"Do you want to subscribe 'oem-solutions-engineers'?"
676 ):
558 bug.subscribe(person=lp.people[subscriber])677 bug.subscribe(person=lp.people[subscriber])
559 else:678 else:
560 result = False679 result = False
@@ -573,13 +692,15 @@ def check_bug_tags(bug) -> bool:
573692
574def _run_command(command: list or tuple, returncode=(0,), env=None) -> (str, str):693def _run_command(command: list or tuple, returncode=(0,), env=None) -> (str, str):
575 debug("$ " + " ".join(command))694 debug("$ " + " ".join(command))
576 proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)695 proc = subprocess.Popen(
696 command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
697 )
577 out, err = proc.communicate()698 out, err = proc.communicate()
578699
579 if out:700 if out:
580 out = out.decode('utf-8').strip()701 out = out.decode("utf-8").strip()
581 if err:702 if err:
582 err = err.decode('utf-8').strip()703 err = err.decode("utf-8").strip()
583704
584 if proc.returncode not in returncode:705 if proc.returncode not in returncode:
585 critical(f"return {proc.returncode}")706 critical(f"return {proc.returncode}")
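
_run_command, reflowed in the surrounding hunks, is a thin subprocess wrapper: echo the command, capture and decode stdout/stderr, and treat any exit code outside an allowed set as fatal. A simplified sketch of the same idea built on subprocess.run (it raises instead of logging and exiting, unlike the original):

    import subprocess

    # Simplified stand-in for the _run_command pattern shown above.
    def run_command(command, returncode=(0,), env=None):
        print("$ " + " ".join(command))
        proc = subprocess.run(command, capture_output=True, env=env)
        out = proc.stdout.decode("utf-8").strip()
        err = proc.stderr.decode("utf-8").strip()
        if proc.returncode not in returncode:
            raise RuntimeError(f"return {proc.returncode}: {err or out}")
        return out, err, proc.returncode
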
@@ -597,7 +718,9 @@ def _run_command(command: list or tuple, returncode=(0,), env=None) -> (str, str
597 return (out, err, proc.returncode)718 return (out, err, proc.returncode)
598719
599720
600def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False) -> bool:721def check_and_update_bug_attachments(
722 bug, pkg_name: str, update=False, yes=False
723) -> bool:
601 if update:724 if update:
602 info("Checking and updating attachments...")725 info("Checking and updating attachments...")
603 else:726 else:
@@ -608,8 +731,8 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False
608 critical(f"{pkg_name} failed.")731 critical(f"{pkg_name} failed.")
609 exit(1)732 exit(1)
610733
611 if '.' in result.group(1):734 if "." in result.group(1):
612 project, group = result.group(1).split('.')735 project, group = result.group(1).split(".")
613 else:736 else:
614 project = result.group(1)737 project = result.group(1)
615 group = None738 group = None
@@ -620,7 +743,16 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False
620 else:743 else:
621 branch = f"{platform}-focal-ubuntu"744 branch = f"{platform}-focal-ubuntu"
622745
623 git_command = ("git", "clone", "--depth", "1", "-b", branch, f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", pkg_name)746 git_command = (
747 "git",
748 "clone",
749 "--depth",
750 "1",
751 "-b",
752 branch,
753 f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta",
754 pkg_name,
755 )
624756
625 debdiff = None757 debdiff = None
626 content = None758 content = None
@@ -628,33 +760,49 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False
628760
629 with TemporaryDirectory() as tmpdir:761 with TemporaryDirectory() as tmpdir:
630 os.chdir(tmpdir)762 os.chdir(tmpdir)
631 _run_command(['wget', 'https://bazaar.launchpad.net/~ubuntu-archive/ubuntu-archive-tools/trunk/download/head:/oem-metapackage-mir-check'])763 _run_command(
764 [
765 "wget",
766 "https://bazaar.launchpad.net/~ubuntu-archive/ubuntu-archive-tools/trunk/download/head:/oem-metapackage-mir-check",
767 ]
768 )
632 _run_command(git_command)769 _run_command(git_command)
633 git_dir = os.path.join(tmpdir, pkg_name)770 git_dir = os.path.join(tmpdir, pkg_name)
634 os.chdir(git_dir)771 os.chdir(git_dir)
635 _run_command(['dpkg-buildpackage', '-S', '-us', '-uc'])772 _run_command(["dpkg-buildpackage", "-S", "-us", "-uc"])
636 os.chdir(tmpdir)773 os.chdir(tmpdir)
637 dsc = glob(f'{pkg_name}*.dsc')[0]774 dsc = glob(f"{pkg_name}*.dsc")[0]
638 prog = re.compile(fr'{pkg_name}_(.*).dsc')775 prog = re.compile(fr"{pkg_name}_(.*).dsc")
639 result = prog.match(dsc)776 result = prog.match(dsc)
640 version = result.group(1)777 version = result.group(1)
641 debdiff = f"{pkg_name}_{version}.debdiff"778 debdiff = f"{pkg_name}_{version}.debdiff"
642 # It should generate some debdiff so the return code should be 1 unless comparing to oem-qemu-meta itself.779 # It should generate some debdiff so the return code should be 1 unless comparing to oem-qemu-meta itself.
643 debug(f"TZ={args.tz}")780 debug(f"TZ={args.tz}")
644 content, _, _ = _run_command(['bash', 'oem-metapackage-mir-check', dsc], returncode=(1,), env=dict(os.environ, TZ=args.tz))781 content, _, _ = _run_command(
782 ["bash", "oem-metapackage-mir-check", dsc],
783 returncode=(1,),
784 env=dict(os.environ, TZ=args.tz),
785 )
645 content += "\n"786 content += "\n"
646 with open(debdiff, 'w') as f:787 with open(debdiff, "w") as f:
647 f.write(content)788 f.write(content)
648789
649 for attachment in bug.attachments:790 for attachment in bug.attachments:
650 if 'debdiff' in attachment.title:791 if "debdiff" in attachment.title:
651 _run_command(['wget', attachment.data_link, '-O', 'data'])792 _run_command(["wget", attachment.data_link, "-O", "data"])
652 out, err, returncode = _run_command(['colordiff', '-ur', 'data', debdiff], returncode=(0, 1))793 out, err, returncode = _run_command(
794 ["colordiff", "-ur", "data", debdiff], returncode=(0, 1)
795 )
653 if returncode == 1:796 if returncode == 1:
654 warning(f"{attachment.title} - {attachment.web_link} has unexpected content.")797 warning(
798 f"{attachment.title} - {attachment.web_link} has unexpected content."
799 )
655 info(f"{out}")800 info(f"{out}")
656 found = True801 found = True
657 if update and yes_or_ask(yes, f"Do you want to remove {attachment.title} - {attachment.web_link}?"):802 if update and yes_or_ask(
803 yes,
804 f"Do you want to remove {attachment.title} - {attachment.web_link}?",
805 ):
658 try:806 try:
659 attachment.removeFromBug()807 attachment.removeFromBug()
660 except lazr.restfulclient.errors.NotFound as e:808 except lazr.restfulclient.errors.NotFound as e:
@@ -669,32 +817,54 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False
669 error(f"There is no {debdiff}.")817 error(f"There is no {debdiff}.")
670 info(content)818 info(content)
671 if update and yes_or_ask(yes, f"Do you want to attach {debdiff}?"):819 if update and yes_or_ask(yes, f"Do you want to attach {debdiff}?"):
672 bug.addAttachment(filename=debdiff, data=content.encode("utf-8"), comment=f"Attach {debdiff} by oem-scripts {oem_scripts.__version__:.2f}.", is_patch=True)820 bug.addAttachment(
821 filename=debdiff,
822 data=content.encode("utf-8"),
823 comment=f"Attach {debdiff} by oem-scripts {oem_scripts.__version__:.2f}.",
824 is_patch=True,
825 )
673 return True826 return True
674 else:827 else:
675 return False828 return False
676829
677830
678def _get_items_from_git(project: str, branch: str, pkg_name: str) -> tuple:831def _get_items_from_git(project: str, branch: str, pkg_name: str) -> tuple:
679 git_command = ("git", "clone", "--depth", "1", "-b", branch, f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", pkg_name)832 git_command = (
833 "git",
834 "clone",
835 "--depth",
836 "1",
837 "-b",
838 branch,
839 f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta",
840 pkg_name,
841 )
680 with TemporaryDirectory() as tmpdir:842 with TemporaryDirectory() as tmpdir:
681 os.chdir(tmpdir)843 os.chdir(tmpdir)
682 _run_command(git_command)844 _run_command(git_command)
683 git_dir = os.path.join(tmpdir, pkg_name)845 git_dir = os.path.join(tmpdir, pkg_name)
684846
685 if project == 'somerville':847 if project == "somerville":
686 prog = re.compile(r"alias pci:\*sv00001028sd0000([0-9A-F]{4})[^ ]* meta (.*)")848 prog = re.compile(
687 elif project == 'stella':849 r"alias pci:\*sv00001028sd0000([0-9A-F]{4})[^ ]* meta (.*)"
688 prog = re.compile(r"alias pci:\*sv0000103Csd0000([0-9A-F]{4})[^ ]* meta (.*)")850 )
851 elif project == "stella":
852 prog = re.compile(
853 r"alias pci:\*sv0000103Csd0000([0-9A-F]{4})[^ ]* meta (.*)"
854 )
689 else:855 else:
690 prog = re.compile(r"alias dmi:\*bvn([0-9a-zA-Z]+):bvr([0-9a-zA-Z]{3})\* meta (.*)")856 prog = re.compile(
857 r"alias dmi:\*bvn([0-9a-zA-Z]+):bvr([0-9a-zA-Z]{3})\* meta (.*)"
858 )
691859
692 ids = []860 ids = []
693 with open(os.path.join(git_dir, 'debian', 'modaliases'), 'r') as modaliases:861 with open(os.path.join(git_dir, "debian", "modaliases"), "r") as modaliases:
694 for line in modaliases:862 for line in modaliases:
695 result = prog.match(line.strip())863 result = prog.match(line.strip())
696 if result.group(result.lastindex) != pkg_name:864 if result.group(result.lastindex) != pkg_name:
697 error("Something wrong in debian/modaliases. Please fix it manually first.")865 error(
866 "Something wrong in debian/modaliases. Please fix it manually first."
867 )
698 return False868 return False
699 if result.lastindex == 3:869 if result.lastindex == 3:
700 ids.append((result.group(1), result.group(2)))870 ids.append((result.group(1), result.group(2)))
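
The reindented regexes above pull the PCI subsystem device ID (or, in the fallback branch, the DMI BIOS vendor/version strings) and the owning meta package out of debian/modaliases. A quick check of the somerville pattern against an invented input line:

    import re

    # 1028 is Dell's PCI subsystem vendor ID; the capture groups are the
    # subsystem device ID and the meta package. The input line is invented.
    prog = re.compile(r"alias pci:\*sv00001028sd0000([0-9A-F]{4})[^ ]* meta (.*)")
    line = "alias pci:*sv00001028sd000009C5bc0Csc05* meta oem-somerville-example-meta"
    result = prog.match(line)
    print(result.group(1), result.group(2))  # -> 09C5 oem-somerville-example-meta
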
@@ -702,21 +872,25 @@ def _get_items_from_git(project: str, branch: str, pkg_name: str) -> tuple:
702 ids.append(result.group(1))872 ids.append(result.group(1))
703 kernel_flavour = None873 kernel_flavour = None
704 kernel_meta = None874 kernel_meta = None
705 with open(os.path.join(git_dir, 'debian', 'control'), 'r') as control:875 with open(os.path.join(git_dir, "debian", "control"), "r") as control:
706 for line in control:876 for line in control:
707 if line.startswith('XB-Ubuntu-OEM-Kernel-Flavour:'):877 if line.startswith("XB-Ubuntu-OEM-Kernel-Flavour:"):
708 kernel_flavour = line[len('XB-Ubuntu-OEM-Kernel-Flavour:'):].strip()878 kernel_flavour = line[
709 elif line.startswith('Depends:'):879 len("XB-Ubuntu-OEM-Kernel-Flavour:") :
710 if 'linux-oem-20.04b' in line:880 ].strip()
711 kernel_meta = 'linux-oem-20.04b'881 elif line.startswith("Depends:"):
712 elif 'linux-oem-20.04' in line:882 if "linux-oem-20.04b" in line:
713 kernel_meta = 'linux-oem-20.04'883 kernel_meta = "linux-oem-20.04b"
714 elif 'linux-generic-hwe-20.04' in line:884 elif "linux-oem-20.04" in line:
715 kernel_meta = 'linux-generic-hwe-20.04'885 kernel_meta = "linux-oem-20.04"
886 elif "linux-generic-hwe-20.04" in line:
887 kernel_meta = "linux-generic-hwe-20.04"
716 return kernel_flavour, kernel_meta, ids888 return kernel_flavour, kernel_meta, ids
717889
718890
719def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, update=False, username=None) -> bool:891def check_and_update_git_repo(
892 bug, pkg_name: str, yes=False, bootstrap=True, update=False, username=None
893) -> bool:
720 if update:894 if update:
721 if bootstrap:895 if bootstrap:
722 info("Checking and updating git repo for bootstrap branch...")896 info("Checking and updating git repo for bootstrap branch...")
@@ -734,17 +908,17 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd
734 critical(f"{pkg_name} failed.")908 critical(f"{pkg_name} failed.")
735 exit(1)909 exit(1)
736910
737 if '.' in result.group(1):911 if "." in result.group(1):
738 project, group = result.group(1).split('.')912 project, group = result.group(1).split(".")
739 else:913 else:
740 project = result.group(1)914 project = result.group(1)
741 group = None915 group = None
742 platform = result.group(2)916 platform = result.group(2)
743917
744 if bootstrap:918 if bootstrap:
745 suffix = 'ubuntu'919 suffix = "ubuntu"
746 else:920 else:
747 suffix = 'oem'921 suffix = "oem"
748922
749 if group:923 if group:
750 branch = f"{group}.{platform}-focal-{suffix}"924 branch = f"{group}.{platform}-focal-{suffix}"
@@ -752,23 +926,27 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd
752 branch = f"{platform}-focal-{suffix}"926 branch = f"{platform}-focal-{suffix}"
753927
754 kernel_flavour, kernel_meta, ids = _get_items_from_git(project, branch, pkg_name)928 kernel_flavour, kernel_meta, ids = _get_items_from_git(project, branch, pkg_name)
755 if kernel_flavour == 'default':929 if kernel_flavour == "default":
756 if kernel_meta is None:930 if kernel_meta is None:
757 kernel_meta = 'linux-generic-hwe-20.04'931 kernel_meta = "linux-generic-hwe-20.04"
758 elif kernel_meta == 'linux-generic-hwe-20.04':932 elif kernel_meta == "linux-generic-hwe-20.04":
759 pass933 pass
760 else:934 else:
761 critical(f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}.")935 critical(
936 f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}."
937 )
762 exit(1)938 exit(1)
763 elif kernel_flavour == 'oem':939 elif kernel_flavour == "oem":
764 if kernel_meta is None:940 if kernel_meta is None:
765 kernel_meta = 'linux-oem-20.04'941 kernel_meta = "linux-oem-20.04"
766 elif kernel_meta == 'linux-oem-20.04b':942 elif kernel_meta == "linux-oem-20.04b":
767 pass943 pass
768 elif kernel_meta == 'linux-oem-20.04':944 elif kernel_meta == "linux-oem-20.04":
769 pass945 pass
770 else:946 else:
771 critical(f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}.")947 critical(
948 f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}."
949 )
772 exit(1)950 exit(1)
773951
774 if ids == []:952 if ids == []:
@@ -778,27 +956,61 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd
778 with TemporaryDirectory() as tmpdir:956 with TemporaryDirectory() as tmpdir:
779 os.chdir(tmpdir)957 os.chdir(tmpdir)
780 # Generated the meta package by pkg-oem-meta958 # Generated the meta package by pkg-oem-meta
781 if project == 'somerville':959 if project == "somerville":
782 command = ['pkg-somerville-meta', '-s', 'focal', '-k', kernel_meta, '-p', platform, '--public-bug', str(bug.id)]960 command = [
961 "pkg-somerville-meta",
962 "-s",
963 "focal",
964 "-k",
965 kernel_meta,
966 "-p",
967 platform,
968 "--public-bug",
969 str(bug.id),
970 ]
783 command.extend(ids)971 command.extend(ids)
784 elif project == 'stella':972 elif project == "stella":
785 command = ['pkg-stella-meta', '-s', 'focal', '-k', kernel_meta, '-g', group, '-p', platform, '--public-bug', str(bug.id)]973 command = [
974 "pkg-stella-meta",
975 "-s",
976 "focal",
977 "-k",
978 kernel_meta,
979 "-g",
980 group,
981 "-p",
982 platform,
983 "--public-bug",
984 str(bug.id),
985 ]
786 command.extend(ids)986 command.extend(ids)
787 elif project == 'sutton':987 elif project == "sutton":
788 command = ['pkg-sutton-meta', '-s', 'focal', '-k', kernel_meta, '-g', group, '-p', platform, '--public-bug', str(bug.id)]988 command = [
989 "pkg-sutton-meta",
990 "-s",
991 "focal",
992 "-k",
993 kernel_meta,
994 "-g",
995 group,
996 "-p",
997 platform,
998 "--public-bug",
999 str(bug.id),
1000 ]
789 for bvn, bvr in ids:1001 for bvn, bvr in ids:
790 command.append(f"bvn{bvn}:bvr{bvr}")1002 command.append(f"bvn{bvn}:bvr{bvr}")
791 _run_command(command)1003 _run_command(command)
792 new_dir = os.path.join(tmpdir, pkg_name)1004 new_dir = os.path.join(tmpdir, pkg_name)
793 os.chdir(new_dir)1005 os.chdir(new_dir)
794 if bootstrap:1006 if bootstrap:
795 _run_command(['git', 'checkout', branch])1007 _run_command(["git", "checkout", branch])
7961008
797 os.chdir(tmpdir)1009 os.chdir(tmpdir)
798 os.rename(new_dir, new_dir + '.new')1010 os.rename(new_dir, new_dir + ".new")
799 new_dir += ".new"1011 new_dir += ".new"
800 shutil.rmtree(os.path.join(new_dir, '.git'))1012 shutil.rmtree(os.path.join(new_dir, ".git"))
801 os.remove(os.path.join(new_dir, 'debian', 'changelog'))1013 os.remove(os.path.join(new_dir, "debian", "changelog"))
8021014
803 if username:1015 if username:
804 git_repo = f"git+ssh://{username}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta"1016 git_repo = f"git+ssh://{username}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta"
@@ -811,25 +1023,27 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd
8111023
812 if bootstrap:1024 if bootstrap:
813 lines = None1025 lines = None
814 changelog = os.path.join(tmpdir, pkg_name, 'debian', 'changelog')1026 changelog = os.path.join(tmpdir, pkg_name, "debian", "changelog")
815 with open(changelog, 'r') as f:1027 with open(changelog, "r") as f:
816 lines = f.readlines()1028 lines = f.readlines()
8171029
818 lines[0] = f"{pkg_name} (20.04~ubuntu1) UNRELEASED; urgency=medium\n"1030 lines[0] = f"{pkg_name} (20.04~ubuntu1) UNRELEASED; urgency=medium\n"
8191031
820 if f"(LP: #{bug.id})" not in lines[2] and lines[2].startswith(" * Meta package for"):1032 if f"(LP: #{bug.id})" not in lines[2] and lines[2].startswith(
1033 " * Meta package for"
1034 ):
821 lines[2] = " " + lines[2].strip() + f" (LP: #{bug.id})\n"1035 lines[2] = " " + lines[2].strip() + f" (LP: #{bug.id})\n"
8221036
823 with open(changelog, 'w') as f:1037 with open(changelog, "w") as f:
824 f.writelines(lines)1038 f.writelines(lines)
8251039
826 # Remove deprecated autopkgtest file1040 # Remove deprecated autopkgtest file
827 deprecated_test = os.path.join(tmpdir, pkg_name, 'debian', 'tests', pkg_name)1041 deprecated_test = os.path.join(tmpdir, pkg_name, "debian", "tests", pkg_name)
828 if os.path.exists(deprecated_test):1042 if os.path.exists(deprecated_test):
829 _run_command(["git", "rm", f"debian/tests/{pkg_name}"])1043 _run_command(["git", "rm", f"debian/tests/{pkg_name}"])
8301044
831 # Remove deprecated debian/compat1045 # Remove deprecated debian/compat
832 deprecated_compat = os.path.join(tmpdir, pkg_name, 'debian', 'compat')1046 deprecated_compat = os.path.join(tmpdir, pkg_name, "debian", "compat")
833 if os.path.exists(deprecated_compat):1047 if os.path.exists(deprecated_compat):
834 _run_command(["git", "rm", f"debian/compat"])1048 _run_command(["git", "rm", f"debian/compat"])
8351049
@@ -841,22 +1055,39 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd
8411055
842 _run_command(["git", "add", "."])1056 _run_command(["git", "add", "."])
843 out, _, _ = _run_command(["git", "diff", "--color=always", "--cached"])1057 out, _, _ = _run_command(["git", "diff", "--color=always", "--cached"])
844 if out != b'':1058 if out != b"":
845 warning("$ git diff")1059 warning("$ git diff")
846 print(out)1060 print(out)
847 if update is True:1061 if update is True:
848 if args.skip:1062 if args.skip:
849 warning(f"The update of the '{branch}' branch of {pkg_name}'s Git repository is skipped on demand.")1063 warning(
1064 f"The update of the '{branch}' branch of {pkg_name}'s Git repository is skipped on demand."
1065 )
850 return False1066 return False
851 elif yes_or_ask(yes, f"Do you want to commit and push the changes above into the '{branch}' branch of {pkg_name}'s Git repository?"):1067 elif yes_or_ask(
852 _run_command(['git', 'commit', '-a', '-m', f"Updated by oem-scripts {oem_scripts.__version__:.2f}."])1068 yes,
853 _run_command(['git', 'push'])1069 f"Do you want to commit and push the changes above into the '{branch}' branch of {pkg_name}'s Git repository?",
1070 ):
1071 _run_command(
1072 [
1073 "git",
1074 "commit",
1075 "-a",
1076 "-m",
1077 f"Updated by oem-scripts {oem_scripts.__version__:.2f}.",
1078 ]
1079 )
1080 _run_command(["git", "push"])
854 return True1081 return True
855 else:1082 else:
856 if bootstrap:1083 if bootstrap:
857 error(f"The '{branch}' branch of {pkg_name} in Git repository needs to update.")1084 error(
1085 f"The '{branch}' branch of {pkg_name} in Git repository needs to update."
1086 )
858 else:1087 else:
859 warning(f"The '{branch}' branch of {pkg_name} in Git repository may need to update.")1088 warning(
1089 f"The '{branch}' branch of {pkg_name} in Git repository may need to update."
1090 )
860 return False1091 return False
861 else:1092 else:
862 info(f"The '{branch}' branch of {pkg_name} in Git repository looks fine.")1093 info(f"The '{branch}' branch of {pkg_name} in Git repository looks fine.")
@@ -867,13 +1098,13 @@ if args.subcommand:
867 login = LaunchpadLogin()1098 login = LaunchpadLogin()
868 lp = login.lp1099 lp = login.lp
8691100
870if args.subcommand == 'create':1101if args.subcommand == "create":
871 create_bug(lp, args.oemCodename, args.platformCodename, args.deviceName)1102 create_bug(lp, args.oemCodename, args.platformCodename, args.deviceName)
872elif args.subcommand == 'update':1103elif args.subcommand == "update":
873 update_bug(lp, args.bugNumber, args.yes)1104 update_bug(lp, args.bugNumber, args.yes)
874elif args.subcommand == 'check':1105elif args.subcommand == "check":
875 check_bug(lp, args.bugNumber)1106 check_bug(lp, args.bugNumber)
876elif args.subcommand == 'collect':1107elif args.subcommand == "collect":
877 collect_bugs(lp, args.json)1108 collect_bugs(lp, args.json)
878else:1109else:
879 parser.print_help()1110 parser.print_help()
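
The tail of the script above is a plain if/elif dispatch on args.subcommand, falling back to parser.print_help(). The same shape in miniature, with stub handlers standing in for the real create/update/check/collect functions:

    import argparse

    # Miniature of the subcommand dispatch; the handlers here are stubs.
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="subcommand")
    for name in ("create", "update", "check", "collect"):
        subparsers.add_parser(name)
    args = parser.parse_args()

    if args.subcommand == "create":
        print("create_bug(...)")
    elif args.subcommand == "update":
        print("update_bug(...)")
    elif args.subcommand == "check":
        print("check_bug(...)")
    elif args.subcommand == "collect":
        print("collect_bugs(...)")
    else:
        parser.print_help()
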
diff --git a/oem-getiso b/oem-getiso
index fc7723d..932e769 100755
--- a/oem-getiso
+++ b/oem-getiso
@@ -11,11 +11,10 @@ import sys
11import re11import re
12from optparse import OptionParser12from optparse import OptionParser
1313
14__DEBUG__ = (os.getenv('DEBUG') == 'true')14__DEBUG__ = os.getenv("DEBUG") == "true"
1515
1616
17class ImageURI(object):17class ImageURI(object):
18
19 def __init__(self, project, date, buildorder, image_type):18 def __init__(self, project, date, buildorder, image_type):
20 self.project = project19 self.project = project
21 self.date = date20 self.date = date
@@ -24,13 +23,12 @@ class ImageURI(object):
24 self.image_type = image_type23 self.image_type = image_type
2524
26 # default value25 # default value
27 self.siteurl = 'https://oem-share.canonical.com'26 self.siteurl = "https://oem-share.canonical.com"
28 self.ispublished = False27 self.ispublished = False
2928
30 @property29 @property
31 def rootdir(self):30 def rootdir(self):
32 self._rootdir = self.ispublished and\31 self._rootdir = self.ispublished and "/partners/" or "/oem/cesg-builds/"
33 '/partners/' or '/oem/cesg-builds/'
34 return self._rootdir32 return self._rootdir
3533
36 @property34 @property
@@ -44,23 +42,31 @@ class ImageURI(object):
44 @property42 @property
45 def imagepath(self):43 def imagepath(self):
46 if self.ispublished:44 if self.ispublished:
47 return os.path.join(self.project, 'images',45 return os.path.join(
48 '{}-{}'.format(self.date, self.buildorder),46 self.project,
49 self.image_type, self.isoname)47 "images",
48 "{}-{}".format(self.date, self.buildorder),
49 self.image_type,
50 self.isoname,
51 )
50 else:52 else:
51 return os.path.join(self.project, self.date, self.buildorder,53 return os.path.join(
52 'images', self.image_type, self.isoname)54 self.project,
55 self.date,
56 self.buildorder,
57 "images",
58 self.image_type,
59 self.isoname,
60 )
5361
54 @property62 @property
55 def isoname(self):63 def isoname(self):
56 if self.manifest_ver:64 if self.manifest_ver:
57 project = self.project + '-' + self.manifest_ver65 project = self.project + "-" + self.manifest_ver
58 else:66 else:
59 project = self.project67 project = self.project
60 image_type = 'iso' \68 image_type = "iso" if re.match(r"^dell-bto", self.project) else self.image_type
61 if re.match(r'^dell-bto', self.project) else self.image_type69 return "{}-{}-{}-{}.iso".format(project, image_type, self.date, self.buildorder)
62 return '{}-{}-{}-{}.iso'.format(project, image_type,
63 self.date, self.buildorder)
6470
65 @classmethod71 @classmethod
66 def from_url(cls, url):72 def from_url(cls, url):
@@ -88,33 +94,41 @@ class ImageURI(object):
88 'dell-bto-oneiric-pebble-beach-X05-iso-20111226-0.iso'94 'dell-bto-oneiric-pebble-beach-X05-iso-20111226-0.iso'
89 >>> o = ImageURI.from_url('https://oem-share.canonical.com/partners/dell-bto-oneiric-audi-13-intel/images/20120316-3/iso/dell-bto-oneiric-audi-13-intel-A04-iso-20120316-3.iso')95 >>> o = ImageURI.from_url('https://oem-share.canonical.com/partners/dell-bto-oneiric-audi-13-intel/images/20120316-3/iso/dell-bto-oneiric-audi-13-intel-A04-iso-20120316-3.iso')
90 """96 """
97
91 def _parse(url):98 def _parse(url):
92 from urllib.parse import urlparse99 from urllib.parse import urlparse
93 if not re.match(r'^https.*', url):100
94 logging.error('{0} is not a valid image URL'.format(url))101 if not re.match(r"^https.*", url):
102 logging.error("{0} is not a valid image URL".format(url))
95 raise IndexError103 raise IndexError
96 comps = urlparse(url).path.split('/')104 comps = urlparse(url).path.split("/")
97 published = (comps[1] == 'partners')105 published = comps[1] == "partners"
98 if published:106 if published:
99 (proj, image_type) = (comps[2], comps[5])107 (proj, image_type) = (comps[2], comps[5])
100 (date, buildorder) = comps[4].split('-')108 (date, buildorder) = comps[4].split("-")
101 else:109 else:
102 (proj, date, buildorder, image_type) = \110 (proj, date, buildorder, image_type) = (
103 (comps[3], comps[4], comps[5], comps[7])111 comps[3],
104 if 'dell-bto' in proj:112 comps[4],
113 comps[5],
114 comps[7],
115 )
116 if "dell-bto" in proj:
105 manifest_ver_index = published and 6 or 8117 manifest_ver_index = published and 6 or 8
106 try:118 try:
107 manifest_ver = comps[manifest_ver_index].split('-')[-4]119 manifest_ver = comps[manifest_ver_index].split("-")[-4]
108 except IndexError:120 except IndexError:
109 logging.error('published:{0} manifest_ver_index:{1}'121 logging.error(
110 .format(published, manifest_ver_index))122 "published:{0} manifest_ver_index:{1}".format(
123 published, manifest_ver_index
124 )
125 )
111 raise IndexError126 raise IndexError
112 else:127 else:
113 manifest_ver = None128 manifest_ver = None
114 return (proj, date, buildorder, published,129 return (proj, date, buildorder, published, manifest_ver, image_type)
115 manifest_ver, image_type)130
116 (project, date, buildorder, published, manifest_ver, image_type) = \131 (project, date, buildorder, published, manifest_ver, image_type) = _parse(url)
117 _parse(url)
118 obj = cls(project, date, buildorder, image_type)132 obj = cls(project, date, buildorder, image_type)
119 obj.manifest_ver = manifest_ver133 obj.manifest_ver = manifest_ver
120 obj.ispublished = published134 obj.ispublished = published
@@ -141,25 +155,24 @@ class ImageURI(object):
141155
142def __testself():156def __testself():
143 import doctest157 import doctest
158
144 doctest.testmod()159 doctest.testmod()
145160
146161
147class RsyncURI(object):162class RsyncURI(object):
148
149 def __init__(self, imguri, username=None, siteurl=None):163 def __init__(self, imguri, username=None, siteurl=None):
150 self.imguri = imguri164 self.imguri = imguri
151 self.siteurl = siteurl or 'oem-share.canonical.com'165 self.siteurl = siteurl or "oem-share.canonical.com"
152 self.username = username or os.getenv('USER')166 self.username = username or os.getenv("USER")
153 self.rootdir = '/srv/oem-share.canonical.com/www'167 self.rootdir = "/srv/oem-share.canonical.com/www"
154168
155 @property169 @property
156 def isolink(self):170 def isolink(self):
157 return '{}@{}:{}'.format(self.username, self.siteurl, self.imagepath)171 return "{}@{}:{}".format(self.username, self.siteurl, self.imagepath)
158172
159 @property173 @property
160 def imagepath(self):174 def imagepath(self):
161 return self.rootdir + os.path.join(self.imguri.rootdir,175 return self.rootdir + os.path.join(self.imguri.rootdir, self.imguri.imagepath)
162 self.imguri.imagepath)
163176
164 @classmethod177 @classmethod
165 def from_imguri(cls, imguri):178 def from_imguri(cls, imguri):
@@ -176,7 +189,6 @@ class RsyncURI(object):
176189
177190
178class _Downloader(object):191class _Downloader(object):
179
180 def __init__(self, url):192 def __init__(self, url):
181 """193 """
182 >>> o = _Downloader('https://oem-share.canonical.com/partners/somerville-oneiric-amd64/images/20111116-1/iso/somerville-oneiric-amd64-iso-20111116-1.iso')194 >>> o = _Downloader('https://oem-share.canonical.com/partners/somerville-oneiric-amd64/images/20111116-1/iso/somerville-oneiric-amd64-iso-20111116-1.iso')
@@ -185,8 +197,7 @@ class _Downloader(object):
185 self.imguri = ImageURI.from_url(url)197 self.imguri = ImageURI.from_url(url)
186198
187 def find_lastdownloadediso(self):199 def find_lastdownloadediso(self):
188 proj_isos = filter(lambda iso: self.imguri.project in iso,200 proj_isos = filter(lambda iso: self.imguri.project in iso, glob.glob("*.iso"))
189 glob.glob('*.iso'))
190 try:201 try:
191 return sorted_isos(proj_isos)[-1]202 return sorted_isos(proj_isos)[-1]
192 except IndexError:203 except IndexError:
@@ -205,12 +216,12 @@ class _Downloader(object):
205216
206 def run(self):217 def run(self):
207 self.lastiso = self.find_lastdownloadediso()218 self.lastiso = self.find_lastdownloadediso()
208 print('Found last downloaded file:{}'.format(self.lastiso))219 print("Found last downloaded file:{}".format(self.lastiso))
209 self.isolink = RsyncURI.from_imguri(self.imguri).isolink220 self.isolink = RsyncURI.from_imguri(self.imguri).isolink
210 # if os.path.exists(self.imguri.isoname):221 # if os.path.exists(self.imguri.isoname):
211 # sys.exit("Image {} already be downloaded.".format(222 # sys.exit("Image {} already be downloaded.".format(
212 # self.imguri.isoname))223 # self.imguri.isoname))
213 print('Starting to download file:{}'.format(self.imguri.isoname))224 print("Starting to download file:{}".format(self.imguri.isoname))
214 self.proc = self.do_download()225 self.proc = self.do_download()
215 ret = self.proc.wait()226 ret = self.proc.wait()
216 self.post_download()227 self.post_download()
@@ -222,10 +233,7 @@ class RsyncDownloader(_Downloader):
222233
223 def do_download(self):234 def do_download(self):
224 self.filename = self.lastiso and self.lastiso or self.imguri.isoname235 self.filename = self.lastiso and self.lastiso or self.imguri.isoname
225 cmd = ['rsync', '-Pv',236 cmd = ["rsync", "-Pv", self.isolink, self.filename]
226 self.isolink,
227 self.filename
228 ]
229237
230 return subprocess.Popen(cmd)238 return subprocess.Popen(cmd)
231239
@@ -235,24 +243,27 @@ class RsyncDownloader(_Downloader):
235243
236244
237class ZsyncDownloader(_Downloader):245class ZsyncDownloader(_Downloader):
238
239 def __init__(self, url, auth_token):246 def __init__(self, url, auth_token):
240 super(ZsyncDownloader, self).__init__(url)247 super(ZsyncDownloader, self).__init__(url)
241 self.auth_token = auth_token248 self.auth_token = auth_token
242249
243 def do_download(self):250 def do_download(self):
244 cmd = ['zsync_curl',251 cmd = [
245 self.imguri.isolink + '.zsync',252 "zsync_curl",
246 '-c pysid=' + self.auth_token253 self.imguri.isolink + ".zsync",
247 ]254 "-c pysid=" + self.auth_token,
255 ]
248 if self.lastiso:256 if self.lastiso:
249 cmd.append('-i ' + self.lastiso)257 cmd.append("-i " + self.lastiso)
250258
251 return subprocess.Popen(" ".join(cmd), shell=True)259 return subprocess.Popen(" ".join(cmd), shell=True)
252260
253 def post_download(self):261 def post_download(self):
254 if self.lastiso and self.lastiso != self.imguri.isoname and \262 if (
255 os.path.exists(self.lastiso):263 self.lastiso
264 and self.lastiso != self.imguri.isoname
265 and os.path.exists(self.lastiso)
266 ):
256 os.remove(self.lastiso)267 os.remove(self.lastiso)
257 if os.path.exists(self.imguri.isoname + ".zs-old"):268 if os.path.exists(self.imguri.isoname + ".zs-old"):
258 os.remove(self.imguri.isoname + ".zs-old")269 os.remove(self.imguri.isoname + ".zs-old")
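
ZsyncDownloader, reformatted above, shells out to zsync_curl with the oem-share session cookie and can seed the transfer from the last downloaded ISO. Reduced to its essentials (the URL, token, and seed file are placeholders supplied by the caller):

    import subprocess

    # Essence of ZsyncDownloader.do_download; all arguments are caller-supplied.
    def zsync_download(isolink, auth_token, lastiso=None):
        cmd = ["zsync_curl", isolink + ".zsync", "-c pysid=" + auth_token]
        if lastiso:
            cmd.append("-i " + lastiso)
        return subprocess.Popen(" ".join(cmd), shell=True)
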
@@ -267,19 +278,21 @@ def sorted_isos(isos):
267 >>> sorted_isos(isos)278 >>> sorted_isos(isos)
268 ['watauga2-precise-amd64-norecovery-iso-20130121-0.iso', 'watauga2-precise-amd64-norecovery-iso-20130121-1.iso']279 ['watauga2-precise-amd64-norecovery-iso-20130121-0.iso', 'watauga2-precise-amd64-norecovery-iso-20130121-1.iso']
269 """280 """
281
270 def _f(e):282 def _f(e):
271 e = e.replace('.iso', '')283 e = e.replace(".iso", "")
272 if e.startswith('dell'):284 if e.startswith("dell"):
273 e = e.replace('dell-bto-oneiric-', '')285 e = e.replace("dell-bto-oneiric-", "")
274 comps = e.split('-iso-')286 comps = e.split("-iso-")
275 comps = comps[1].split('-')287 comps = comps[1].split("-")
276 (date, order) = (comps[0], comps[1])288 (date, order) = (comps[0], comps[1])
277 else:289 else:
278 # Date and build number are guaranteed to be the last 2 fields290 # Date and build number are guaranteed to be the last 2 fields
279 # in the name of an ISO produced by Offspring291 # in the name of an ISO produced by Offspring
280 comps = e.rsplit('-', 2)292 comps = e.rsplit("-", 2)
281 (date, order) = (comps[1], comps[2])293 (date, order) = (comps[1], comps[2])
282 return int('{}{}'.format(date, order))294 return int("{}{}".format(date, order))
295
283 return sorted(isos, key=_f)296 return sorted(isos, key=_f)
284297
285298
@@ -289,7 +302,8 @@ def iso_of_bugdesc(desc):
289 ('somerville-oneiric-amd64', '20111116', '1')302 ('somerville-oneiric-amd64', '20111116', '1')
290 """303 """
291 import re304 import re
292 ret = re.findall('Image:\\s+((\\S+)-(\\d+)-(\\d)).*', desc)305
306 ret = re.findall("Image:\\s+((\\S+)-(\\d+)-(\\d)).*", desc)
293 if ret:307 if ret:
294 return (ret[0][1], ret[0][2], ret[0][3])308 return (ret[0][1], ret[0][2], ret[0][3])
295 else:309 else:
@@ -302,9 +316,10 @@ def isourl_of_bug(q):
302 'https://oem-share.canonical.com/oem/cesg-builds/stella-anaheim-precise-amd64/20130116/0/images/iso/stella-anaheim-precise-amd64-iso-20130116-0.iso'316 'https://oem-share.canonical.com/oem/cesg-builds/stella-anaheim-precise-amd64/20130116/0/images/iso/stella-anaheim-precise-amd64-iso-20130116-0.iso'
303 """317 """
304 from launchpadlib.launchpad import Launchpad318 from launchpadlib.launchpad import Launchpad
319
305 cachedir = os.path.join(os.environ["HOME"], ".launchpadlib/cache")320 cachedir = os.path.join(os.environ["HOME"], ".launchpadlib/cache")
306 try:321 try:
307 lp = Launchpad.login_with("oem-getiso", 'production', cachedir)322 lp = Launchpad.login_with("oem-getiso", "production", cachedir)
308 found_iso = iso_of_bugdesc(lp.bugs[q].description)323 found_iso = iso_of_bugdesc(lp.bugs[q].description)
309 except KeyboardInterrupt:324 except KeyboardInterrupt:
310 print("Terminated by user request!")325 print("Terminated by user request!")
@@ -317,20 +332,29 @@ def isourl_of_bug(q):
317 return img.isolink332 return img.isolink
318333
319334
320if __name__ == '__main__':335if __name__ == "__main__":
321 usage = "usage: %prog --method [rsync|zsync --auth_token " \336 usage = (
322 "<auth_token>] iso_url|bug_number"337 "usage: %prog --method [rsync|zsync --auth_token "
338 "<auth_token>] iso_url|bug_number"
339 )
323 parser = OptionParser(usage=usage)340 parser = OptionParser(usage=usage)
324 parser.add_option(341 parser.add_option(
325 "--method", dest="method", default="rsync", metavar="METHOD",342 "--method",
326 help="The METHOD of download. Defaults to 'rsync'.")343 dest="method",
344 default="rsync",
345 metavar="METHOD",
346 help="The METHOD of download. Defaults to 'rsync'.",
347 )
327 parser.add_option(348 parser.add_option(
328 "--auth_token", dest="auth_token", metavar="AUTH_TOKEN",349 "--auth_token",
329 help="The authentication token needed to access oem-share.")350 dest="auth_token",
351 metavar="AUTH_TOKEN",
352 help="The authentication token needed to access oem-share.",
353 )
330 (options, args) = parser.parse_args()354 (options, args) = parser.parse_args()
331355
332 if __DEBUG__:356 if __DEBUG__:
333 print('self testing...')357 print("self testing...")
334 __testself()358 __testself()
335 sys.exit()359 sys.exit()
336360
@@ -353,8 +377,7 @@ if __name__ == '__main__':
353 if options.auth_token is None:377 if options.auth_token is None:
354 sys.exit(parser.get_usage())378 sys.exit(parser.get_usage())
355 if not os.path.exists("/usr/bin/zsync_curl"):379 if not os.path.exists("/usr/bin/zsync_curl"):
356 sys.exit(380 sys.exit("The zync-curl package must be installed to use this method.")
357 "The zync-curl package must be installed to use this method.")
358 runner = ZsyncDownloader(url, options.auth_token)381 runner = ZsyncDownloader(url, options.auth_token)
359 else:382 else:
360 sys.exit(parser.get_usage())383 sys.exit(parser.get_usage())
diff --git a/oem-meta-packages b/oem-meta-packages
index 91b07ef..c98dee8 100755
--- a/oem-meta-packages
+++ b/oem-meta-packages
@@ -40,7 +40,8 @@ from string import Template
40from tempfile import TemporaryDirectory40from tempfile import TemporaryDirectory
4141
4242
43staging_copy_template = Template(f'''Operation: copy_package43staging_copy_template = Template(
44 f'''Operation: copy_package
44Source: $source45Source: $source
45Destination: $destination46Destination: $destination
46Package: $packages47Package: $packages
@@ -60,7 +61,8 @@ And then verify the production archive.
60"""61"""
61deb http://oem.archive.canonical.com $distribution $component62deb http://oem.archive.canonical.com $distribution $component
62deb-src http://oem.archive.canonical.com $distribution $component63deb-src http://oem.archive.canonical.com $distribution $component
63"""''')64"""'''
65)
6466
6567
66class DataJSONEncoder(json.JSONEncoder):68class DataJSONEncoder(json.JSONEncoder):
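
staging_copy_template above is a string.Template that gets filled in per package. A cut-down sketch of the substitution, keeping only the fields visible in this hunk and using placeholder values:

    from string import Template

    # Only fields visible in this hunk; the values below are placeholders.
    template = Template(
        "Operation: copy_package\n"
        "Source: $source\n"
        "Destination: $destination\n"
        "Package: $packages\n"
    )
    print(template.substitute(
        source="example-devel-archive",
        destination="example-staging-archive",
        packages="oem-qemu-meta",
    ))
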
@@ -90,10 +92,11 @@ class PkgInfo:
90 proposed_version: str92 proposed_version: str
9193
9294
93pattern = re.compile(r'oem-([^-]*)-(.*)-meta')95pattern = re.compile(r"oem-([^-]*)-(.*)-meta")
94staging_pattern = re.compile(r'.*>(.*)/</a>')96staging_pattern = re.compile(r".*>(.*)/</a>")
95parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,97parser = argparse.ArgumentParser(
96 epilog="""98 formatter_class=argparse.RawDescriptionHelpFormatter,
99 epilog="""
97You need to have the [oem-scripts] section in ~/.config/oem-scripts/config.ini.100You need to have the [oem-scripts] section in ~/.config/oem-scripts/config.ini.
98Executing `launchpad-api get people/+me` will generate it.101Executing `launchpad-api get people/+me` will generate it.
99102
@@ -113,35 +116,38 @@ url = SomewhereToProvideTheInformationForUsernameAndPassword
113username = UserName116username = UserName
114password = PassWord117password = PassWord
115archive = https://cesg.canonical.com/canonical118archive = https://cesg.canonical.com/canonical
116fingerprint = 54F1860295829CE3""")119fingerprint = 54F1860295829CE3""",
117120)
118121
119parser.add_argument("-d", "--debug",122
120 help="print debug messages", action="store_true")123parser.add_argument("-d", "--debug", help="print debug messages", action="store_true")
121parser.add_argument("-q", "--quiet",124parser.add_argument(
122 help="Don't print info messages", action="store_true")125 "-q", "--quiet", help="Don't print info messages", action="store_true"
123parser.add_argument("-v", "--verbose", action="store_true",126)
124 help="print verbose messages")127parser.add_argument(
125parser.add_argument("--dry-run",128 "-v", "--verbose", action="store_true", help="print verbose messages"
126 help="Dry run the process.", action="store_true")129)
127parser.add_argument("--yes",130parser.add_argument("--dry-run", help="Dry run the process.", action="store_true")
128 help="Say yes for all prompts.", action="store_true")131parser.add_argument("--yes", help="Say yes for all prompts.", action="store_true")
129parser.add_argument("--skip",132parser.add_argument(
130 help="Skip some projects or some meta packages.", type=str, default="")133 "--skip", help="Skip some projects or some meta packages.", type=str, default=""
131parser.add_argument("--only",134)
132 help="Specified the meta package. Skip others.", type=str)135parser.add_argument("--only", help="Specified the meta package. Skip others.", type=str)
133parser.add_argument("--since",136parser.add_argument(
134 help="Begin from the specified meta package. Skip previous meta packages.", type=str)137 "--since",
135parser.add_argument("--apt-dir",138 help="Begin from the specified meta package. Skip previous meta packages.",
136 type=str,139 type=str,
137 help="specify the dir for apt")140)
141parser.add_argument("--apt-dir", type=str, help="specify the dir for apt")
138142
139143
140subparsers = parser.add_subparsers(dest="subcommand")144subparsers = parser.add_subparsers(dest="subcommand")
141145
142collect = subparsers.add_parser('collect', help='[-h] --json platforms.json | --meta oem-qemu-meta -o|--output meta-info.json',146collect = subparsers.add_parser(
143 formatter_class=argparse.RawDescriptionHelpFormatter,147 "collect",
144 epilog="""148 help="[-h] --json platforms.json | --meta oem-qemu-meta -o|--output meta-info.json",
149 formatter_class=argparse.RawDescriptionHelpFormatter,
150 epilog="""
145For example,151For example,
146 oem-meta-packages collect --json platforms.json --output meta-info.json152 oem-meta-packages collect --json platforms.json --output meta-info.json
147153
@@ -179,24 +185,37 @@ platforms.json:
179185
180oem-meta-packages collect --meta oem-qemu-meta --output meta-info.json186oem-meta-packages collect --meta oem-qemu-meta --output meta-info.json
181187
182Collect the information of OEM metapackages in PPAs and devel/staging archives.""")188Collect the information of OEM metapackages in PPAs and devel/staging archives.""",
183189)
184collect.add_argument("--json",190
185 help="Specify the json file to read the platforms information.",191collect.add_argument(
186 type=argparse.FileType('r', encoding='UTF-8'))192 "--json",
187collect.add_argument("--meta",193 help="Specify the json file to read the platforms information.",
188 help="Specify the meta package to collect the information.")194 type=argparse.FileType("r", encoding="UTF-8"),
189collect.add_argument("-o", "--output", required=True,195)
190 help="Specify a filename to write the meta information.",196collect.add_argument(
191 type=argparse.FileType('w', encoding='UTF-8'))197 "--meta", help="Specify the meta package to collect the information."
192198)
193list_pkg = subparsers.add_parser('list', help='[-h]',199collect.add_argument(
194 formatter_class=argparse.RawDescriptionHelpFormatter,200 "-o",
195 epilog="List all OEM meta packages in Ubuntu archive.")201 "--output",
196202 required=True,
197subscribe = subparsers.add_parser('subscribe', help='[-h]',203 help="Specify a filename to write the meta information.",
198 formatter_class=argparse.RawDescriptionHelpFormatter,204 type=argparse.FileType("w", encoding="UTF-8"),
199 epilog="""205)
206
207list_pkg = subparsers.add_parser(
208 "list",
209 help="[-h]",
210 formatter_class=argparse.RawDescriptionHelpFormatter,
211 epilog="List all OEM meta packages in Ubuntu archive.",
212)
213
214subscribe = subparsers.add_parser(
215 "subscribe",
216 help="[-h]",
217 formatter_class=argparse.RawDescriptionHelpFormatter,
218 epilog="""
200Make all bugs of all oem meta packages be subscribed by oem-solutions-engineers.219Make all bugs of all oem meta packages be subscribed by oem-solutions-engineers.
201 (search current apt source for package lists)220 (search current apt source for package lists)
202221
@@ -205,56 +224,81 @@ Make all bugs of all oem meta packages be subscribed by oem-solutions-engineers.
205 (search current apt source for package lists)224 (search current apt source for package lists)
206225
207Check "To all bugs in oem-qemu-meta in Ubuntu:" on https://launchpad.net/ubuntu/+source/oem-qemu-meta/+subscribe for example.226Check "To all bugs in oem-qemu-meta in Ubuntu:" on https://launchpad.net/ubuntu/+source/oem-qemu-meta/+subscribe for example.
208""")227""",
209228)
210unsubscribe = subparsers.add_parser('unsubscribe', help='[-h] pkgName',229
211 formatter_class=argparse.RawDescriptionHelpFormatter,230unsubscribe = subparsers.add_parser(
212 epilog="""231 "unsubscribe",
232 help="[-h] pkgName",
233 formatter_class=argparse.RawDescriptionHelpFormatter,
234 epilog="""
213Unsubscribe oem-solutions-engineers from oem-qemu-meta235Unsubscribe oem-solutions-engineers from oem-qemu-meta
214236
215For example,237For example,
216 oem-meta-packages unsubscribe oem-qemu-meta238 oem-meta-packages unsubscribe oem-qemu-meta
217""")239""",
218unsubscribe.add_argument("pkgName", type=str,240)
219 help="Specify the package name to unsubscribe.")241unsubscribe.add_argument(
220242 "pkgName", type=str, help="Specify the package name to unsubscribe."
221staging_copy = subparsers.add_parser('staging-copy', help='[-h] [--ignore-staging-lock] --json meta-info.json | --meta oem-qemu-meta',243)
222 formatter_class=argparse.RawDescriptionHelpFormatter,244
223 epilog="""245staging_copy = subparsers.add_parser(
246 "staging-copy",
247 help="[-h] [--ignore-staging-lock] --json meta-info.json | --meta oem-qemu-meta",
248 formatter_class=argparse.RawDescriptionHelpFormatter,
249 epilog="""
224For example,250For example,
225 oem-meta-packages --dry-run staging-copy --meta oem-qemu-meta251 oem-meta-packages --dry-run staging-copy --meta oem-qemu-meta
226 or252 or
227 oem-meta-packages --dry-run staging-copy --json meta-info.json (generated by the 'collect' subcommand.)253 oem-meta-packages --dry-run staging-copy --json meta-info.json (generated by the 'collect' subcommand.)
228254
229Copy the meta package from the devel archive into the staging archive.""")255Copy the meta package from the devel archive into the staging archive.""",
230256)
231staging_copy.add_argument("--json",257
232 help="Specify the json file to read the meta information.",258staging_copy.add_argument(
233 type=argparse.FileType('r', encoding='UTF-8'))259 "--json",
234staging_copy.add_argument("--meta",260 help="Specify the json file to read the meta information.",
235 help="Specify the meta package to copy.")261 type=argparse.FileType("r", encoding="UTF-8"),
236staging_copy.add_argument("--ignore-staging-lock",262)
237 help="Ignore the staging-lock tag.", action="store_true")263staging_copy.add_argument("--meta", help="Specify the meta package to copy.")
238264staging_copy.add_argument(
239update = subparsers.add_parser('update', help='[-h] [--autopkgtest] --json meta-info.json | --meta oem-qemu-meta',265 "--ignore-staging-lock", help="Ignore the staging-lock tag.", action="store_true"
240 formatter_class=argparse.RawDescriptionHelpFormatter,266)
241 epilog="""267
268update = subparsers.add_parser(
269 "update",
270 help="[-h] [--autopkgtest] --json meta-info.json | --meta oem-qemu-meta",
271 formatter_class=argparse.RawDescriptionHelpFormatter,
272 epilog="""
242For example,273For example,
243 oem-meta-packages --dry-run update --meta oem-qemu-meta --kernel linux-oem-20.04274 oem-meta-packages --dry-run update --meta oem-qemu-meta --kernel linux-oem-20.04
244 or275 or
245 oem-meta-packages --dry-run update --json meta-info.json (generated by the 'collect' subcommand.)276 oem-meta-packages --dry-run update --json meta-info.json (generated by the 'collect' subcommand.)
246277
247Update the market name and the kernel flavour of the OEM meta package to the default kernel flavour, i.e. linux-generic-hwe-20.04.""")278Update the market name and the kernel flavour of the OEM meta package to the default kernel flavour, i.e. linux-generic-hwe-20.04.""",
248update.add_argument("--autopkgtest", action="store_true",279)
249 help="Run autopkgtest when checking the git repository.")280update.add_argument(
250update.add_argument("--json", type=argparse.FileType('r', encoding='UTF-8'),281 "--autopkgtest",
251 help="Specify the json file to read the meta information.")282 action="store_true",
252update.add_argument("--meta", type=str,283 help="Run autopkgtest when checking the git repository.",
253 help="Specify the meta package to update.")284)
254update.add_argument("--kernel", type=str, default="linux-generic-hwe-20.04",285update.add_argument(
255 help="Specify the kernel meta to update. linux-generic-hwe-20.04|linux-oem-20.04b|linux-oem-20.04")286 "--json",
256update.add_argument("--factory", action="store_true",287 type=argparse.FileType("r", encoding="UTF-8"),
257 help="Make the factory meta to depend on the kernel meta directly.")288 help="Specify the json file to read the meta information.",
289)
290update.add_argument("--meta", type=str, help="Specify the meta package to update.")
291update.add_argument(
292 "--kernel",
293 type=str,
294 default="linux-generic-hwe-20.04",
295 help="Specify the kernel meta to update. linux-generic-hwe-20.04|linux-oem-20.04b|linux-oem-20.04",
296)
297update.add_argument(
298 "--factory",
299 action="store_true",
300 help="Make the factory meta to depend on the kernel meta directly.",
301)
258302
259args = parser.parse_args()303args = parser.parse_args()
260304
@@ -263,18 +307,20 @@ setup_logging(debug=args.debug, quiet=args.quiet)
263if args.subcommand:307if args.subcommand:
264 login = LaunchpadLogin()308 login = LaunchpadLogin()
265 lp = login.lp309 lp = login.lp
266 oem_archive = lp.people['oem-archive']310 oem_archive = lp.people["oem-archive"]
267311
268if args.apt_dir:312if args.apt_dir:
269 apt_pkg.init_config()313 apt_pkg.init_config()
270 if args.debug:314 if args.debug:
271 old = apt_pkg.config.dump()315 old = apt_pkg.config.dump()
272 apt_pkg.config.set("Dir", args.apt_dir)316 apt_pkg.config.set("Dir", args.apt_dir)
273 apt_pkg.config.set("Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status"))317 apt_pkg.config.set(
318 "Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status")
319 )
274 if args.debug:320 if args.debug:
275 new = apt_pkg.config.dump()321 new = apt_pkg.config.dump()
276 d = difflib.Differ()322 d = difflib.Differ()
277 diff = d.compare(old.split('\n'), new.split('\n'))323 diff = d.compare(old.split("\n"), new.split("\n"))
278 for line in diff:324 for line in diff:
279 debug(line.strip())325 debug(line.strip())
280 apt_pkg.init_system()326 apt_pkg.init_system()
@@ -288,7 +334,7 @@ def yes_or_ask(yes: bool, message: str) -> bool:
288 res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower()334 res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower()
289 if res not in {"y", "n"}:335 if res not in {"y", "n"}:
290 continue336 continue
291 if res == 'y':337 if res == "y":
292 return True338 return True
293 else:339 else:
294 return False340 return False
@@ -302,21 +348,25 @@ def _debug_obj(pkg) -> None:
302 debug(dir(pkg))348 debug(dir(pkg))
303349
304 for attr in dir(pkg):350 for attr in dir(pkg):
305 if not attr.startswith('__'):351 if not attr.startswith("__"):
306 if not isinstance(pkg.__getattribute__(attr), types.BuiltinFunctionType):352 if not isinstance(pkg.__getattribute__(attr), types.BuiltinFunctionType):
307 debug(f"{attr}: {pkg.__getattribute__(attr)}")353 debug(f"{attr}: {pkg.__getattribute__(attr)}")
308354
309355
310def _run_command(command: list or tuple, returncode=(0,), env=None, silent=False) -> (str, str, int):356def _run_command(
357 command: list or tuple, returncode=(0,), env=None, silent=False
358) -> (str, str, int):
311 if not silent:359 if not silent:
312 debug("$ " + " ".join(command))360 debug("$ " + " ".join(command))
313 proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)361 proc = subprocess.Popen(
362 command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
363 )
314 out, err = proc.communicate()364 out, err = proc.communicate()
315365
316 if out:366 if out:
317 out = out.decode('utf-8').strip()367 out = out.decode("utf-8").strip()
318 if err:368 if err:
319 err = err.decode('utf-8').strip()369 err = err.decode("utf-8").strip()
320370
321 if proc.returncode not in returncode:371 if proc.returncode not in returncode:
322 critical(f"return {proc.returncode}")372 critical(f"return {proc.returncode}")
@@ -339,7 +389,11 @@ def get_oem_meta_packages(cache) -> list:
339 pkg_list = []389 pkg_list = []
340 for pkg in cache.packages:390 for pkg in cache.packages:
341 name = pkg.name391 name = pkg.name
342 if not name.startswith('oem-') or not name.endswith('-meta') or '-factory-' in name:392 if (
393 not name.startswith("oem-")
394 or not name.endswith("-meta")
395 or "-factory-" in name
396 ):
343 continue397 continue
344 pkg_list.append(name)398 pkg_list.append(name)
345 return sorted(pkg_list)399 return sorted(pkg_list)
@@ -347,8 +401,8 @@ def get_oem_meta_packages(cache) -> list:
347401
348def _grouping_market_names(market_names: list, maxsplit=1) -> str:402def _grouping_market_names(market_names: list, maxsplit=1) -> str:
349 # Remove empty item403 # Remove empty item
350 while '' in market_names:404 while "" in market_names:
351 market_names.remove('')405 market_names.remove("")
352 tmp = collections.defaultdict(list)406 tmp = collections.defaultdict(list)
353 space_in_model = False407 space_in_model = False
354 try:408 try:
@@ -356,28 +410,28 @@ def _grouping_market_names(market_names: list, maxsplit=1) -> str:
356 if maxsplit == 1:410 if maxsplit == 1:
357 name, model = market_name.split(maxsplit=maxsplit)411 name, model = market_name.split(maxsplit=maxsplit)
358 tmp[name].append(model)412 tmp[name].append(model)
359 if ' ' in model:413 if " " in model:
360 space_in_model = True414 space_in_model = True
361 elif maxsplit == 2:415 elif maxsplit == 2:
362 brand, name, model = market_name.split(maxsplit=maxsplit)416 brand, name, model = market_name.split(maxsplit=maxsplit)
363 tmp[brand + ' ' + name].append(model)417 tmp[brand + " " + name].append(model)
364 if ' ' in model:418 if " " in model:
365 space_in_model = True419 space_in_model = True
366 except ValueError:420 except ValueError:
367 return ', '.join(sorted(market_names))421 return ", ".join(sorted(market_names))
368422
369 if space_in_model:423 if space_in_model:
370 return ', '.join(f"{name} {', '.join(models)}" for name, models in tmp.items())424 return ", ".join(f"{name} {', '.join(models)}" for name, models in tmp.items())
371 else:425 else:
372 return ', '.join(f"{name} {'/'.join(models)}" for name, models in tmp.items())426 return ", ".join(f"{name} {'/'.join(models)}" for name, models in tmp.items())
373427
374428
375def deal_with_description(git_dir, old, new) -> bool:429def deal_with_description(git_dir, old, new) -> bool:
376 if not old or not new:430 if not old or not new:
377 return False431 return False
378 os.chdir(git_dir)432 os.chdir(git_dir)
379 file_path = os.path.join(git_dir, 'debian', 'control')433 file_path = os.path.join(git_dir, "debian", "control")
380 with open(file_path, 'r') as control:434 with open(file_path, "r") as control:
381 lines = control.readlines()435 lines = control.readlines()
382 changed = False436 changed = False
383 for i, line in enumerate(lines):437 for i, line in enumerate(lines):
@@ -385,7 +439,7 @@ def deal_with_description(git_dir, old, new) -> bool:
385 changed = True439 changed = True
386 lines[i] = line.replace(old, new)440 lines[i] = line.replace(old, new)
387 info(f'"{old}" will be replaced by "{new}".')441 info(f'"{old}" will be replaced by "{new}".')
388 with open(file_path, 'w') as control:442 with open(file_path, "w") as control:
389 control.writelines(lines)443 control.writelines(lines)
390 _run_command(["git", "add", "debian/control"])444 _run_command(["git", "add", "debian/control"])
391 return changed445 return changed
@@ -395,39 +449,41 @@ def deal_with_kernel_flavour(pkg_name, branch, git_dir) -> bool:
395 os.chdir(git_dir)449 os.chdir(git_dir)
396 idx = -1450 idx = -1
397 kernel_flavour = None451 kernel_flavour = None
398 file_path = os.path.join(git_dir, 'debian', 'control')452 file_path = os.path.join(git_dir, "debian", "control")
399 with open(file_path, 'r') as control:453 with open(file_path, "r") as control:
400 lines = control.readlines()454 lines = control.readlines()
401 for i, line in enumerate(lines):455 for i, line in enumerate(lines):
402 if line.startswith('XB-Ubuntu-OEM-Kernel-Flavour:'):456 if line.startswith("XB-Ubuntu-OEM-Kernel-Flavour:"):
403 kernel_flavour = line[len('XB-Ubuntu-OEM-Kernel-Flavour:'):].strip()457 kernel_flavour = line[len("XB-Ubuntu-OEM-Kernel-Flavour:") :].strip()
404 idx = i458 idx = i
405 break459 break
406460
407 if not kernel_flavour:461 if not kernel_flavour:
408 critical(f"There is no XB-Ubuntu-OEM-Kernel-Flavour in debian/control of {branch} for {pkg_name}.")462 critical(
463 f"There is no XB-Ubuntu-OEM-Kernel-Flavour in debian/control of {branch} for {pkg_name}."
464 )
409 exit(1)465 exit(1)
410466
411 debug(f"XB-Ubuntu-OEM-Kernel-Flavour: {kernel_flavour}")467 debug(f"XB-Ubuntu-OEM-Kernel-Flavour: {kernel_flavour}")
412468
413 if args.kernel == 'linux-generic-hwe-20.04':469 if args.kernel == "linux-generic-hwe-20.04":
414 if kernel_flavour == 'default':470 if kernel_flavour == "default":
415 return False471 return False
416 kernel_flavour = 'default'472 kernel_flavour = "default"
417 elif args.kernel == 'linux-oem-20.04':473 elif args.kernel == "linux-oem-20.04":
418 if kernel_flavour == 'oem':474 if kernel_flavour == "oem":
419 return False475 return False
420 kernel_flavour = 'oem'476 kernel_flavour = "oem"
421 elif args.kernel == 'linux-oem-20.04b':477 elif args.kernel == "linux-oem-20.04b":
422 if kernel_flavour == 'oem':478 if kernel_flavour == "oem":
423 return False479 return False
424 kernel_flavour = 'oem'480 kernel_flavour = "oem"
425 else:481 else:
426 print(f"{args.kernel} is not supported.")482 print(f"{args.kernel} is not supported.")
427 exit(1)483 exit(1)
428484
429 lines[idx] = f"XB-Ubuntu-OEM-Kernel-Flavour: {kernel_flavour}\n"485 lines[idx] = f"XB-Ubuntu-OEM-Kernel-Flavour: {kernel_flavour}\n"
430 with open(file_path, 'w') as control:486 with open(file_path, "w") as control:
431 control.writelines(lines)487 control.writelines(lines)
432 _run_command(["git", "add", "debian/control"])488 _run_command(["git", "add", "debian/control"])
433 return True489 return True
@@ -436,12 +492,12 @@ def deal_with_kernel_flavour(pkg_name, branch, git_dir) -> bool:
436def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:492def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:
437 os.chdir(git_dir)493 os.chdir(git_dir)
438 idx = -1494 idx = -1
439 file_path = os.path.join(git_dir, 'debian', 'control')495 file_path = os.path.join(git_dir, "debian", "control")
440 changed = False496 changed = False
441 with open(file_path, 'r') as control:497 with open(file_path, "r") as control:
442 lines = control.readlines()498 lines = control.readlines()
443 for i, line in enumerate(lines):499 for i, line in enumerate(lines):
444 if line.startswith('Depends:'):500 if line.startswith("Depends:"):
445 idx = i501 idx = i
446 break502 break
447503
@@ -452,24 +508,42 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:
452 debug(lines[idx].strip())508 debug(lines[idx].strip())
453509
454 # this only works for updating auto-generated code510 # this only works for updating auto-generated code
455 if args.kernel == 'linux-generic-hwe-20.04' and ', linux-generic-hwe-20.04,' not in lines[idx]:511 if (
456 lines[idx] = lines[idx].replace(', linux-oem-20.04,', ', linux-generic-hwe-20.04,')512 args.kernel == "linux-generic-hwe-20.04"
457 lines[idx] = lines[idx].replace(', linux-oem-20.04b | linux-oem-20.04,', ', linux-generic-hwe-20.04,')513 and ", linux-generic-hwe-20.04," not in lines[idx]
514 ):
515 lines[idx] = lines[idx].replace(
516 ", linux-oem-20.04,", ", linux-generic-hwe-20.04,"
517 )
518 lines[idx] = lines[idx].replace(
519 ", linux-oem-20.04b | linux-oem-20.04,", ", linux-generic-hwe-20.04,"
520 )
458 changed = True521 changed = True
459 elif args.kernel == 'linux-oem-20.04' and ', linux-oem-20.04,' not in lines[idx]:522 elif args.kernel == "linux-oem-20.04" and ", linux-oem-20.04," not in lines[idx]:
460 lines[idx] = lines[idx].replace(', linux-generic-hwe-20.04,', ', linux-oem-20.04,')523 lines[idx] = lines[idx].replace(
461 lines[idx] = lines[idx].replace(', linux-oem-20.04b | linux-oem-20.04,', ', linux-oem-20.04,')524 ", linux-generic-hwe-20.04,", ", linux-oem-20.04,"
525 )
526 lines[idx] = lines[idx].replace(
527 ", linux-oem-20.04b | linux-oem-20.04,", ", linux-oem-20.04,"
528 )
462 changed = True529 changed = True
463 elif args.kernel == 'linux-oem-20.04b' and ', linux-oem-20.04b | linux-oem-20.04,' not in lines[idx]:530 elif (
464 lines[idx] = lines[idx].replace(', linux-generic-hwe-20.04,', ', linux-oem-20.04b | linux-oem-20.04,')531 args.kernel == "linux-oem-20.04b"
465 lines[idx] = lines[idx].replace(', linux-oem-20.04,', ', linux-oem-20.04b | linux-oem-20.04,')532 and ", linux-oem-20.04b | linux-oem-20.04," not in lines[idx]
533 ):
534 lines[idx] = lines[idx].replace(
535 ", linux-generic-hwe-20.04,", ", linux-oem-20.04b | linux-oem-20.04,"
536 )
537 lines[idx] = lines[idx].replace(
538 ", linux-oem-20.04,", ", linux-oem-20.04b | linux-oem-20.04,"
539 )
466 changed = True540 changed = True
467541
468 if args.factory:542 if args.factory:
469 factory_idx = -1543 factory_idx = -1
470 # Find the factory depends.544 # Find the factory depends.
471 for i, line in enumerate(lines):545 for i, line in enumerate(lines):
472 if i > idx and line.startswith('Depends:'):546 if i > idx and line.startswith("Depends:"):
473 factory_idx = i547 factory_idx = i
474 depends_line = lines[factory_idx].strip()548 depends_line = lines[factory_idx].strip()
475 break549 break
@@ -478,7 +552,11 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:
478 critical("It can not find factory 'Depends' in debian/control.")552 critical("It can not find factory 'Depends' in debian/control.")
479 exit(1)553 exit(1)
480554
481 for kernel in ('linux-generic-hwe-20.04', 'linux-oem-20.04', 'linux-oem-20.04b'):555 for kernel in (
556 "linux-generic-hwe-20.04",
557 "linux-oem-20.04",
558 "linux-oem-20.04b",
559 ):
482 if depends_line.endswith(kernel) or f"{kernel}," in depends_line:560 if depends_line.endswith(kernel) or f"{kernel}," in depends_line:
483 if kernel != args.kernel:561 if kernel != args.kernel:
484 lines[factory_idx] = lines[factory_idx].replace(kernel, args.kernel)562 lines[factory_idx] = lines[factory_idx].replace(kernel, args.kernel)
@@ -488,7 +566,7 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:
488 lines[factory_idx] = depends_line + f", {args.kernel}\n"566 lines[factory_idx] = depends_line + f", {args.kernel}\n"
489 changed = True567 changed = True
490568
491 with open(file_path, 'w') as control:569 with open(file_path, "w") as control:
492 control.writelines(lines)570 control.writelines(lines)
493571
494 if changed:572 if changed:
@@ -497,12 +575,14 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool:
497 return changed575 return changed
498576
499577
500def deal_with_debian_tests(pkg_name: str, git_dir: str, branch: str, bootstrap: bool) -> bool:578def deal_with_debian_tests(
579 pkg_name: str, git_dir: str, branch: str, bootstrap: bool
580) -> bool:
501 os.chdir(git_dir)581 os.chdir(git_dir)
502582
503 changed = False583 changed = False
504584
505 tests_folder = os.path.join(git_dir, 'debian', 'tests')585 tests_folder = os.path.join(git_dir, "debian", "tests")
506 if not os.path.exists(tests_folder):586 if not os.path.exists(tests_folder):
507 os.mkdir(tests_folder)587 os.mkdir(tests_folder)
508588
@@ -510,17 +590,17 @@ def deal_with_debian_tests(pkg_name: str, git_dir: str, branch: str, bootstrap:
510Depends: @590Depends: @
511Restrictions: needs-root591Restrictions: needs-root
512"""592"""
513 control = os.path.join(git_dir, 'debian', 'tests', 'control')593 control = os.path.join(git_dir, "debian", "tests", "control")
514594
515 if os.path.exists(control):595 if os.path.exists(control):
516 with open(control, 'r') as f:596 with open(control, "r") as f:
517 if f.read() != control_content:597 if f.read() != control_content:
518 with open(control, 'w') as fp:598 with open(control, "w") as fp:
519 fp.write(control_content)599 fp.write(control_content)
520 _run_command(["git", "add", "debian/tests/control"])600 _run_command(["git", "add", "debian/tests/control"])
521 changed = True601 changed = True
522 else:602 else:
523 with open(control, 'w') as fp:603 with open(control, "w") as fp:
524 fp.write(control_content)604 fp.write(control_content)
525 _run_command(["git", "add", "debian/tests/control"])605 _run_command(["git", "add", "debian/tests/control"])
526 changed = True606 changed = True
@@ -538,29 +618,37 @@ apt-get update
538apt-get full-upgrade --yes618apt-get full-upgrade --yes
539"""619"""
540 if not bootstrap:620 if not bootstrap:
541 if 'oem' in args.kernel:621 if "oem" in args.kernel:
542 grub_flavour = 'oem'622 grub_flavour = "oem"
543 else:623 else:
544 grub_flavour = 'generic'624 grub_flavour = "generic"
545 meta_content += '\ngrep ^GRUB_FLAVOUR_ORDER=' + grub_flavour + '$ /etc/default/grub.d/oem-flavour.cfg\n'625 meta_content += (
546 meta_content += '\ndpkg-query -W -f=\'${Status}\' ' + args.kernel + ' | grep "install ok installed"\n'626 "\ngrep ^GRUB_FLAVOUR_ORDER="
627 + grub_flavour
628 + "$ /etc/default/grub.d/oem-flavour.cfg\n"
629 )
630 meta_content += (
631 "\ndpkg-query -W -f='${Status}' "
632 + args.kernel
633 + ' | grep "install ok installed"\n'
634 )
547 meta_content += f"\napt-get autoremove --purge --yes {pkg_name}\n"635 meta_content += f"\napt-get autoremove --purge --yes {pkg_name}\n"
548 meta = os.path.join(git_dir, 'debian', 'tests', 'meta')636 meta = os.path.join(git_dir, "debian", "tests", "meta")
549 old_meta = os.path.join(git_dir, 'debian', 'tests', pkg_name)637 old_meta = os.path.join(git_dir, "debian", "tests", pkg_name)
550638
551 if os.path.exists(old_meta):639 if os.path.exists(old_meta):
552 _run_command(["git", "rm", "-f", f"debian/tests/{pkg_name}"])640 _run_command(["git", "rm", "-f", f"debian/tests/{pkg_name}"])
553 changed = True641 changed = True
554642
555 if os.path.exists(meta):643 if os.path.exists(meta):
556 with open(meta, 'r') as f:644 with open(meta, "r") as f:
557 if f.read() != meta_content:645 if f.read() != meta_content:
558 with open(meta, 'w') as fp:646 with open(meta, "w") as fp:
559 fp.write(meta_content)647 fp.write(meta_content)
560 _run_command(["git", "add", "debian/tests/meta"])648 _run_command(["git", "add", "debian/tests/meta"])
561 changed = True649 changed = True
562 else:650 else:
563 with open(meta, 'w') as fp:651 with open(meta, "w") as fp:
564 fp.write(meta_content)652 fp.write(meta_content)
565 _run_command(["git", "add", "debian/tests/meta"])653 _run_command(["git", "add", "debian/tests/meta"])
566 changed = True654 changed = True
@@ -570,29 +658,30 @@ apt-get full-upgrade --yes
570658
571def deal_with_gbp_conf(git_dir, branch) -> bool:659def deal_with_gbp_conf(git_dir, branch) -> bool:
572 os.chdir(git_dir)660 os.chdir(git_dir)
573 file_path = os.path.join(git_dir, 'debian', 'gbp.conf')661 file_path = os.path.join(git_dir, "debian", "gbp.conf")
574 gbp_conf = f"""[DEFAULT]662 gbp_conf = f"""[DEFAULT]
575pristine-tar = False663pristine-tar = False
576debian-branch = {branch}664debian-branch = {branch}
577debian-tag = {branch}_%(version)s665debian-tag = {branch}_%(version)s
578"""666"""
579 if os.path.exists(file_path):667 if os.path.exists(file_path):
580 with open(file_path, 'r') as f:668 with open(file_path, "r") as f:
581 if f.read() == gbp_conf:669 if f.read() == gbp_conf:
582 return False670 return False
583 with open(file_path, 'w') as f:671 with open(file_path, "w") as f:
584 f.write(gbp_conf)672 f.write(gbp_conf)
585 _run_command(["git", "add", "debian/gbp.conf"])673 _run_command(["git", "add", "debian/gbp.conf"])
586 return True674 return True
587675
588676
589def deal_with_maintainer_scripts(pkg_name, branch, git_dir) -> bool:677def deal_with_maintainer_scripts(pkg_name, branch, git_dir) -> bool:
590 postinst_path = os.path.join(git_dir, 'debian', 'postinst')678 postinst_path = os.path.join(git_dir, "debian", "postinst")
591 postrm_path = os.path.join(git_dir, 'debian', 'postrm')679 postrm_path = os.path.join(git_dir, "debian", "postrm")
592 modified = False680 modified = False
593681
594 with open(postinst_path, 'w') as f:682 with open(postinst_path, "w") as f:
595 f.write(f'''#!/bin/sh683 f.write(
684 f"""#!/bin/sh
596685
597set -e686set -e
598687
@@ -609,14 +698,16 @@ case "$1" in
609esac698esac
610699
611#DEBHELPER#700#DEBHELPER#
612''')701"""
702 )
613 _run_command(["git", "add", "debian/postinst"])703 _run_command(["git", "add", "debian/postinst"])
614 output, _, _ = _run_command(["git", "status", "--porcelain", "debian/postinst"])704 output, _, _ = _run_command(["git", "status", "--porcelain", "debian/postinst"])
615 if output:705 if output:
616 modified = True706 modified = True
617707
618 with open(postrm_path, 'w') as f:708 with open(postrm_path, "w") as f:
619 f.write('''#!/bin/sh709 f.write(
710 """#!/bin/sh
620711
621set -e712set -e
622713
@@ -632,7 +723,8 @@ case "$1" in
632esac723esac
633724
634#DEBHELPER#725#DEBHELPER#
635''')726"""
727 )
636 _run_command(["git", "add", "debian/postrm"])728 _run_command(["git", "add", "debian/postrm"])
637 output, _, _ = _run_command(["git", "status", "--porcelain", "debian/postrm"])729 output, _, _ = _run_command(["git", "status", "--porcelain", "debian/postrm"])
638 if output:730 if output:
@@ -644,39 +736,41 @@ esac
644def deal_with_grub_flavour(pkg_name, branch, git_dir) -> bool:736def deal_with_grub_flavour(pkg_name, branch, git_dir) -> bool:
645 os.chdir(git_dir)737 os.chdir(git_dir)
646 grub_flavour = None738 grub_flavour = None
647 file_path = os.path.join(git_dir, 'oem-flavour.cfg')739 file_path = os.path.join(git_dir, "oem-flavour.cfg")
648 if os.path.exists(file_path):740 if os.path.exists(file_path):
649 with open(file_path, 'r') as oem_flavour:741 with open(file_path, "r") as oem_flavour:
650 for line in oem_flavour:742 for line in oem_flavour:
651 if line.startswith('GRUB_FLAVOUR_ORDER='):743 if line.startswith("GRUB_FLAVOUR_ORDER="):
652 grub_flavour = line[len('GRUB_FLAVOUR_ORDER='):].strip()744 grub_flavour = line[len("GRUB_FLAVOUR_ORDER=") :].strip()
653 break745 break
654746
655 if args.kernel == 'linux-generic-hwe-20.04':747 if args.kernel == "linux-generic-hwe-20.04":
656 if grub_flavour == 'generic':748 if grub_flavour == "generic":
657 return False749 return False
658 grub_flavour = 'generic'750 grub_flavour = "generic"
659 elif args.kernel == 'linux-oem-20.04':751 elif args.kernel == "linux-oem-20.04":
660 if grub_flavour == 'oem':752 if grub_flavour == "oem":
661 return False753 return False
662 grub_flavour = 'oem'754 grub_flavour = "oem"
663 elif args.kernel == 'linux-oem-20.04b':755 elif args.kernel == "linux-oem-20.04b":
664 if grub_flavour == 'oem':756 if grub_flavour == "oem":
665 return False757 return False
666 grub_flavour = 'oem'758 grub_flavour = "oem"
667 else:759 else:
668 print(f"{args.kernel} is not supported.")760 print(f"{args.kernel} is not supported.")
669 exit(1)761 exit(1)
670762
671 if not os.path.exists(file_path):763 if not os.path.exists(file_path):
672 with open(os.path.join(git_dir, 'debian', 'install'), 'a') as f:764 with open(os.path.join(git_dir, "debian", "install"), "a") as f:
673 f.write(f"oem-flavour.cfg /usr/share/{pkg_name}/\n")765 f.write(f"oem-flavour.cfg /usr/share/{pkg_name}/\n")
674 _run_command(["git", "add", "debian/install"])766 _run_command(["git", "add", "debian/install"])
675767
676 with open(file_path, 'w') as f:768 with open(file_path, "w") as f:
677 f.write(f"""# This file is automatically generated by {pkg_name}, and changes will be overriden769 f.write(
770 f"""# This file is automatically generated by {pkg_name}, and changes will be overriden
678GRUB_FLAVOUR_ORDER={grub_flavour}771GRUB_FLAVOUR_ORDER={grub_flavour}
679""")772"""
773 )
680 _run_command(["git", "add", "oem-flavour.cfg"])774 _run_command(["git", "add", "oem-flavour.cfg"])
681775
682 return True776 return True
@@ -684,29 +778,31 @@ GRUB_FLAVOUR_ORDER={grub_flavour}
684778
685# Python 3.9 supports this.779# Python 3.9 supports this.
686def remove_prefix(s, prefix):780def remove_prefix(s, prefix):
687 return s[len(prefix):] if s.startswith(prefix) else s781 return s[len(prefix) :] if s.startswith(prefix) else s
688782
689783
690# Python 3.9 supports this.784# Python 3.9 supports this.
691def remove_suffix(s, suffix):785def remove_suffix(s, suffix):
692 return s[:-len(suffix)] if s.endswith(suffix) else s786 return s[: -len(suffix)] if s.endswith(suffix) else s
693787
694788
695def remove_prefix_suffix(s, prefix, suffix):789def remove_prefix_suffix(s, prefix, suffix):
696 return remove_suffix(remove_prefix(s, prefix), suffix)790 return remove_suffix(remove_prefix(s, prefix), suffix)
697791
698792
699def search_ppa_and_version(project: str, group: str, platform: str, pkg_name: str, archive_name=None):793def search_ppa_and_version(
794 project: str, group: str, platform: str, pkg_name: str, archive_name=None
795):
700 if archive_name:796 if archive_name:
701 archive = oem_archive.getPPAByName(name=archive_name)797 archive = oem_archive.getPPAByName(name=archive_name)
702 elif project == 'somerville':798 elif project == "somerville":
703 try:799 try:
704 archive = oem_archive.getPPAByName(name=f"{project}-fossa-{platform}")800 archive = oem_archive.getPPAByName(name=f"{project}-fossa-{platform}")
705 except lazr.restfulclient.errors.NotFound:801 except lazr.restfulclient.errors.NotFound:
706 archive = oem_archive.getPPAByName(name=f"{project}-{platform}")802 archive = oem_archive.getPPAByName(name=f"{project}-{platform}")
707 elif project == 'stella':803 elif project == "stella":
708 archive = oem_archive.getPPAByName(name=f"{project}-{group}-ouagadougou")804 archive = oem_archive.getPPAByName(name=f"{project}-{group}-ouagadougou")
709 elif project == 'sutton':805 elif project == "sutton":
710 try:806 try:
711 archive = oem_archive.getPPAByName(name=f"{project}-{group}-ouagadougou")807 archive = oem_archive.getPPAByName(name=f"{project}-{group}-ouagadougou")
712 except lazr.restfulclient.errors.NotFound:808 except lazr.restfulclient.errors.NotFound:
@@ -718,7 +814,7 @@ def search_ppa_and_version(project: str, group: str, platform: str, pkg_name: st
718 archive.newSubscription(subscriber=lp.me)814 archive.newSubscription(subscriber=lp.me)
719 archive.lp_save()815 archive.lp_save()
720 except lazr.restfulclient.errors.BadRequest as e:816 except lazr.restfulclient.errors.BadRequest as e:
721 if 'already has a current subscription for' not in str(e):817 if "already has a current subscription for" not in str(e):
722 raise e818 raise e
723 _run_command(["get-private-ppa", f"ppa:oem-archive/{archive.name}"])819 _run_command(["get-private-ppa", f"ppa:oem-archive/{archive.name}"])
724 source_lists = "\n".join(lp.me.getArchiveSubscriptionURLs())820 source_lists = "\n".join(lp.me.getArchiveSubscriptionURLs())
@@ -726,8 +822,10 @@ def search_ppa_and_version(project: str, group: str, platform: str, pkg_name: st
726 fingerprint = archive.signing_key_fingerprint822 fingerprint = archive.signing_key_fingerprint
727 version = ""823 version = ""
728 for source in sources:824 for source in sources:
729 if source.source_package_name == pkg_name and \825 if (
730 apt_pkg.version_compare(source.source_package_version, version) > 0:826 source.source_package_name == pkg_name
827 and apt_pkg.version_compare(source.source_package_version, version) > 0
828 ):
731 version = source.source_package_version829 version = source.source_package_version
732 if version:830 if version:
733 return archive.name, version, fingerprint831 return archive.name, version, fingerprint
@@ -741,8 +839,8 @@ def get_debian_version_from_git(pkg_name: str) -> str:
741 if not result:839 if not result:
742 return None840 return None
743841
744 if '.' in result.group(1):842 if "." in result.group(1):
745 project, group = result.group(1).split('.')843 project, group = result.group(1).split(".")
746 else:844 else:
747 project = result.group(1)845 project = result.group(1)
748 group = None846 group = None
@@ -756,137 +854,215 @@ def get_debian_version_from_git(pkg_name: str) -> str:
756 ubuntu_branch = f"{platform}-focal-ubuntu"854 ubuntu_branch = f"{platform}-focal-ubuntu"
757 oem_branch = f"{platform}-focal-oem"855 oem_branch = f"{platform}-focal-oem"
758856
759 wget_changelog_command = ("wget", '-q', "-O", "changelog",857 wget_changelog_command = (
760 f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={ubuntu_branch}")858 "wget",
859 "-q",
860 "-O",
861 "changelog",
862 f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={ubuntu_branch}",
863 )
761864
762 bootstrap_version = ""865 bootstrap_version = ""
763 with TemporaryDirectory() as tmpdir:866 with TemporaryDirectory() as tmpdir:
764 os.chdir(tmpdir)867 os.chdir(tmpdir)
765 _run_command(wget_changelog_command)868 _run_command(wget_changelog_command)
766 bootstrap_version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"])869 bootstrap_version, _, _ = _run_command(
767870 ["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"]
768 wget_changelog_command = ("wget", '-q', "-O", "changelog",871 )
769 f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={oem_branch}")872
873 wget_changelog_command = (
874 "wget",
875 "-q",
876 "-O",
877 "changelog",
878 f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={oem_branch}",
879 )
770880
771 oem_version = ""881 oem_version = ""
772 with TemporaryDirectory() as tmpdir:882 with TemporaryDirectory() as tmpdir:
773 os.chdir(tmpdir)883 os.chdir(tmpdir)
774 _run_command(wget_changelog_command)884 _run_command(wget_changelog_command)
775 oem_version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"])885 oem_version, _, _ = _run_command(
886 ["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"]
887 )
776888
777 return bootstrap_version, oem_version889 return bootstrap_version, oem_version
778890
779891
780def search_public_archive(pkg_name: str, project: str, codename: str) -> tuple:892def search_public_archive(pkg_name: str, project: str, codename: str) -> tuple:
781 if project == 'somerville':893 if project == "somerville":
782 source_line = 'http://dell.archive.canonical.com/'894 source_line = "http://dell.archive.canonical.com/"
783 archive = f"somerville-{codename}"895 archive = f"somerville-{codename}"
784 elif project == 'stella':896 elif project == "stella":
785 source_line = 'http://hp.archive.canonical.com/'897 source_line = "http://hp.archive.canonical.com/"
786 archive = f"stella.{codename}"898 archive = f"stella.{codename}"
787 elif project == 'sutton':899 elif project == "sutton":
788 source_line = 'http://lenovo.archive.canonical.com/'900 source_line = "http://lenovo.archive.canonical.com/"
789 archive = f"sutton.{codename}"901 archive = f"sutton.{codename}"
790 oem_version = ""902 oem_version = ""
791 with TemporaryDirectory() as tmpdir:903 with TemporaryDirectory() as tmpdir:
792 os.chdir(tmpdir)904 os.chdir(tmpdir)
793 _run_command(['setup-apt-dir.sh',905 _run_command(
794 '-c', 'focal',906 [
795 '--disable-base',907 "setup-apt-dir.sh",
796 '--disable-updates',908 "-c",
797 '--disable-backports',909 "focal",
798 '--apt-dir', tmpdir,910 "--disable-base",
799 '--extra-key', '59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69',911 "--disable-updates",
800 '--extra-repo', f"deb [signed-by={tmpdir}/59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69.pub arch=amd64] {source_line} focal {archive}"],912 "--disable-backports",
801 silent=True)913 "--apt-dir",
802 output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True)914 tmpdir,
803 for line in output.split('\n'):915 "--extra-key",
916 "59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69",
917 "--extra-repo",
918 f"deb [signed-by={tmpdir}/59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69.pub arch=amd64] {source_line} focal {archive}",
919 ],
920 silent=True,
921 )
922 output, _, _ = _run_command(
923 ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name],
924 returncode=(0, 1),
925 silent=True,
926 )
927 for line in output.split("\n"):
804 if pkg_name in line and source_line in line:928 if pkg_name in line and source_line in line:
805 oem_version = line.split(' ')[1]929 oem_version = line.split(" ")[1]
806 info(f"{pkg_name} {oem_version} exists in 'deb {source_line} focal {archive}'.")930 info(
931 f"{pkg_name} {oem_version} exists in 'deb {source_line} focal {archive}'."
932 )
807 break933 break
808 ubuntu_version = ""934 ubuntu_version = ""
809 with TemporaryDirectory() as tmpdir:935 with TemporaryDirectory() as tmpdir:
810 os.chdir(tmpdir)936 os.chdir(tmpdir)
811 _run_command(['setup-apt-dir.sh',937 _run_command(
812 '-c', 'focal',938 [
813 '--disable-backports',939 "setup-apt-dir.sh",
814 '--apt-dir', tmpdir],940 "-c",
815 silent=True)941 "focal",
816 output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True)942 "--disable-backports",
817 for line in output.split('\n'):943 "--apt-dir",
818 if pkg_name in line and 'http://archive.ubuntu.com/ubuntu' in line:944 tmpdir,
819 ubuntu_version = line.split(' ')[1]945 ],
946 silent=True,
947 )
948 output, _, _ = _run_command(
949 ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name],
950 returncode=(0, 1),
951 silent=True,
952 )
953 for line in output.split("\n"):
954 if pkg_name in line and "http://archive.ubuntu.com/ubuntu" in line:
955 ubuntu_version = line.split(" ")[1]
820 info(f"{pkg_name} {ubuntu_version} exists in Ubuntu archive.")956 info(f"{pkg_name} {ubuntu_version} exists in Ubuntu archive.")
821 break957 break
822 proposed_version = ""958 proposed_version = ""
823 with TemporaryDirectory() as tmpdir:959 with TemporaryDirectory() as tmpdir:
824 os.chdir(tmpdir)960 os.chdir(tmpdir)
825 _run_command(['setup-apt-dir.sh',961 _run_command(
826 '-c', 'focal',962 [
827 '--proposed',963 "setup-apt-dir.sh",
828 '--disable-base',964 "-c",
829 '--disable-updates',965 "focal",
830 '--disable-backports',966 "--proposed",
831 '--apt-dir', tmpdir],967 "--disable-base",
832 silent=True)968 "--disable-updates",
833 output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True)969 "--disable-backports",
834 for line in output.split('\n'):970 "--apt-dir",
835 if pkg_name in line and 'http://archive.ubuntu.com/ubuntu' in line:971 tmpdir,
836 proposed_version = line.split(' ')[1]972 ],
973 silent=True,
974 )
975 output, _, _ = _run_command(
976 ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name],
977 returncode=(0, 1),
978 silent=True,
979 )
980 for line in output.split("\n"):
981 if pkg_name in line and "http://archive.ubuntu.com/ubuntu" in line:
982 proposed_version = line.split(" ")[1]
837 info(f"{pkg_name} {proposed_version} exists in focal-proposed.")983 info(f"{pkg_name} {proposed_version} exists in focal-proposed.")
838 break984 break
839 return ubuntu_version, proposed_version, oem_version, archive985 return ubuntu_version, proposed_version, oem_version, archive
840986
841987
842def search_private_archive(pkg_name: str, project: str, platform: str, index: str, config: str, branch: str) -> tuple:988def search_private_archive(
843 domain = config['archive'].split("://")[1].split("/")[0]989 pkg_name: str, project: str, platform: str, index: str, config: str, branch: str
990) -> tuple:
991 domain = config["archive"].split("://")[1].split("/")[0]
844 archive = None992 archive = None
845 version = None993 version = None
846 for line in index.split('\n'):994 for line in index.split("\n"):
847 if project in line and platform in line and f'focal-{branch}' in line:995 if project in line and platform in line and f"focal-{branch}" in line:
848 result = staging_pattern.match(line)996 result = staging_pattern.match(line)
849 if result:997 if result:
850 archive = result.group(1)998 archive = result.group(1)
851 with TemporaryDirectory() as tmpdir:999 with TemporaryDirectory() as tmpdir:
852 os.chdir(tmpdir)1000 os.chdir(tmpdir)
853 source_line = config['archive'].replace("https://", f"https://{config['username']}:{config['password']}@")1001 source_line = config["archive"].replace(
854 _run_command(['setup-apt-dir.sh',1002 "https://", f"https://{config['username']}:{config['password']}@"
855 '-c', 'focal',1003 )
856 '--disable-updates',1004 _run_command(
857 '--disable-backports',1005 [
858 '--apt-dir', tmpdir,1006 "setup-apt-dir.sh",
859 '--extra-key', config['fingerprint'],1007 "-c",
860 '--extra-repo', f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public"],1008 "focal",
861 silent=True)1009 "--disable-updates",
862 output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True)1010 "--disable-backports",
863 for line in output.split('\n'):1011 "--apt-dir",
1012 tmpdir,
1013 "--extra-key",
1014 config["fingerprint"],
1015 "--extra-repo",
1016 f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public",
1017 ],
1018 silent=True,
1019 )
1020 output, _, _ = _run_command(
1021 ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name],
1022 returncode=(0, 1),
1023 silent=True,
1024 )
1025 for line in output.split("\n"):
864 if pkg_name in line and domain in line:1026 if pkg_name in line and domain in line:
865 version = line.split(' ')[1]1027 version = line.split(" ")[1]
866 break1028 break
867 if version is None and project == "somerville":1029 if version is None and project == "somerville":
868 archive = f"somerville-focal-{branch}"1030 archive = f"somerville-focal-{branch}"
869 with TemporaryDirectory() as tmpdir:1031 with TemporaryDirectory() as tmpdir:
870 os.chdir(tmpdir)1032 os.chdir(tmpdir)
871 source_line = config['archive'].replace("https://", f"https://{config['username']}:{config['password']}@")1033 source_line = config["archive"].replace(
872 _run_command(['setup-apt-dir.sh',1034 "https://", f"https://{config['username']}:{config['password']}@"
873 '-c', 'focal',1035 )
874 '--disable-updates',1036 _run_command(
875 '--disable-backports',1037 [
876 '--apt-dir', tmpdir,1038 "setup-apt-dir.sh",
877 '--extra-key', config['fingerprint'],1039 "-c",
878 '--extra-repo', f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public"],1040 "focal",
879 silent=True)1041 "--disable-updates",
880 output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], silent=True)1042 "--disable-backports",
881 for line in output.split('\n'):1043 "--apt-dir",
1044 tmpdir,
1045 "--extra-key",
1046 config["fingerprint"],
1047 "--extra-repo",
1048 f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public",
1049 ],
1050 silent=True,
1051 )
1052 output, _, _ = _run_command(
1053 ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name], silent=True
1054 )
1055 for line in output.split("\n"):
882 if pkg_name in line and domain in line:1056 if pkg_name in line and domain in line:
883 version = line.split(' ')[1]1057 version = line.split(" ")[1]
884 break1058 break
8851059
886 return (archive, version)1060 return (archive, version)
8871061
8881062
889def collect_pkg_info(data, check_private: bool = False, index=None, config=None) -> dict:1063def collect_pkg_info(
1064 data, check_private: bool = False, index=None, config=None
1065) -> dict:
890 if type(data) is str:1066 if type(data) is str:
891 result = pattern.match(data)1067 result = pattern.match(data)
8921068
@@ -894,8 +1070,8 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
894 print(f"{data} is not supported.")1070 print(f"{data} is not supported.")
895 exit(1)1071 exit(1)
8961072
897 if '.' in result.group(1):1073 if "." in result.group(1):
898 project, group = result.group(1).split('.')1074 project, group = result.group(1).split(".")
899 else:1075 else:
900 project = result.group(1)1076 project = result.group(1)
901 group = "N/A"1077 group = "N/A"
@@ -917,14 +1093,16 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
917 print(f"{data} is not supported.")1093 print(f"{data} is not supported.")
918 exit(1)1094 exit(1)
9191095
920 json_data = json.loads(f"""[{{1096 json_data = json.loads(
1097 f"""[{{
921"Customer": "{customer}",1098"Customer": "{customer}",
922"Group": "{group}",1099"Group": "{group}",
923"Codename": "{codename}",1100"Codename": "{codename}",
924"Platform": "",1101"Platform": "",
925"MarketName": "",1102"MarketName": "",
926"PlatformLPTag": "{tag}"1103"PlatformLPTag": "{tag}"
927}}]""")1104}}]"""
1105 )
928 else:1106 else:
929 json_data = json.load(data)1107 json_data = json.load(data)
9301108
@@ -935,27 +1113,27 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
935 sutton = dict()1113 sutton = dict()
9361114
937 for item in json_data:1115 for item in json_data:
938 customer = item['Customer'].lower()1116 customer = item["Customer"].lower()
939 platform = item['Platform'].lower()1117 platform = item["Platform"].lower()
940 codename = item['Codename'].lower()1118 codename = item["Codename"].lower()
941 group = item['Group'].lower()1119 group = item["Group"].lower()
942 market_name = item['MarketName']1120 market_name = item["MarketName"]
943 lp_tag = item['PlatformLPTag'].lower()1121 lp_tag = item["PlatformLPTag"].lower()
944 if 'dell' in customer:1122 if "dell" in customer:
945 if 'somerville' in args.skip:1123 if "somerville" in args.skip:
946 continue1124 continue
947 platform = remove_prefix(lp_tag, 'fossa-')1125 platform = remove_prefix(lp_tag, "fossa-")
948 lst = somerville.get(platform, [])1126 lst = somerville.get(platform, [])
949 lst.append(market_name)1127 lst.append(market_name)
950 somerville[platform] = lst1128 somerville[platform] = lst
951 elif 'hp' in customer:1129 elif "hp" in customer:
952 if 'stella' in args.skip:1130 if "stella" in args.skip:
953 continue1131 continue
954 lst = stella.get(f"{group}-{codename}", [])1132 lst = stella.get(f"{group}-{codename}", [])
955 lst.append(market_name)1133 lst.append(market_name)
956 stella[f"{group}-{codename}"] = lst1134 stella[f"{group}-{codename}"] = lst
957 elif 'lenovo' in customer:1135 elif "lenovo" in customer:
958 if 'sutton' in args.skip:1136 if "sutton" in args.skip:
959 continue1137 continue
960 lst = sutton.get(f"{group}-{codename}", [])1138 lst = sutton.get(f"{group}-{codename}", [])
961 lst.append(market_name)1139 lst.append(market_name)
@@ -975,37 +1153,58 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
975 info("Finding the corresponding PPAs...")1153 info("Finding the corresponding PPAs...")
9761154
977 for codename, v in somerville.items():1155 for codename, v in somerville.items():
978 pkg_name = 'oem-somerville-' + codename + '-meta'1156 pkg_name = "oem-somerville-" + codename + "-meta"
979 if args.only and pkg_name != args.only:1157 if args.only and pkg_name != args.only:
980 warning(f"Skip {pkg_name}")1158 warning(f"Skip {pkg_name}")
981 continue1159 continue
982 if pkg_name in args.skip:1160 if pkg_name in args.skip:
983 warning(f"Skip {pkg_name}")1161 warning(f"Skip {pkg_name}")
984 continue1162 continue
985 ppa_archive, ppa_version, fingerprint = search_ppa_and_version("somerville", None, codename, pkg_name)1163 ppa_archive, ppa_version, fingerprint = search_ppa_and_version(
1164 "somerville", None, codename, pkg_name
1165 )
986 if ppa_archive is None:1166 if ppa_archive is None:
987 ppa_archive, ppa_version, fingerprint = search_ppa_and_version("somerville", None, codename, pkg_name, "somerville")1167 ppa_archive, ppa_version, fingerprint = search_ppa_and_version(
1168 "somerville", None, codename, pkg_name, "somerville"
1169 )
988 if ppa_archive is None:1170 if ppa_archive is None:
989 critical(f"It can not find any private PPA that contains {pkg_name}.")1171 critical(f"It can not find any private PPA that contains {pkg_name}.")
990 exit(1)1172 exit(1)
9911173
992 bootstrap_version, real_version = get_debian_version_from_git(pkg_name)1174 bootstrap_version, real_version = get_debian_version_from_git(pkg_name)
993 if ppa_version != real_version:1175 if ppa_version != real_version:
994 warning(f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}.")1176 warning(
1177 f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}."
1178 )
995 info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}.")1179 info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}.")
9961180
997 ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(pkg_name, "somerville", codename)1181 ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(
9981182 pkg_name, "somerville", codename
999 pkgInfo[pkg_name] = PkgInfo(ppa_archive=ppa_archive, ppa_version=ppa_version,1183 )
1000 bootstrap_version=bootstrap_version, real_version=real_version, git_version=real_version,1184
1001 old_desc="", new_desc="", fingerprint=fingerprint,1185 pkgInfo[pkg_name] = PkgInfo(
1002 staging_archive="", staging_version="",1186 ppa_archive=ppa_archive,
1003 devel_archive="", devel_version="",1187 ppa_version=ppa_version,
1004 oem_archive=oem_archive, oem_version=oem_version,1188 bootstrap_version=bootstrap_version,
1005 ubuntu_version=ubuntu_version, proposed_version=proposed_version)1189 real_version=real_version,
1190 git_version=real_version,
1191 old_desc="",
1192 new_desc="",
1193 fingerprint=fingerprint,
1194 staging_archive="",
1195 staging_version="",
1196 devel_archive="",
1197 devel_version="",
1198 oem_archive=oem_archive,
1199 oem_version=oem_version,
1200 ubuntu_version=ubuntu_version,
1201 proposed_version=proposed_version,
1202 )
10061203
1007 if check_private:1204 if check_private:
1008 staging_archive, staging_version = search_private_archive(pkg_name, "somerville", codename, index, config, branch="staging")1205 staging_archive, staging_version = search_private_archive(
1206 pkg_name, "somerville", codename, index, config, branch="staging"
1207 )
1009 pkgInfo[pkg_name].staging_archive = staging_archive1208 pkgInfo[pkg_name].staging_archive = staging_archive
1010 if staging_version:1209 if staging_version:
1011 info(f"{pkg_name} {staging_version} exists in {staging_archive}.")1210 info(f"{pkg_name} {staging_version} exists in {staging_archive}.")
@@ -1013,7 +1212,9 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
1013 else:1212 else:
1014 debug(f"{pkg_name} doesn't exist in {staging_archive} yet.")1213 debug(f"{pkg_name} doesn't exist in {staging_archive} yet.")
10151214
1016 devel_archive, devel_version = search_private_archive(pkg_name, "somerville", codename, index, config, branch="devel")1215 devel_archive, devel_version = search_private_archive(
1216 pkg_name, "somerville", codename, index, config, branch="devel"
1217 )
1017 pkgInfo[pkg_name].devel_archive = devel_archive1218 pkgInfo[pkg_name].devel_archive = devel_archive
1018 if devel_version:1219 if devel_version:
1019 info(f"{pkg_name} {devel_version} exists in {devel_archive}.")1220 info(f"{pkg_name} {devel_version} exists in {devel_archive}.")
@@ -1021,7 +1222,7 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
1021 else:1222 else:
1022 debug(f"{pkg_name} doesn't exist in {devel_archive} yet.")1223 debug(f"{pkg_name} doesn't exist in {devel_archive} yet.")
10231224
1024 if pkg_name in pkgNamesInArchive and ''.join(v):1225 if pkg_name in pkgNamesInArchive and "".join(v):
1025 new_desc = _grouping_market_names(v)1226 new_desc = _grouping_market_names(v)
1026 if "Dell" not in new_desc:1227 if "Dell" not in new_desc:
1027 new_desc = "Dell " + new_desc1228 new_desc = "Dell " + new_desc
@@ -1029,37 +1230,56 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
1029 pkgInfo[pkg_name].new_desc = new_desc1230 pkgInfo[pkg_name].new_desc = new_desc
10301231
1031 for k, v in stella.items():1232 for k, v in stella.items():
1032 pkg_name = 'oem-stella.' + k + '-meta'1233 pkg_name = "oem-stella." + k + "-meta"
1033 if args.only and pkg_name != args.only:1234 if args.only and pkg_name != args.only:
1034 warning(f"Skip {pkg_name}")1235 warning(f"Skip {pkg_name}")
1035 continue1236 continue
1036 if pkg_name in args.skip:1237 if pkg_name in args.skip:
1037 warning(f"Skip {pkg_name}")1238 warning(f"Skip {pkg_name}")
1038 continue1239 continue
1039 group, codename = k.split('-', 1)1240 group, codename = k.split("-", 1)
10401241
1041 ppa_archive, ppa_version, fingerprint = search_ppa_and_version("stella", group, codename, pkg_name)1242 ppa_archive, ppa_version, fingerprint = search_ppa_and_version(
1243 "stella", group, codename, pkg_name
1244 )
1042 if ppa_archive is None:1245 if ppa_archive is None:
1043 critical(f"It can not find any private PPA that contains {pkg_name}.")1246 critical(f"It can not find any private PPA that contains {pkg_name}.")
1044 exit(1)1247 exit(1)
10451248
1046 bootstrap_version, real_version = get_debian_version_from_git(pkg_name)1249 bootstrap_version, real_version = get_debian_version_from_git(pkg_name)
1047 if ppa_version != real_version:1250 if ppa_version != real_version:
1048 warning(f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}.")1251 warning(
1252 f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}."
1253 )
1049 info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}")1254 info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}")
10501255
1051 ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(pkg_name, "stella", group)1256 ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(
10521257 pkg_name, "stella", group
1053 pkgInfo[pkg_name] = PkgInfo(ppa_archive=ppa_archive, ppa_version=ppa_version,1258 )
1054 bootstrap_version=bootstrap_version, real_version=real_version, git_version=real_version,1259
1055 old_desc="", new_desc="", fingerprint=fingerprint,1260 pkgInfo[pkg_name] = PkgInfo(
1056 staging_archive="", staging_version="",1261 ppa_archive=ppa_archive,
1057 devel_archive="", devel_version="",1262 ppa_version=ppa_version,
1058 oem_archive=oem_archive, oem_version=oem_version,1263 bootstrap_version=bootstrap_version,
1059 ubuntu_version=ubuntu_version, proposed_version=proposed_version)1264 real_version=real_version,
1265 git_version=real_version,
1266 old_desc="",
1267 new_desc="",
1268 fingerprint=fingerprint,
1269 staging_archive="",
1270 staging_version="",
1271 devel_archive="",
1272 devel_version="",
1273 oem_archive=oem_archive,
1274 oem_version=oem_version,
1275 ubuntu_version=ubuntu_version,
1276 proposed_version=proposed_version,
1277 )
10601278
1061 if check_private:1279 if check_private:
1062 staging_archive, staging_version = search_private_archive(pkg_name, "stella", group, index, config, branch="staging")1280 staging_archive, staging_version = search_private_archive(
1281 pkg_name, "stella", group, index, config, branch="staging"
1282 )
1063 pkgInfo[pkg_name].staging_archive = staging_archive1283 pkgInfo[pkg_name].staging_archive = staging_archive
1064 if staging_version:1284 if staging_version:
1065 info(f"{pkg_name} {staging_version} exists in {staging_archive}.")1285 info(f"{pkg_name} {staging_version} exists in {staging_archive}.")
@@ -1067,7 +1287,9 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
1067 else:1287 else:
1068 debug(f"{pkg_name} doesn't exist in {staging_archive} yet.")1288 debug(f"{pkg_name} doesn't exist in {staging_archive} yet.")
10691289
1070 devel_archive, devel_version = search_private_archive(pkg_name, "stella", group, index, config, branch="devel")1290 devel_archive, devel_version = search_private_archive(
1291 pkg_name, "stella", group, index, config, branch="devel"
1292 )
1071 pkgInfo[pkg_name].devel_archive = devel_archive1293 pkgInfo[pkg_name].devel_archive = devel_archive
1072 if devel_version:1294 if devel_version:
1073 info(f"{pkg_name} {devel_version} exists in {devel_archive}.")1295 info(f"{pkg_name} {devel_version} exists in {devel_archive}.")
@@ -1075,45 +1297,66 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
1075 else:1297 else:
1076 debug(f"{pkg_name} doesn't exist in {devel_archive} yet.")1298 debug(f"{pkg_name} doesn't exist in {devel_archive} yet.")
10771299
1078 if pkg_name in pkgNamesInArchive and ''.join(v):1300 if pkg_name in pkgNamesInArchive and "".join(v):
1079 new_desc = _grouping_market_names(v, maxsplit=2)1301 new_desc = _grouping_market_names(v, maxsplit=2)
1080 if "HP" not in new_desc:1302 if "HP" not in new_desc:
1081 new_desc = "HP " + new_desc1303 new_desc = "HP " + new_desc
1082 pkgInfo[pkg_name].old_desc = f"Stella {group.title()} {codename.title()} platform"1304 pkgInfo[
1305 pkg_name
1306 ].old_desc = f"Stella {group.title()} {codename.title()} platform"
1083 pkgInfo[pkg_name].new_desc = new_desc1307 pkgInfo[pkg_name].new_desc = new_desc
10841308
1085 for k, v in sutton.items():1309 for k, v in sutton.items():
1086 pkg_name = 'oem-sutton.' + k + '-meta'1310 pkg_name = "oem-sutton." + k + "-meta"
1087 if args.only and pkg_name != args.only:1311 if args.only and pkg_name != args.only:
1088 warning(f"Skip {pkg_name}")1312 warning(f"Skip {pkg_name}")
1089 continue1313 continue
1090 if pkg_name in args.skip:1314 if pkg_name in args.skip:
1091 warning(f"Skip {pkg_name}")1315 warning(f"Skip {pkg_name}")
1092 continue1316 continue
1093 group, codename = k.split('-', 1)1317 group, codename = k.split("-", 1)
10941318
1095 ppa_archive, ppa_version, fingerprint = search_ppa_and_version("sutton", group, codename, pkg_name)1319 ppa_archive, ppa_version, fingerprint = search_ppa_and_version(
1320 "sutton", group, codename, pkg_name
1321 )
1096 if ppa_archive is None:1322 if ppa_archive is None:
1097 critical(f"It can not find any private PPA that contains {pkg_name}.")1323 critical(f"It can not find any private PPA that contains {pkg_name}.")
1098 exit(1)1324 exit(1)
10991325
1100 bootstrap_version, real_version = get_debian_version_from_git(pkg_name)1326 bootstrap_version, real_version = get_debian_version_from_git(pkg_name)
1101 if ppa_version != real_version:1327 if ppa_version != real_version:
1102 warning(f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}.")1328 warning(
1329 f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}."
1330 )
1103 info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}")1331 info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}")
11041332
1105 ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(pkg_name, "sutton", group)1333 ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(
11061334 pkg_name, "sutton", group
1107 pkgInfo[pkg_name] = PkgInfo(ppa_archive=ppa_archive, ppa_version=ppa_version,1335 )
1108 bootstrap_version=bootstrap_version, real_version=real_version, git_version=real_version,1336
1109 old_desc="", new_desc="", fingerprint=fingerprint,1337 pkgInfo[pkg_name] = PkgInfo(
1110 staging_archive="", staging_version="",1338 ppa_archive=ppa_archive,
1111 devel_archive="", devel_version="",1339 ppa_version=ppa_version,
1112 oem_archive=oem_archive, oem_version=oem_version,1340 bootstrap_version=bootstrap_version,
1113 ubuntu_version=ubuntu_version, proposed_version=proposed_version)1341 real_version=real_version,
1342 git_version=real_version,
1343 old_desc="",
1344 new_desc="",
1345 fingerprint=fingerprint,
1346 staging_archive="",
1347 staging_version="",
1348 devel_archive="",
1349 devel_version="",
1350 oem_archive=oem_archive,
1351 oem_version=oem_version,
1352 ubuntu_version=ubuntu_version,
1353 proposed_version=proposed_version,
1354 )
11141355
1115 if check_private:1356 if check_private:
1116 staging_archive, staging_version = search_private_archive(pkg_name, "sutton", group, index, config, branch="staging")1357 staging_archive, staging_version = search_private_archive(
1358 pkg_name, "sutton", group, index, config, branch="staging"
1359 )
1117 pkgInfo[pkg_name].staging_archive = staging_archive1360 pkgInfo[pkg_name].staging_archive = staging_archive
1118 if staging_version:1361 if staging_version:
1119 info(f"{pkg_name} {staging_version} exists in {staging_archive}.")1362 info(f"{pkg_name} {staging_version} exists in {staging_archive}.")
@@ -1121,7 +1364,9 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
1121 else:1364 else:
1122 debug(f"{pkg_name} doesn't exist in {staging_archive} yet.")1365 debug(f"{pkg_name} doesn't exist in {staging_archive} yet.")
11231366
1124 devel_archive, devel_version = search_private_archive(pkg_name, "sutton", group, index, config, branch="devel")1367 devel_archive, devel_version = search_private_archive(
1368 pkg_name, "sutton", group, index, config, branch="devel"
1369 )
1125 pkgInfo[pkg_name].devel_archive = devel_archive1370 pkgInfo[pkg_name].devel_archive = devel_archive
1126 if devel_version:1371 if devel_version:
1127 info(f"{pkg_name} {devel_version} exists in {devel_archive}.")1372 info(f"{pkg_name} {devel_version} exists in {devel_archive}.")
@@ -1129,11 +1374,13 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None)
1129 else:1374 else:
1130 debug(f"{pkg_name} doesn't exist in {devel_archive} yet.")1375 debug(f"{pkg_name} doesn't exist in {devel_archive} yet.")
11311376
1132 if pkg_name in pkgNamesInArchive and ''.join(v):1377 if pkg_name in pkgNamesInArchive and "".join(v):
1133 new_desc = _grouping_market_names(v)1378 new_desc = _grouping_market_names(v)
1134 if "Lenovo" not in new_desc:1379 if "Lenovo" not in new_desc:
1135 new_desc = "Lenovo " + new_desc1380 new_desc = "Lenovo " + new_desc
1136 pkgInfo[pkg_name].old_desc = f"Sutton {group.title()} {codename.title()} platform"1381 pkgInfo[
1382 pkg_name
1383 ].old_desc = f"Sutton {group.title()} {codename.title()} platform"
1137 pkgInfo[pkg_name].new_desc = new_desc1384 pkgInfo[pkg_name].new_desc = new_desc
11381385
1139 debug(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder))1386 debug(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder))
@@ -1145,22 +1392,24 @@ def load_pkg_info(data) -> dict:
1145 pkgInfo = dict()1392 pkgInfo = dict()
1146 data = json.load(data)1393 data = json.load(data)
1147 for meta in data.keys():1394 for meta in data.keys():
1148 pkgInfo[meta] = PkgInfo(ppa_archive=data[meta]['ppa_archive'],1395 pkgInfo[meta] = PkgInfo(
1149 ppa_version=data[meta]['ppa_version'],1396 ppa_archive=data[meta]["ppa_archive"],
1150 git_version=data[meta]['git_version'],1397 ppa_version=data[meta]["ppa_version"],
1151 bootstrap_version=data[meta]['bootstrap_version'],1398 git_version=data[meta]["git_version"],
1152 real_version=data[meta]['real_version'],1399 bootstrap_version=data[meta]["bootstrap_version"],
1153 old_desc=data[meta]['old_desc'],1400 real_version=data[meta]["real_version"],
1154 new_desc=data[meta]['new_desc'],1401 old_desc=data[meta]["old_desc"],
1155 fingerprint=data[meta]['fingerprint'],1402 new_desc=data[meta]["new_desc"],
1156 staging_archive=data[meta]['staging_archive'],1403 fingerprint=data[meta]["fingerprint"],
1157 staging_version=data[meta]['staging_version'],1404 staging_archive=data[meta]["staging_archive"],
1158 devel_archive=data[meta]['devel_archive'],1405 staging_version=data[meta]["staging_version"],
1159 devel_version=data[meta]['devel_version'],1406 devel_archive=data[meta]["devel_archive"],
1160 oem_archive=data[meta]['oem_archive'],1407 devel_version=data[meta]["devel_version"],
1161 oem_version=data[meta]['oem_version'],1408 oem_archive=data[meta]["oem_archive"],
1162 ubuntu_version=data[meta]['ubuntu_version'],1409 oem_version=data[meta]["oem_version"],
1163 proposed_version=data[meta]['proposed_version'])1410 ubuntu_version=data[meta]["ubuntu_version"],
1411 proposed_version=data[meta]["proposed_version"],
1412 )
11641413
1165 debug(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder))1414 debug(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder))
1166 return pkgInfo1415 return pkgInfo
@@ -1184,8 +1433,8 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo
1184 if not result:1433 if not result:
1185 return1434 return
11861435
1187 if '.' in result.group(1):1436 if "." in result.group(1):
1188 project, group = result.group(1).split('.')1437 project, group = result.group(1).split(".")
1189 else:1438 else:
1190 project = result.group(1)1439 project = result.group(1)
1191 group = None1440 group = None
@@ -1203,15 +1452,34 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo
1203 else:1452 else:
1204 branch = f"{platform}-focal-oem"1453 branch = f"{platform}-focal-oem"
12051454
1206 git_command = ("git", "clone", "--depth", "1", "-b", branch, f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", pkg_name)1455 git_command = (
1456 "git",
1457 "clone",
1458 "--depth",
1459 "1",
1460 "-b",
1461 branch,
1462 f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta",
1463 pkg_name,
1464 )
12071465
1208 with TemporaryDirectory() as tmpdir:1466 with TemporaryDirectory() as tmpdir:
1209 messages = list()1467 messages = list()
1210 os.chdir(tmpdir)1468 os.chdir(tmpdir)
1211 _run_command(git_command)1469 _run_command(git_command)
1212 git_version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"{pkg_name}/debian/changelog"])1470 git_version, _, _ = _run_command(
1471 [
1472 "dpkg-parsechangelog",
1473 "--show-field",
1474 "Version",
1475 "-l",
1476 f"{pkg_name}/debian/changelog",
1477 ]
1478 )
1213 if git_version != pkg_info.ppa_version:1479 if git_version != pkg_info.ppa_version:
1214 critical(f"{pkg_name}'s version is {pkg_info.ppa_version} in ppa:oem-archive/{pkg_info.ppa_archive} but the version in Git repository is {git_version}.")1480 critical(
1481 f"{pkg_name}'s version is {pkg_info.ppa_version} in ppa:oem-archive/{pkg_info.ppa_archive} but the version in Git repository is {git_version}."
1482 )
1215 exit(1)1483 exit(1)
1216 git_dir = os.path.join(tmpdir, pkg_name)1484 git_dir = os.path.join(tmpdir, pkg_name)
12171485
@@ -1240,29 +1508,41 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo
1240 return False1508 return False
12411509
1242 # Prepare the changelog and commit the changes.1510 # Prepare the changelog and commit the changes.
1243 commit_message = 'Update the ' + ' and'.join(', '.join(messages).rsplit(',', 1)) + f' for {args.kernel}.'1511 commit_message = (
1244 _run_command(['dch', '--increment', commit_message])1512 "Update the "
1513 + " and".join(", ".join(messages).rsplit(",", 1))
1514 + f" for {args.kernel}."
1515 )
1516 _run_command(["dch", "--increment", commit_message])
1245 _run_command(["git", "add", "debian/changelog"])1517 _run_command(["git", "add", "debian/changelog"])
1246 _run_command(['git', 'commit', '-a', '-m', f"{commit_message}\n\nUpdated by oem-scripts {oem_scripts.__version__:.2f}."])1518 _run_command(
12471519 [
1248 out, _, _ = _run_command(['git', 'show', '--color=always'])1520 "git",
1249 if out != b'':1521 "commit",
1522 "-a",
1523 "-m",
1524 f"{commit_message}\n\nUpdated by oem-scripts {oem_scripts.__version__:.2f}.",
1525 ]
1526 )
1527
1528 out, _, _ = _run_command(["git", "show", "--color=always"])
1529 if out != b"":
1250 debug(f"({pkg_name}:{branch}) $ git show")1530 debug(f"({pkg_name}:{branch}) $ git show")
1251 debug(out)1531 debug(out)
12521532
1253 # Run autopkgtest1533 # Run autopkgtest
1254 if args.autopkgtest:1534 if args.autopkgtest:
1255 with open(f'{pkg_name}.list', 'r') as f:1535 with open(f"{pkg_name}.list", "r") as f:
1256 source_list = f.read().strip()1536 source_list = f.read().strip()
12571537
1258 archives = set()1538 archives = set()
1259 archives.add(pkg_info.ppa_archive)1539 archives.add(pkg_info.ppa_archive)
12601540
1261 if project == 'somerville':1541 if project == "somerville":
1262 common_archive = oem_archive.getPPAByName(name=project)1542 common_archive = oem_archive.getPPAByName(name=project)
1263 fingerprint = common_archive.signing_key_fingerprint1543 fingerprint = common_archive.signing_key_fingerprint
1264 archives.add(f"{project}")1544 archives.add(f"{project}")
1265 elif project == 'stella' or project == 'sutton':1545 elif project == "stella" or project == "sutton":
1266 common_archive = oem_archive.getPPAByName(name=f"{project}-ouagadougou")1546 common_archive = oem_archive.getPPAByName(name=f"{project}-ouagadougou")
1267 fingerprint = common_archive.signing_key_fingerprint1547 fingerprint = common_archive.signing_key_fingerprint
1268 archives.add(f"{project}-ouagadougou")1548 archives.add(f"{project}-ouagadougou")
@@ -1286,8 +1566,9 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo
1286 if f"oem-archive/{ppa}/ubuntu" in url:1566 if f"oem-archive/{ppa}/ubuntu" in url:
1287 source_list += "\ndeb " + url + " focal main"1567 source_list += "\ndeb " + url + " focal main"
12881568
1289 with open(f'autopkgtest-{pkg_name}-auto', 'w') as f:1569 with open(f"autopkgtest-{pkg_name}-auto", "w") as f:
1290 f.write(f'''#!/bin/bash1570 f.write(
1571 f"""#!/bin/bash
12911572
1292set -euo pipefail1573set -euo pipefail
1293IFS=$'\n\t'1574IFS=$'\n\t'
@@ -1319,54 +1600,83 @@ true
1319ENDLINE1600ENDLINE
1320chmod 755 "\\$root/usr/sbin/update-grub"1601chmod 755 "\\$root/usr/sbin/update-grub"
1321END1602END
1322''')1603"""
1604 )
1323 if args.debug:1605 if args.debug:
1324 _run_command(['cat', f'autopkgtest-{pkg_name}-auto'])1606 _run_command(["cat", f"autopkgtest-{pkg_name}-auto"])
1325 os.chmod(f'autopkgtest-{pkg_name}-auto', 0o755)1607 os.chmod(f"autopkgtest-{pkg_name}-auto", 0o755)
1326 info(f"({pkg_name}:{branch}) $ run-autopkgtest lxc focal -C")1608 info(f"({pkg_name}:{branch}) $ run-autopkgtest lxc focal -C")
1327 _run_command(['run-autopkgtest', 'lxc', 'focal', '-C'])1609 _run_command(["run-autopkgtest", "lxc", "focal", "-C"])
1328 _run_command(['git', 'reset', '--hard', 'HEAD'])1610 _run_command(["git", "reset", "--hard", "HEAD"])
1329 _run_command(['git', 'clean', '-x', '-d', '-f'])1611 _run_command(["git", "clean", "-x", "-d", "-f"])
13301612
1331 # Don't use UNRELEASED in the real meta.1613 # Don't use UNRELEASED in the real meta.
1332 if not bootstrap:1614 if not bootstrap:
1333 _run_command(['sed', '-i', 's/UNRELEASED/focal/', 'debian/changelog'])1615 _run_command(["sed", "-i", "s/UNRELEASED/focal/", "debian/changelog"])
1334 _run_command(['git', 'commit', '-a', '--amend', '--no-edit'])1616 _run_command(["git", "commit", "-a", "--amend", "--no-edit"])
13351617
1336 # Tag and find it out.1618 # Tag and find it out.
1337 out, _, _ = _run_command(['gbp', 'tag'])1619 out, _, _ = _run_command(["gbp", "tag"])
1338 if out != b'':1620 if out != b"":
1339 info(out)1621 info(out)
1340 out, _, _ = _run_command(['git', 'describe'])1622 out, _, _ = _run_command(["git", "describe"])
1341 if out != b'':1623 if out != b"":
1342 tag = out.strip()1624 tag = out.strip()
1343 info(tag)1625 info(tag)
13441626
1345 # Build Debian binary packages1627 # Build Debian binary packages
1346 _run_command(['gbp', 'buildpackage', '-us', '-uc'])1628 _run_command(["gbp", "buildpackage", "-us", "-uc"])
1347 _run_command(['git', 'reset', '--hard', 'HEAD'])1629 _run_command(["git", "reset", "--hard", "HEAD"])
1348 _run_command(['git', 'clean', '-x', '-d', '-f'])1630 _run_command(["git", "clean", "-x", "-d", "-f"])
13491631
1350 # Build Debian source packages1632 # Build Debian source packages
1351 _run_command(['gbp', 'buildpackage', '-S', '-us', '-uc'])1633 _run_command(["gbp", "buildpackage", "-S", "-us", "-uc"])
1352 _run_command(['git', 'reset', '--hard', 'HEAD'])1634 _run_command(["git", "reset", "--hard", "HEAD"])
1353 _run_command(['git', 'clean', '-x', '-d', '-f'])1635 _run_command(["git", "clean", "-x", "-d", "-f"])
13541636
1355 # Show the commit1637 # Show the commit
1356 out, _, _ = _run_command(['git', 'show', '--color=always'])1638 out, _, _ = _run_command(["git", "show", "--color=always"])
1357 if out != b'':1639 if out != b"":
1358 warning(f"({pkg_name}:{branch}) $ git show")1640 warning(f"({pkg_name}:{branch}) $ git show")
1359 print(out)1641 print(out)
1360 version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"debian/changelog"])1642 version, _, _ = _run_command(
1361 if not args.dry_run and yes_or_ask(args.yes, f"Would you like to commit and push the changes of {version} into {pkg_name}'s git {branch} branch?"):1643 [
1644 "dpkg-parsechangelog",
1645 "--show-field",
1646 "Version",
1647 "-l",
1648 f"debian/changelog",
1649 ]
1650 )
1651 if not args.dry_run and yes_or_ask(
1652 args.yes,
1653 f"Would you like to commit and push the changes of {version} into {pkg_name}'s git {branch} branch?",
1654 ):
1362 os.chdir(git_dir)1655 os.chdir(git_dir)
1363 _run_command(['git', 'remote', 'add', 'oem-solutions-engineers', f"git+ssh://{lp.me.name}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta"])1656 _run_command(
1364 _run_command(['git', 'push', 'oem-solutions-engineers'])1657 [
1365 _run_command(['git', 'push', 'oem-solutions-engineers', tag])1658 "git",
1366 if not args.dry_run and yes_or_ask(args.yes, f"Would you like to dput Debian source package into ppa:oem-archive/{pkg_info.ppa_archive}?"):1659 "remote",
1367 os.chdir(os.path.join(git_dir, '..'))1660 "add",
1368 _run_command(['debsign', f'{pkg_name}_{version}_source.changes'])1661 "oem-solutions-engineers",
1369 _run_command(['dput', f'ppa:oem-archive/{pkg_info.ppa_archive}', f'{pkg_name}_{version}_source.changes'])1662 f"git+ssh://{lp.me.name}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta",
1663 ]
1664 )
1665 _run_command(["git", "push", "oem-solutions-engineers"])
1666 _run_command(["git", "push", "oem-solutions-engineers", tag])
1667 if not args.dry_run and yes_or_ask(
1668 args.yes,
1669 f"Would you like to dput Debian source package into ppa:oem-archive/{pkg_info.ppa_archive}?",
1670 ):
1671 os.chdir(os.path.join(git_dir, ".."))
1672 _run_command(["debsign", f"{pkg_name}_{version}_source.changes"])
1673 _run_command(
1674 [
1675 "dput",
1676 f"ppa:oem-archive/{pkg_info.ppa_archive}",
1677 f"{pkg_name}_{version}_source.changes",
1678 ]
1679 )
13701680
13711681
1372def check_meta_git(pkg_name: str, pkg_info: PkgInfo, skip_bootstrap: False) -> None:1682def check_meta_git(pkg_name: str, pkg_info: PkgInfo, skip_bootstrap: False) -> None:
@@ -1383,84 +1693,119 @@ def check_meta_git(pkg_name: str, pkg_info: PkgInfo, skip_bootstrap: False) -> N
13831693
1384cache = apt_pkg.Cache(progress=None)1694cache = apt_pkg.Cache(progress=None)
13851695
1386if args.subcommand == 'list':1696if args.subcommand == "list":
1387 for name in get_oem_meta_packages(cache):1697 for name in get_oem_meta_packages(cache):
1388 print(name)1698 print(name)
1389elif args.subcommand == 'subscribe':1699elif args.subcommand == "subscribe":
1390 for name in get_oem_meta_packages(cache):1700 for name in get_oem_meta_packages(cache):
1391 info(f"Checking the subscriptions for {name}...")1701 info(f"Checking the subscriptions for {name}...")
1392 source = lp.distributions['ubuntu'].getSourcePackage(name=name)1702 source = lp.distributions["ubuntu"].getSourcePackage(name=name)
1393 if 'oem-solutions-engineers' in map(lambda x: x.subscriber.name, source.getSubscriptions()):1703 if "oem-solutions-engineers" in map(
1704 lambda x: x.subscriber.name, source.getSubscriptions()
1705 ):
1394 info(f"ubuntu/{name} has subscribed oem-solutions-engineers.")1706 info(f"ubuntu/{name} has subscribed oem-solutions-engineers.")
1395 continue1707 continue
1396 warning(f"ubuntu/{name} didn't subscribe oem-solutions-engineers yet.")1708 warning(f"ubuntu/{name} didn't subscribe oem-solutions-engineers yet.")
1397 if yes_or_ask(args.yes, f"Would you like to subscribe 'oem-solutions-engineers' for ubuntu/{name}?"):1709 if yes_or_ask(
1710 args.yes,
1711 f"Would you like to subscribe 'oem-solutions-engineers' for ubuntu/{name}?",
1712 ):
1398 try:1713 try:
1399 # When a person is subscribed to a source package, one actually subscribe all bugs for it.1714 # When a person is subscribed to a source package, one actually subscribe all bugs for it.
1400 source.addBugSubscription(subscriber=lp.people['oem-solutions-engineers'])1715 source.addBugSubscription(
1716 subscriber=lp.people["oem-solutions-engineers"]
1717 )
1401 except lazr.restfulclient.errors.Unauthorized as e:1718 except lazr.restfulclient.errors.Unauthorized as e:
1402 error(f"{lp.me.name} does not have permission to subscribe oem-solutions-engineers.")1719 error(
1720 f"{lp.me.name} does not have permission to subscribe oem-solutions-engineers."
1721 )
1403 if args.verbose:1722 if args.verbose:
1404 print(e)1723 print(e)
1405 exit(1)1724 exit(1)
1406elif args.subcommand == 'unsubscribe':1725elif args.subcommand == "unsubscribe":
1407 source = lp.distributions['ubuntu'].getSourcePackage(name=args.pkgName)1726 source = lp.distributions["ubuntu"].getSourcePackage(name=args.pkgName)
1408 subscriptions = source.getSubscriptions()1727 subscriptions = source.getSubscriptions()
1409 for subscription in subscriptions:1728 for subscription in subscriptions:
1410 if subscription.subscriber.name == 'oem-solutions-engineers':1729 if subscription.subscriber.name == "oem-solutions-engineers":
1411 info(f"ubuntu/{args.pkgName} has subscribed oem-solutions-engineers.")1730 info(f"ubuntu/{args.pkgName} has subscribed oem-solutions-engineers.")
1412 if yes_or_ask(args.yes, f"Would you like to unsubscribe 'oem-solutions-engineers' for ubuntu/{args.pkgName}?"):1731 if yes_or_ask(
1732 args.yes,
1733 f"Would you like to unsubscribe 'oem-solutions-engineers' for ubuntu/{args.pkgName}?",
1734 ):
1413 try:1735 try:
1414 source.removeBugSubscription(subscriber=lp.people['oem-solutions-engineers'])1736 source.removeBugSubscription(
1737 subscriber=lp.people["oem-solutions-engineers"]
1738 )
1415 except lazr.restfulclient.errors.Unauthorized as e:1739 except lazr.restfulclient.errors.Unauthorized as e:
1416 error(f"{lp.me.name} does not have permission to unsubscribe oem-solutions-engineers.")1740 error(
1741 f"{lp.me.name} does not have permission to unsubscribe oem-solutions-engineers."
1742 )
1417 if args.verbose:1743 if args.verbose:
1418 print(e)1744 print(e)
1419 exit(1)1745 exit(1)
1420 exit(0)1746 exit(0)
1421elif args.subcommand == 'update':1747elif args.subcommand == "update":
1422 oem_scripts_config_ini = os.path.join(os.environ["HOME"],1748 oem_scripts_config_ini = os.path.join(
1423 ".config/oem-scripts/config.ini")1749 os.environ["HOME"], ".config/oem-scripts/config.ini"
1750 )
1424 oem_scripts_config = ConfigParser()1751 oem_scripts_config = ConfigParser()
1425 oem_scripts_config.read(oem_scripts_config_ini)1752 oem_scripts_config.read(oem_scripts_config_ini)
1426 config = oem_scripts_config['private']1753 config = oem_scripts_config["private"]
1427 if args.json:1754 if args.json:
1428 pkgInfo = load_pkg_info(args.json)1755 pkgInfo = load_pkg_info(args.json)
1429 elif args.meta:1756 elif args.meta:
1430 r = requests.get(config['archive'] + "/dists/", auth=(config['username'], config['password']))1757 r = requests.get(
1431 pkgInfo = collect_pkg_info(args.meta, check_private=True, index=r.text, config=config)1758 config["archive"] + "/dists/", auth=(config["username"], config["password"])
1759 )
1760 pkgInfo = collect_pkg_info(
1761 args.meta, check_private=True, index=r.text, config=config
1762 )
1432 else:1763 else:
1433 print("You needto use --json or --meta.")1764 print("You needto use --json or --meta.")
1434 exit(1)1765 exit(1)
1435 process_update_task(pkgInfo)1766 process_update_task(pkgInfo)
1436elif args.subcommand == 'collect':1767elif args.subcommand == "collect":
1437 oem_scripts_config_ini = os.path.join(os.environ["HOME"],1768 oem_scripts_config_ini = os.path.join(
1438 ".config/oem-scripts/config.ini")1769 os.environ["HOME"], ".config/oem-scripts/config.ini"
1770 )
1439 oem_scripts_config = ConfigParser()1771 oem_scripts_config = ConfigParser()
1440 oem_scripts_config.read(oem_scripts_config_ini)1772 oem_scripts_config.read(oem_scripts_config_ini)
1441 config = oem_scripts_config['private']1773 config = oem_scripts_config["private"]
1442 r = requests.get(config['archive'] + "/dists/", auth=(config['username'], config['password']))1774 r = requests.get(
1775 config["archive"] + "/dists/", auth=(config["username"], config["password"])
1776 )
1443 if args.json:1777 if args.json:
1444 pkgInfo = collect_pkg_info(args.json, check_private=True, index=r.text, config=config)1778 pkgInfo = collect_pkg_info(
1779 args.json, check_private=True, index=r.text, config=config
1780 )
1445 elif args.meta:1781 elif args.meta:
1446 pkgInfo = collect_pkg_info(args.meta, check_private=True, index=r.text, config=config)1782 pkgInfo = collect_pkg_info(
1783 args.meta, check_private=True, index=r.text, config=config
1784 )
1447 else:1785 else:
1448 print("You need to use --json or --meta.")1786 print("You need to use --json or --meta.")
1449 exit(1)1787 exit(1)
1450 args.output.write(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder))1788 args.output.write(
1789 json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder)
1790 )
1451 args.output.write("\n")1791 args.output.write("\n")
1452elif args.subcommand == 'staging-copy':1792elif args.subcommand == "staging-copy":
1453 oem_scripts_config_ini = os.path.join(os.environ["HOME"],1793 oem_scripts_config_ini = os.path.join(
1454 ".config/oem-scripts/config.ini")1794 os.environ["HOME"], ".config/oem-scripts/config.ini"
1795 )
1455 oem_scripts_config = ConfigParser()1796 oem_scripts_config = ConfigParser()
1456 oem_scripts_config.read(oem_scripts_config_ini)1797 oem_scripts_config.read(oem_scripts_config_ini)
1457 config = oem_scripts_config['private']1798 config = oem_scripts_config["private"]
14581799
1459 if args.json:1800 if args.json:
1460 pkgInfo = load_pkg_info(args.json)1801 pkgInfo = load_pkg_info(args.json)
1461 elif args.meta:1802 elif args.meta:
1462 r = requests.get(config['archive'] + "/dists/", auth=(config['username'], config['password']))1803 r = requests.get(
1463 pkgInfo = collect_pkg_info(args.meta, check_private=True, index=r.text, config=config)1804 config["archive"] + "/dists/", auth=(config["username"], config["password"])
1805 )
1806 pkgInfo = collect_pkg_info(
1807 args.meta, check_private=True, index=r.text, config=config
1808 )
1464 else:1809 else:
1465 print("You need to use --json or --meta.")1810 print("You need to use --json or --meta.")
1466 exit(1)1811 exit(1)
@@ -1470,12 +1815,21 @@ elif args.subcommand == 'staging-copy':
1470 staging_locked = set()1815 staging_locked = set()
1471 for pkg_name in sorted(pkgInfo.keys()):1816 for pkg_name in sorted(pkgInfo.keys()):
1472 pkg_info = pkgInfo[pkg_name]1817 pkg_info = pkgInfo[pkg_name]
1473 debug(f"{pkg_name} ppa: {pkg_info.ppa_version}, devel: {pkg_info.devel_version}, staging: {pkg_info.staging_version}.")1818 debug(
1819 f"{pkg_name} ppa: {pkg_info.ppa_version}, devel: {pkg_info.devel_version}, staging: {pkg_info.staging_version}."
1820 )
1474 if pkg_info.ppa_version != pkg_info.devel_version:1821 if pkg_info.ppa_version != pkg_info.devel_version:
1475 warning(f"{pkg_name} versions are not synced between ppa:oem-archive/{pkg_info.ppa_archive} and {pkg_info.devel_archive}.")1822 warning(
1823 f"{pkg_name} versions are not synced between ppa:oem-archive/{pkg_info.ppa_archive} and {pkg_info.devel_archive}."
1824 )
1476 elif pkg_info.staging_version == pkg_info.devel_version:1825 elif pkg_info.staging_version == pkg_info.devel_version:
1477 info(f"{pkg_name} {pkg_info.devel_version} (devel) == {pkg_info.staging_version} (staging) so it doesn't need to copy.")1826 info(
1478 elif apt_pkg.version_compare(pkg_info.staging_version, pkg_info.devel_version) > 0:1827 f"{pkg_name} {pkg_info.devel_version} (devel) == {pkg_info.staging_version} (staging) so it doesn't need to copy."
1828 )
1829 elif (
1830 apt_pkg.version_compare(pkg_info.staging_version, pkg_info.devel_version)
1831 > 0
1832 ):
1479 critical(f"This should never happen.")1833 critical(f"This should never happen.")
1480 exit(1)1834 exit(1)
1481 else:1835 else:
@@ -1485,65 +1839,90 @@ elif args.subcommand == 'staging-copy':
1485 jobs[identity] = list()1839 jobs[identity] = list()
1486 jobs[identity].append(pkg_name)1840 jobs[identity].append(pkg_name)
1487 debug(json.dumps(jobs, indent=4, sort_keys=True))1841 debug(json.dumps(jobs, indent=4, sort_keys=True))
1488 cloudberry = lp.projects['cloudberry']1842 cloudberry = lp.projects["cloudberry"]
1489 assignee = lp.people['oem-archive']1843 assignee = lp.people["oem-archive"]
1490 tasks = cloudberry.searchTasks(1844 tasks = cloudberry.searchTasks(
1491 status=['New', 'Triaged', 'Confirmed', 'In Progress', 'Fix Committed'],1845 status=["New", "Triaged", "Confirmed", "In Progress", "Fix Committed"],
1492 search_text='request of')1846 search_text="request of",
1847 )
1493 for task in tasks:1848 for task in tasks:
1494 bug = task.bug1849 bug = task.bug
1495 for staging in sorted(dest):1850 for staging in sorted(dest):
1496 if staging in bug.description and 'staging-lock' in bug.tags and 'cqa-verified-staging' not in bug.tags:1851 if (
1852 staging in bug.description
1853 and "staging-lock" in bug.tags
1854 and "cqa-verified-staging" not in bug.tags
1855 ):
1497 debug(bug.description)1856 debug(bug.description)
1498 tags = ",".join(bug.tags)1857 tags = ",".join(bug.tags)
1499 for line in bug.description.split('\n'):1858 for line in bug.description.split("\n"):
1500 if line.startswith('Package: '):1859 if line.startswith("Package: "):
1501 package = line1860 package = line
1502 warning(f"https://bugs.launchpad.net/bugs/{bug.id}\n\t({staging})\n\t[{tags}]\n\t{bug.title}\n\t{package}")1861 warning(
1862 f"https://bugs.launchpad.net/bugs/{bug.id}\n\t({staging})\n\t[{tags}]\n\t{bug.title}\n\t{package}"
1863 )
1503 staging_locked.add(staging)1864 staging_locked.add(staging)
1504 for job in jobs:1865 for job in jobs:
1505 source, dest = job.split(':')1866 source, dest = job.split(":")
1506 if dest and dest in staging_locked and not args.ignore_staging_lock:1867 if dest and dest in staging_locked and not args.ignore_staging_lock:
1507 warning(f"The following OEM metapackages will be skipped due to the staging-lock of {dest}.\n" + '\n'.join(jobs[job]))1868 warning(
1869 f"The following OEM metapackages will be skipped due to the staging-lock of {dest}.\n"
1870 + "\n".join(jobs[job])
1871 )
1508 else:1872 else:
1509 title = f"request of copy_package [{source}]"1873 title = f"request of copy_package [{source}]"
1510 tags = ["archive-request", "via-request-script", f"oem-scripts-{oem_scripts.__version__:.2f}", "oem-metapackages"]1874 tags = [
1875 "archive-request",
1876 "via-request-script",
1877 f"oem-scripts-{oem_scripts.__version__:.2f}",
1878 "oem-metapackages",
1879 ]
1511 if args.dry_run:1880 if args.dry_run:
1512 info(f"TITLE: {title}")1881 info(f"TITLE: {title}")
1513 info("TAGS: " + ','.join(tags))1882 info("TAGS: " + ",".join(tags))
1514 else:1883 else:
1515 debug(f"TITLE: {title}")1884 debug(f"TITLE: {title}")
1516 debug("TAGS: " + ','.join(tags))1885 debug("TAGS: " + ",".join(tags))
1517 packages = list()1886 packages = list()
1518 for pkg_name in jobs[job]:1887 for pkg_name in jobs[job]:
1519 pkg_info = pkgInfo[pkg_name]1888 pkg_info = pkgInfo[pkg_name]
1520 packages.append(f"{pkg_name} (=={pkg_info.devel_version})")1889 packages.append(f"{pkg_name} (=={pkg_info.devel_version})")
1521 packages = ', '.join(packages)1890 packages = ", ".join(packages)
1522 distribution = "focal"1891 distribution = "focal"
1523 if dest.startswith('somerville'):1892 if dest.startswith("somerville"):
1524 component = remove_suffix(dest, f"-{distribution}-staging").replace('-fossa', '')1893 component = remove_suffix(dest, f"-{distribution}-staging").replace(
1894 "-fossa", ""
1895 )
1525 else:1896 else:
1526 debug(dest)1897 debug(dest)
1527 project, group, _ = dest.split('-', 2)1898 project, group, _ = dest.split("-", 2)
1528 component = f"{project}.{group}"1899 component = f"{project}.{group}"
1529 production = distribution + '-' + component1900 production = distribution + "-" + component
1530 description = staging_copy_template.substitute(1901 description = staging_copy_template.substitute(
1531 source=source,1902 source=source,
1532 destination=dest,1903 destination=dest,
1533 packages=packages,1904 packages=packages,
1534 production=production,1905 production=production,
1535 username=config['username'],1906 username=config["username"],
1536 url=config['url'],1907 url=config["url"],
1537 distribution=distribution,1908 distribution=distribution,
1538 component=component)1909 component=component,
1910 )
1539 print(description)1911 print(description)
1540 if not args.dry_run and yes_or_ask(args.yes, f"Would you like to create a cloudberry bug to copy the {packages} from {source} to {dest}?"):1912 if not args.dry_run and yes_or_ask(
1541 bug = lp.bugs.createBug(description=description, target=cloudberry, title=title, tags=tags)1913 args.yes,
1914 f"Would you like to create a cloudberry bug to copy the {packages} from {source} to {dest}?",
1915 ):
1916 bug = lp.bugs.createBug(
1917 description=description, target=cloudberry, title=title, tags=tags
1918 )
1542 for task in bug.bug_tasks:1919 for task in bug.bug_tasks:
1543 task.importance = 'High'1920 task.importance = "High"
1544 task.assignee = assignee1921 task.assignee = assignee
1545 task.lp_save()1922 task.lp_save()
1546 bug.lp_save()1923 bug.lp_save()
1547 print(f"The cloudberry staging copy bug has been created on {bug.web_link}.\n")1924 print(
1925 f"The cloudberry staging copy bug has been created on {bug.web_link}.\n"
1926 )
1548else:1927else:
1549 parser.print_help()1928 parser.print_help()
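The oem-meta-packages hunks above account for the bulk of this diff, and nearly all of them are black mechanically re-wrapping long call sites without changing behaviour. As a rough illustration of that transformation (the function and its arguments below are invented for this sketch, not taken from the script):

# Illustrative sketch only: "describe_package" and its arguments are invented,
# not part of oem-meta-packages.
def describe_package(name, ppa_archive, ppa_version, ubuntu_version):
    return f"{name} {ppa_version} in ppa:oem-archive/{ppa_archive}, {ubuntu_version} in Ubuntu"

# Before black: one long call, flagged by flake8 (E501) once it exceeds the line limit.
line = describe_package(name="oem-somerville-example-meta", ppa_archive="somerville-example", ppa_version="20.04ubuntu1", ubuntu_version="20.04ubuntu2")

# After black: one argument per line, a trailing comma, and the closing
# parenthesis on its own line, which is the shape seen throughout the hunks above.
line = describe_package(
    name="oem-somerville-example-meta",
    ppa_archive="somerville-example",
    ppa_version="20.04ubuntu1",
    ubuntu_version="20.04ubuntu2",
)
print(line)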
diff --git a/oem_scripts/LaunchpadLogin.py b/oem_scripts/LaunchpadLogin.py
index 0583ad2..01bae8b 100644
--- a/oem_scripts/LaunchpadLogin.py
+++ b/oem_scripts/LaunchpadLogin.py
@@ -8,20 +8,28 @@ import logging
8import os8import os
99
1010
11class ShutUpAndTakeMyTokenAuthorizationEngine(credentials.RequestTokenAuthorizationEngine):11class ShutUpAndTakeMyTokenAuthorizationEngine(
12 credentials.RequestTokenAuthorizationEngine
13):
12 """This stub class prevents launchpadlib from nulling out consumer_name14 """This stub class prevents launchpadlib from nulling out consumer_name
13 in its demented campaign to force the use of desktop integration. """15 in its demented campaign to force the use of desktop integration. """
1416
15 def __init__(self, service_root, application_name=None, consumer_name=None,17 def __init__(
16 credential_save_failed=None, allow_access_levels=None):18 self,
19 service_root,
20 application_name=None,
21 consumer_name=None,
22 credential_save_failed=None,
23 allow_access_levels=None,
24 ):
17 super(ShutUpAndTakeMyTokenAuthorizationEngine, self).__init__(25 super(ShutUpAndTakeMyTokenAuthorizationEngine, self).__init__(
18 service_root, application_name, consumer_name,26 service_root, application_name, consumer_name, credential_save_failed
19 credential_save_failed)27 )
2028
2129
22def launchpad_login(pkg, service_root='production', version='devel'):30def launchpad_login(pkg, service_root="production", version="devel"):
23 """Log into Launchpad API with stored credentials."""31 """Log into Launchpad API with stored credentials."""
24 creds_dir = os.path.expanduser(os.path.join('~', '.' + pkg))32 creds_dir = os.path.expanduser(os.path.join("~", "." + pkg))
25 if not os.path.exists(creds_dir):33 if not os.path.exists(creds_dir):
26 os.makedirs(creds_dir, 0o700)34 os.makedirs(creds_dir, 0o700)
27 os.chmod(creds_dir, 0o700)35 os.chmod(creds_dir, 0o700)
@@ -29,69 +37,83 @@ def launchpad_login(pkg, service_root='production', version='devel'):
29 consumer_name = pkg37 consumer_name = pkg
30 return Launchpad.login_with(38 return Launchpad.login_with(
31 consumer_name=consumer_name,39 consumer_name=consumer_name,
32 credentials_file=os.path.join(creds_dir, 'launchpad.credentials'),40 credentials_file=os.path.join(creds_dir, "launchpad.credentials"),
33 service_root=api_endpoint,41 service_root=api_endpoint,
34 version=version,42 version=version,
35 authorization_engine=ShutUpAndTakeMyTokenAuthorizationEngine(43 authorization_engine=ShutUpAndTakeMyTokenAuthorizationEngine(
36 service_root=api_endpoint,44 service_root=api_endpoint, consumer_name=consumer_name
37 consumer_name=consumer_name,
38 ),45 ),
39 )46 )
4047
4148
42class LaunchpadLogin():49class LaunchpadLogin:
43 """Try to unify all Launchpad login"""50 """Try to unify all Launchpad login"""
44 def __init__(self, application_name='oem-scripts',51
45 service_root=None, launchpadlib_dir=None,52 def __init__(
46 version="devel", bot=False):53 self,
54 application_name="oem-scripts",
55 service_root=None,
56 launchpadlib_dir=None,
57 version="devel",
58 bot=False,
59 ):
4760
48 if launchpadlib_dir is None:61 if launchpadlib_dir is None:
49 launchpadlib_dir = os.path.join(os.environ["HOME"], ".launchpadlib/cache")62 launchpadlib_dir = os.path.join(os.environ["HOME"], ".launchpadlib/cache")
5063
51 if service_root is None:64 if service_root is None:
52 if os.environ.get('LAUNCHPAD_API') == lookup_service_root('staging'):65 if os.environ.get("LAUNCHPAD_API") == lookup_service_root("staging"):
53 service_root = 'staging'66 service_root = "staging"
54 else:67 else:
55 service_root = 'production'68 service_root = "production"
5669
57 oem_scripts_config_ini = os.path.join(os.environ["HOME"],70 oem_scripts_config_ini = os.path.join(
58 ".config/oem-scripts/config.ini")71 os.environ["HOME"], ".config/oem-scripts/config.ini"
72 )
59 launchpad_token = os.environ.get("LAUNCHPAD_TOKEN")73 launchpad_token = os.environ.get("LAUNCHPAD_TOKEN")
6074
61 if bot:75 if bot:
62 logging.info("Using oem-taipei-bot credentials")76 logging.info("Using oem-taipei-bot credentials")
63 self.lp = launchpad_login('/', service_root)77 self.lp = launchpad_login("/", service_root)
6478
65 elif launchpad_token:79 elif launchpad_token:
66 if launchpad_token == "::":80 if launchpad_token == "::":
67 logging.info("Using anonymously login")81 logging.info("Using anonymously login")
68 self.lp = Launchpad.login_anonymously(application_name, service_root)82 self.lp = Launchpad.login_anonymously(application_name, service_root)
69 elif ":" in launchpad_token:83 elif ":" in launchpad_token:
70 oauth_token, oauth_token_secret, oauth_consumer_key = launchpad_token.split(":", maxsplit=2)84 oauth_token, oauth_token_secret, oauth_consumer_key = launchpad_token.split(
71 self.lp = Launchpad.login(oauth_consumer_key,85 ":", maxsplit=2
72 oauth_token,86 )
73 oauth_token_secret,87 self.lp = Launchpad.login(
74 service_root=service_root,88 oauth_consumer_key,
75 cache=launchpadlib_dir,89 oauth_token,
76 version=version)90 oauth_token_secret,
91 service_root=service_root,
92 cache=launchpadlib_dir,
93 version=version,
94 )
77 else:95 else:
78 logging.error(f"invalid LAUNCHPAD_TOKEN '{launchpad_token}'")96 logging.error(f"invalid LAUNCHPAD_TOKEN '{launchpad_token}'")
79 exit(1)97 exit(1)
8098
81 elif os.environ.get('LAUNCHPAD_API') and os.path.exists(oem_scripts_config_ini):99 elif os.environ.get("LAUNCHPAD_API") and os.path.exists(oem_scripts_config_ini):
82 logging.info("Using oem-scripts oauth token")100 logging.info("Using oem-scripts oauth token")
83 oem_scripts_config = ConfigParser()101 oem_scripts_config = ConfigParser()
84 oem_scripts_config.read(oem_scripts_config_ini)102 oem_scripts_config.read(oem_scripts_config_ini)
85 config = oem_scripts_config['oem-scripts']103 config = oem_scripts_config["oem-scripts"]
86 self.lp = Launchpad.login(config['oauth_consumer_key'],104 self.lp = Launchpad.login(
87 config['oauth_token'],105 config["oauth_consumer_key"],
88 config['oauth_token_secret'],106 config["oauth_token"],
89 service_root=service_root,107 config["oauth_token_secret"],
90 cache=launchpadlib_dir,108 service_root=service_root,
91 version=version)109 cache=launchpadlib_dir,
110 version=version,
111 )
92 else:112 else:
93 logging.info("Using oem-scripts login")113 logging.info("Using oem-scripts login")
94 self.lp = Launchpad.login_with(application_name=application_name,114 self.lp = Launchpad.login_with(
95 service_root=service_root,115 application_name=application_name,
96 launchpadlib_dir=launchpadlib_dir,116 service_root=service_root,
97 version=version)117 launchpadlib_dir=launchpadlib_dir,
118 version=version,
119 )
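The LaunchpadLogin.py changes above are also formatting-only; the login selection logic, including the LAUNCHPAD_TOKEN environment variable, is untouched. A self-contained sketch of that token convention as the class reads it ("::" requests an anonymous login, otherwise "token:secret:consumer key"), with the surrounding Launchpad calls left out:

import os


def parse_launchpad_token(value):
    # Mirrors the LAUNCHPAD_TOKEN convention in LaunchpadLogin.py:
    # "::" means anonymous access, anything else containing ":" is
    # "oauth_token:oauth_token_secret:oauth_consumer_key".
    if value == "::":
        return None  # anonymous login
    if ":" in value:
        oauth_token, oauth_token_secret, oauth_consumer_key = value.split(":", maxsplit=2)
        return oauth_token, oauth_token_secret, oauth_consumer_key
    raise ValueError(f"invalid LAUNCHPAD_TOKEN '{value}'")


print(parse_launchpad_token(os.environ.get("LAUNCHPAD_TOKEN", "::")))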
diff --git a/oem_scripts/logging.py b/oem_scripts/logging.py
index 5351f86..0571d3c 100644
--- a/oem_scripts/logging.py
+++ b/oem_scripts/logging.py
@@ -22,29 +22,36 @@ import sys
2222
2323
24def setup_logging(debug=False, quiet=False):24def setup_logging(debug=False, quiet=False):
25 logging.addLevelName(logging.DEBUG,25 logging.addLevelName(
26 "\033[1;96m%s\033[1;0m" %26 logging.DEBUG, "\033[1;96m%s\033[1;0m" % logging.getLevelName(logging.DEBUG)
27 logging.getLevelName(logging.DEBUG))27 )
28 logging.addLevelName(logging.INFO,28 logging.addLevelName(
29 "\033[1;32m%s\033[1;0m" %29 logging.INFO, "\033[1;32m%s\033[1;0m" % logging.getLevelName(logging.INFO)
30 logging.getLevelName(logging.INFO))30 )
31 logging.addLevelName(logging.WARNING,31 logging.addLevelName(
32 "\033[1;33m%s\033[1;0m" %32 logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING)
33 logging.getLevelName(logging.WARNING))33 )
34 logging.addLevelName(logging.ERROR,34 logging.addLevelName(
35 "\033[1;31m%s\033[1;0m" %35 logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR)
36 logging.getLevelName(logging.ERROR))36 )
37 logging.addLevelName(logging.CRITICAL,37 logging.addLevelName(
38 "\033[1;41m%s\033[1;0m" %38 logging.CRITICAL,
39 logging.getLevelName(logging.CRITICAL))39 "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.CRITICAL),
40 )
40 if debug:41 if debug:
41 logging.basicConfig(format='<%(levelname)s> %(message)s',42 logging.basicConfig(
42 level=logging.DEBUG,43 format="<%(levelname)s> %(message)s",
43 handlers=[logging.StreamHandler(sys.stdout)])44 level=logging.DEBUG,
45 handlers=[logging.StreamHandler(sys.stdout)],
46 )
44 elif not quiet:47 elif not quiet:
45 logging.basicConfig(format='<%(levelname)s> %(message)s',48 logging.basicConfig(
46 level=logging.INFO,49 format="<%(levelname)s> %(message)s",
47 handlers=[logging.StreamHandler(sys.stdout)])50 level=logging.INFO,
51 handlers=[logging.StreamHandler(sys.stdout)],
52 )
48 else:53 else:
49 logging.basicConfig(format='<%(levelname)s> %(message)s',54 logging.basicConfig(
50 handlers=[logging.StreamHandler(sys.stdout)])55 format="<%(levelname)s> %(message)s",
56 handlers=[logging.StreamHandler(sys.stdout)],
57 )
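The oem_scripts/logging.py hunk is the same story: setup_logging() still colours the level names by re-registering them with ANSI escape codes, and black only re-wraps the calls. A minimal standalone version of that trick (warning level only, otherwise the same escape sequences as the script):

import logging
import sys

# Standalone sketch of the technique in setup_logging(): re-register each level
# name wrapped in ANSI escapes, so "%(levelname)s" renders in colour.
logging.addLevelName(
    logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING)
)
logging.basicConfig(
    format="<%(levelname)s> %(message)s",
    level=logging.INFO,
    handlers=[logging.StreamHandler(sys.stdout)],
)
logging.warning("the level name above is rendered in bold yellow on an ANSI terminal")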
diff --git a/pkg-list b/pkg-list
index e5700ac..c5eeca0 100755
--- a/pkg-list
+++ b/pkg-list
@@ -29,8 +29,9 @@ from logging import debug, error, critical, info, warning
29from urllib.parse import urljoin29from urllib.parse import urljoin
3030
3131
32parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,32parser = argparse.ArgumentParser(
33 epilog="""33 formatter_class=argparse.RawDescriptionHelpFormatter,
34 epilog="""
34examples:35examples:
35 pkg-list ubuntu-desktop --recommends > ubuntu-desktop.list36 pkg-list ubuntu-desktop --recommends > ubuntu-desktop.list
36 pkg-list dkms --exclude ubuntu-desktop.list > dkms.list37 pkg-list dkms --exclude ubuntu-desktop.list > dkms.list
@@ -39,63 +40,74 @@ examples:
39 pkg-list linux-generic --exclude all.list40 pkg-list linux-generic --exclude all.list
40 pkg-list linux-generic-hwe-20.04 --exclude all.list41 pkg-list linux-generic-hwe-20.04 --exclude all.list
41 pkg-list linux-oem-20.04 --exclude all.list42 pkg-list linux-oem-20.04 --exclude all.list
42 pkg-list linux-oem-20.04-edge --exclude all.list""")43 pkg-list linux-oem-20.04-edge --exclude all.list""",
4344)
44parser.add_argument("-d", "--debug",45
45 action="store_true",46parser.add_argument("-d", "--debug", action="store_true", help="print debug messages")
46 help="print debug messages")47parser.add_argument(
47parser.add_argument("-l", "--long",48 "-l",
48 action="store_true",49 "--long",
49 help="print long list including the URL, MD5, SHA1 and SHA256.")50 action="store_true",
50parser.add_argument("--apt-dir",51 help="print long list including the URL, MD5, SHA1 and SHA256.",
51 type=str,52)
52 help="specify the dir for apt")53parser.add_argument("--apt-dir", type=str, help="specify the dir for apt")
53parser.add_argument("--recommends",54parser.add_argument(
54 action="store_true",55 "--recommends", action="store_true", help="include recommends packages"
55 help="include recommends packages")56)
56parser.add_argument("--suggests",57parser.add_argument("--suggests", action="store_true", help="include suggests packages")
57 action="store_true",58parser.add_argument(
58 help="include suggests packages")59 "--non-installed",
59parser.add_argument("--non-installed",60 action="store_true",
60 action="store_true",61 help="only get non-installed packages per check current running environments",
61 help="only get non-installed packages per check current running environments")62)
62parser.add_argument("--fail-unavailable",63parser.add_argument(
63 action="store_true",64 "--fail-unavailable",
64 help="Return error when any package is unavailable.")65 action="store_true",
65parser.add_argument("--exclude",66 help="Return error when any package is unavailable.",
66 metavar='pkg.list',67)
67 type=argparse.FileType('r', encoding='UTF-8'),68parser.add_argument(
68 help="package names and versions to exclude.")69 "--exclude",
69parser.add_argument('pkgs',70 metavar="pkg.list",
70 metavar='PKG_NAME',71 type=argparse.FileType("r", encoding="UTF-8"),
71 type=str, nargs='+',72 help="package names and versions to exclude.",
72 help='the names of Debian binary packages')73)
74parser.add_argument(
75 "pkgs",
76 metavar="PKG_NAME",
77 type=str,
78 nargs="+",
79 help="the names of Debian binary packages",
80)
7381
74args = parser.parse_args()82args = parser.parse_args()
7583
76logging.addLevelName(logging.DEBUG,84logging.addLevelName(
77 "\033[1;96m%s\033[1;0m" %85 logging.DEBUG, "\033[1;96m%s\033[1;0m" % logging.getLevelName(logging.DEBUG)
78 logging.getLevelName(logging.DEBUG))86)
79logging.addLevelName(logging.INFO,87logging.addLevelName(
80 "\033[1;32m%s\033[1;0m" %88 logging.INFO, "\033[1;32m%s\033[1;0m" % logging.getLevelName(logging.INFO)
81 logging.getLevelName(logging.INFO))89)
82logging.addLevelName(logging.WARNING,90logging.addLevelName(
83 "\033[1;33m%s\033[1;0m" %91 logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING)
84 logging.getLevelName(logging.WARNING))92)
85logging.addLevelName(logging.ERROR,93logging.addLevelName(
86 "\033[1;31m%s\033[1;0m" %94 logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR)
87 logging.getLevelName(logging.ERROR))95)
88logging.addLevelName(logging.CRITICAL,96logging.addLevelName(
89 "\033[1;41m%s\033[1;0m" %97 logging.CRITICAL, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.CRITICAL)
90 logging.getLevelName(logging.CRITICAL))98)
9199
92if args.debug:100if args.debug:
93 logging.basicConfig(format='<%(levelname)s> %(message)s',101 logging.basicConfig(
94 level=logging.DEBUG,102 format="<%(levelname)s> %(message)s",
95 handlers=[logging.StreamHandler(sys.stdout)])103 level=logging.DEBUG,
104 handlers=[logging.StreamHandler(sys.stdout)],
105 )
96else:106else:
97 logging.basicConfig(format='<%(levelname)s> %(message)s',107 logging.basicConfig(
98 handlers=[logging.StreamHandler(sys.stdout)])108 format="<%(levelname)s> %(message)s",
109 handlers=[logging.StreamHandler(sys.stdout)],
110 )
99111
100112
101def _debug_pkg(pkg: str) -> None:113def _debug_pkg(pkg: str) -> None:
@@ -106,12 +118,18 @@ def _debug_pkg(pkg: str) -> None:
106 debug(dir(pkg))118 debug(dir(pkg))
107119
108 for attr in dir(pkg):120 for attr in dir(pkg):
109 if not attr.startswith('__'):121 if not attr.startswith("__"):
110 if not isinstance(pkg.__getattribute__(attr), types.BuiltinFunctionType):122 if not isinstance(pkg.__getattribute__(attr), types.BuiltinFunctionType):
111 debug(f"{attr}: {pkg.__getattribute__(attr)}")123 debug(f"{attr}: {pkg.__getattribute__(attr)}")
112124
113125
114def get_depends(pkg_name: str, depends_list: list, recommends: bool, suggests: bool, non_installed: bool) -> bool:126def get_depends(
127 pkg_name: str,
128 depends_list: list,
129 recommends: bool,
130 suggests: bool,
131 non_installed: bool,
132) -> bool:
115 """Recursively get all dependencies.133 """Recursively get all dependencies.
116134
117 Args:135 Args:
@@ -158,25 +176,34 @@ def get_depends(pkg_name: str, depends_list: list, recommends: bool, suggests: b
158 if pkg_name in map(lambda x: x[0], depends_list):176 if pkg_name in map(lambda x: x[0], depends_list):
159 continue177 continue
160178
161 if any(pkg_name == name and version.ver_str == ver for name, ver in exclude_list):179 if any(
180 pkg_name == name and version.ver_str == ver for name, ver in exclude_list
181 ):
162 break182 break
163183
164 if non_installed and pkg.current_ver == version:184 if non_installed and pkg.current_ver == version:
165 break185 break
166186
167 for pfile in version.file_list:187 for pfile in version.file_list:
168 if pfile[0].filename != '/var/lib/dpkg/status' and record.lookup(pfile):188 if pfile[0].filename != "/var/lib/dpkg/status" and record.lookup(pfile):
169 url = urljoin("http://" + pfile[0].site, 'ubuntu/' + record.filename)189 url = urljoin("http://" + pfile[0].site, "ubuntu/" + record.filename)
170 break190 break
171191
172 debug(f"{pkg_name} {version.ver_str} {pkg.architecture} {url}")192 debug(f"{pkg_name} {version.ver_str} {pkg.architecture} {url}")
173 item = (pkg_name, version.ver_str, url, record.hashes.find("MD5Sum"), record.hashes.find("SHA1"), record.hashes.find("SHA256"))193 item = (
194 pkg_name,
195 version.ver_str,
196 url,
197 record.hashes.find("MD5Sum"),
198 record.hashes.find("SHA1"),
199 record.hashes.find("SHA256"),
200 )
174 depends_list.append(item)201 depends_list.append(item)
175202
176 for target in ('PreDepends', 'Depends', 'Recommends', 'Suggests'):203 for target in ("PreDepends", "Depends", "Recommends", "Suggests"):
177 if target == 'Recommends' and not recommends:204 if target == "Recommends" and not recommends:
178 continue205 continue
179 if target == 'Suggests' and not suggests:206 if target == "Suggests" and not suggests:
180 continue207 continue
181 if target not in version.depends_list_str:208 if target not in version.depends_list_str:
182 continue209 continue
@@ -188,10 +215,16 @@ def get_depends(pkg_name: str, depends_list: list, recommends: bool, suggests: b
188 if name in map(lambda x: x[0], depends_list):215 if name in map(lambda x: x[0], depends_list):
189 found = True216 found = True
190 break217 break
191 if arch == 'i386' and pkg.architecture == 'amd64':218 if arch == "i386" and pkg.architecture == "amd64":
192 name = name + ":i386"219 name = name + ":i386"
193 pkg = cache[name]220 pkg = cache[name]
194 found = get_depends(name, depends_list, recommends=recommends, suggests=suggests, non_installed=non_installed)221 found = get_depends(
222 name,
223 depends_list,
224 recommends=recommends,
225 suggests=suggests,
226 non_installed=non_installed,
227 )
195 if found:228 if found:
196 break229 break
197 if not found and args.fail_unavailable:230 if not found and args.fail_unavailable:
@@ -205,11 +238,13 @@ if args.apt_dir:
205 if args.debug:238 if args.debug:
206 old = apt_pkg.config.dump()239 old = apt_pkg.config.dump()
207 apt_pkg.config.set("Dir", args.apt_dir)240 apt_pkg.config.set("Dir", args.apt_dir)
208 apt_pkg.config.set("Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status"))241 apt_pkg.config.set(
242 "Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status")
243 )
209 if args.debug:244 if args.debug:
210 new = apt_pkg.config.dump()245 new = apt_pkg.config.dump()
211 d = difflib.Differ()246 d = difflib.Differ()
212 diff = d.compare(old.split('\n'), new.split('\n'))247 diff = d.compare(old.split("\n"), new.split("\n"))
213 for line in diff:248 for line in diff:
214 debug(line.strip())249 debug(line.strip())
215 apt_pkg.init_system()250 apt_pkg.init_system()
@@ -225,12 +260,18 @@ exclude_list = []
225260
226if args.exclude:261if args.exclude:
227 for line in args.exclude.readlines():262 for line in args.exclude.readlines():
228 (name, ver) = line.strip().split(' ')263 (name, ver) = line.strip().split(" ")
229 if not any(name == _name and ver == _ver for _name, _ver in exclude_list):264 if not any(name == _name and ver == _ver for _name, _ver in exclude_list):
230 exclude_list.append((name, ver))265 exclude_list.append((name, ver))
231266
232for pkg in args.pkgs:267for pkg in args.pkgs:
233 get_depends(pkg, pkg_list, recommends=args.recommends, suggests=args.suggests, non_installed=args.non_installed)268 get_depends(
269 pkg,
270 pkg_list,
271 recommends=args.recommends,
272 suggests=args.suggests,
273 non_installed=args.non_installed,
274 )
234275
235for pkg, ver, url, md5, sha1, sha256 in sorted(pkg_list):276for pkg, ver, url, md5, sha1, sha256 in sorted(pkg_list):
236 if args.long:277 if args.long:
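pkg-list is likewise reflowed without behavioural change. For readers following the --exclude handling in its last hunk: the file it reads is plain "name version" lines, deduplicated into (name, version) tuples before the dependency walk. A short sketch of that parsing, with invented package data:

# Sketch of the --exclude file format that pkg-list consumes: one "name version"
# pair per line, loaded into a deduplicated list of tuples. The package data
# below is made up for illustration.
from io import StringIO

sample = StringIO(
    "libc6 2.31-0ubuntu9\nzlib1g 1:1.2.11.dfsg-2ubuntu1\nlibc6 2.31-0ubuntu9\n"
)

exclude_list = []
for line in sample.readlines():
    (name, ver) = line.strip().split(" ")
    if not any(name == _name and ver == _ver for _name, _ver in exclude_list):
        exclude_list.append((name, ver))

print(exclude_list)  # [('libc6', '2.31-0ubuntu9'), ('zlib1g', '1:1.2.11.dfsg-2ubuntu1')]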
diff --git a/pkg-oem-meta b/pkg-oem-meta
index e5e3cf5..c924bfa 100755
--- a/pkg-oem-meta
+++ b/pkg-oem-meta
@@ -16,155 +16,164 @@ program_name = os.path.basename(sys.argv[0])
 
 setup_logging()
 
-if program_name == 'pkg-somerville-meta':
-    codename = 'somerville'
+if program_name == "pkg-somerville-meta":
+    codename = "somerville"
     Codename = codename.title()
-    brand = 'dell'
+    brand = "dell"
     parser = argparse.ArgumentParser(
         formatter_class=argparse.RawTextHelpFormatter,
         description=f"{Codename} platform meta package generator.",
         epilog=f"Ex. {program_name} --public-bug 1868254 -s focal -k"
-        " oem -p three-eyed-raven 0962")
-elif program_name == 'pkg-stella-meta':
-    codename = 'stella'
+        " oem -p three-eyed-raven 0962",
+    )
+elif program_name == "pkg-stella-meta":
+    codename = "stella"
     Codename = codename.title()
-    brand = 'hp'
+    brand = "hp"
     parser = argparse.ArgumentParser(
         formatter_class=argparse.RawTextHelpFormatter,
         description=f"{Codename} platform meta package generator.",
-        epilog=f"Ex. {program_name} -s focal -k oem"
-        " -g cmit -p beedrill 8594")
-elif program_name == 'pkg-sutton-meta':
-    codename = 'sutton'
+        epilog=f"Ex. {program_name} -s focal -k oem" " -g cmit -p beedrill 8594",
+    )
+elif program_name == "pkg-sutton-meta":
+    codename = "sutton"
     Codename = codename.title()
-    brand = 'lenovo'
+    brand = "lenovo"
     parser = argparse.ArgumentParser(
         formatter_class=argparse.RawTextHelpFormatter,
         description=f"{Codename} platform meta package generator.",
-        epilog=f"Ex. {program_name} -s focal -k oem"
-        " -g bachman -p banaing S08")
+        epilog=f"Ex. {program_name} -s focal -k oem" " -g bachman -p banaing S08",
+    )
 else:
-    error('This program can not be executed.')
+    error("This program can not be executed.")
     exit(1)
 
-parser.add_argument('-k', '--kernel',
-                    choices=['linux-oem-20.04', 'linux-oem-20.04b', 'linux-oem-20.04c', 'linux-generic-hwe-20.04'],
-                    default='linux-oem-20.04',
-                    help="Specify the kernel meta. [linux-oem-20.04|linux-oem-20.04b|linux-oem-20.04c|linux-generic-hwe-20.04]")
+parser.add_argument(
+    "-k",
+    "--kernel",
+    choices=[
+        "linux-oem-20.04",
+        "linux-oem-20.04b",
+        "linux-oem-20.04c",
+        "linux-generic-hwe-20.04",
+    ],
+    default="linux-oem-20.04",
+    help="Specify the kernel meta. [linux-oem-20.04|linux-oem-20.04b|linux-oem-20.04c|linux-generic-hwe-20.04]",
+)
 info = UbuntuDistroInfo()
 try:
     # for bionic
-    series = lsb_release.get_lsb_information()['CODENAME']
+    series = lsb_release.get_lsb_information()["CODENAME"]
 except AttributeError:
     # for focal
-    series = lsb_release.get_os_release()['CODENAME']
+    series = lsb_release.get_os_release()["CODENAME"]
 
-parser.add_argument('-s', '--series', choices=info.supported(), default=series,
-                    help=f"Ubuntu series, such as {series} by default.")
-parser.add_argument('--public-bug',
-                    help="Launchpad public bug number.",
-                    type=int)
-parser.add_argument('--private-bug',
-                    help="Launchpad private bug number.",
-                    type=int)
-
-if codename == 'somerville':
-    parser.add_argument('bios_id', nargs='+', help="BIOS ID")
-    parser.add_argument('-p', '--platform', help="platform tag", required=True)
-elif codename == 'stella':
-    parser.add_argument('-g', '--group', help="OEM-group", required=True)
-    parser.add_argument('-p', '--platform', help="platform-codename",
-                        required=True)
-    parser.add_argument('sd_id', nargs='+',
-                        help="subsystem device ID, such as 0962")
-elif codename == 'sutton':
-    parser.add_argument('-g', '--group', help="OEM-group", required=True)
-    parser.add_argument('-p', '--platform', help="platform-codename",
-                        required=True)
-    parser.add_argument('bios_ver', nargs='+',
-                        help="First three chars in bios version, "
-                        + "such as S08 or bvnLENOVO:bvrS08")
+parser.add_argument(
+    "-s",
+    "--series",
+    choices=info.supported(),
+    default=series,
+    help=f"Ubuntu series, such as {series} by default.",
+)
+parser.add_argument("--public-bug", help="Launchpad public bug number.", type=int)
+parser.add_argument("--private-bug", help="Launchpad private bug number.", type=int)
+
+if codename == "somerville":
+    parser.add_argument("bios_id", nargs="+", help="BIOS ID")
+    parser.add_argument("-p", "--platform", help="platform tag", required=True)
+elif codename == "stella":
+    parser.add_argument("-g", "--group", help="OEM-group", required=True)
+    parser.add_argument("-p", "--platform", help="platform-codename", required=True)
+    parser.add_argument("sd_id", nargs="+", help="subsystem device ID, such as 0962")
+elif codename == "sutton":
+    parser.add_argument("-g", "--group", help="OEM-group", required=True)
+    parser.add_argument("-p", "--platform", help="platform-codename", required=True)
+    parser.add_argument(
+        "bios_ver",
+        nargs="+",
+        help="First three chars in bios version, " + "such as S08 or bvnLENOVO:bvrS08",
+    )
 args = parser.parse_args()
 
 platform = args.platform.lower()
 Platform = platform.title()
 series = args.series
-versions = dict(zip(info.get_all(result='codename'),
-                    info.get_all(result='release')))
-version = versions[series].split(' ')[0]
+versions = dict(zip(info.get_all(result="codename"), info.get_all(result="release")))
+version = versions[series].split(" ")[0]
 
 # Sanity check
-if codename == 'somerville':
-    group = ''
+if codename == "somerville":
+    group = ""
     if args.platform.startswith("fossa-"):
-        error('Please remove fossa- prefix from the platform name.')
+        error("Please remove fossa- prefix from the platform name.")
         exit(1)
     for bios_id in args.bios_id:
-        if not re.match('[0-9a-fA-F]{4}$', bios_id):
-            error('Invalid BIOS ID: {%s}' % bios_id)
+        if not re.match("[0-9a-fA-F]{4}$", bios_id):
+            error("Invalid BIOS ID: {%s}" % bios_id)
             exit(1)
-    meta = 'oem-' + codename + '-' + platform + '-meta'
-elif codename == 'stella':
+    meta = "oem-" + codename + "-" + platform + "-meta"
+elif codename == "stella":
     group = args.group.lower()
     for sd_id in args.sd_id:
-        if not re.match('[0-9a-fA-F]{4}$', sd_id):
-            error('Invalid subsystem device ID: {%s}' % sd_id)
+        if not re.match("[0-9a-fA-F]{4}$", sd_id):
+            error("Invalid subsystem device ID: {%s}" % sd_id)
             exit(1)
-    meta = 'oem-' + codename + '.' + group + '-' + platform + '-meta'
The diff has been truncated for viewing.

Subscribers

People subscribed via source and target branches