Merge ~fourdollars/pc-enablement/+git/oem-scripts:master into ~oem-solutions-engineers/pc-enablement/+git/oem-scripts:master
- Git
- lp:~fourdollars/pc-enablement/+git/oem-scripts
- master
- Merge into master
Status: | Merged |
---|---|
Approved by: | Shih-Yuan Lee |
Approved revision: | 633af21b9daeea13ce683c10089d4db0efcda4bd |
Merged at revision: | f08dc80657cb9ef6f23ee18eea7cdc3076a7cc44 |
Proposed branch: | ~fourdollars/pc-enablement/+git/oem-scripts:master |
Merge into: | ~oem-solutions-engineers/pc-enablement/+git/oem-scripts:master |
Diff against target: |
5933 lines (+2255/-1371) 22 files modified
bug-bind.py (+53/-30) copyPackage.py (+23/-18) debian/changelog (+19/-0) debian/control (+1/-1) debian/rules (+1/-1) dev/null (+0/-16) get-oem-auth-token (+16/-14) get-oemshare-auth-token (+2/-2) lp-bug (+143/-99) mir-bug (+455/-224) oem-getiso (+96/-73) oem-meta-packages (+836/-457) oem_scripts/LaunchpadLogin.py (+61/-39) oem_scripts/logging.py (+30/-23) pkg-list (+109/-68) pkg-oem-meta (+235/-189) rename-everything.py (+89/-60) review-merge-proposal (+20/-6) setup.py (+43/-42) tests/test_black (+3/-0) tests/test_bugbind.py (+12/-7) tests/test_flake8 (+8/-2) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Bin Li | Approve | ||
Review via email: mp+404053@code.launchpad.net |
Commit message
Description of the change
There is no functional change.
It just refactors the Python code so that it passes the flake8 and black checks.
Shih-Yuan Lee (fourdollars) wrote : | # |
[BOT]
$ cat oem-scripts-
autopkgtest-
autopkgtest-
pkg-somerville-meta PASS
pkg-stella-meta PASS
pkg-sutton-meta PASS
bug-bind PASS
get-private-ppa PASS
jq-lp PASS
launchpad-api PASS
lp-bug PASS
oem-meta-packages PASS
pkg-list PASS
review-
run-autopkgtest PASS
setup-apt-dir PASS
mir-bug SKIP exit status 77 and marked as skippable
git-url-
recovery-
mir-bug-
https:/
Preview Diff
1 | diff --git a/bug-bind.py b/bug-bind.py | |||
2 | index 16056b6..60a9db0 100755 | |||
3 | --- a/bug-bind.py | |||
4 | +++ b/bug-bind.py | |||
5 | @@ -9,18 +9,19 @@ import re | |||
6 | 9 | import lazr.restfulclient.resource | 9 | import lazr.restfulclient.resource |
7 | 10 | from oem_scripts.LaunchpadLogin import LaunchpadLogin | 10 | from oem_scripts.LaunchpadLogin import LaunchpadLogin |
8 | 11 | 11 | ||
11 | 12 | HWE_PUBLIC_PROJECT = 'hwe-next' | 12 | HWE_PUBLIC_PROJECT = "hwe-next" |
12 | 13 | OEM_PUBLIC_PROJECT = 'oem-priority' | 13 | OEM_PUBLIC_PROJECT = "oem-priority" |
13 | 14 | 14 | ||
14 | 15 | lp = None | 15 | lp = None |
16 | 16 | log = logging.getLogger('bug-bind-logger') | 16 | log = logging.getLogger("bug-bind-logger") |
17 | 17 | log.setLevel(logging.DEBUG) | 17 | log.setLevel(logging.DEBUG) |
20 | 18 | logging.basicConfig(format='%(levelname)s %(asctime)s - %(message)s', | 18 | logging.basicConfig( |
21 | 19 | datefmt='%m/%d/%Y %I:%M:%S %p') | 19 | format="%(levelname)s %(asctime)s - %(message)s", datefmt="%m/%d/%Y %I:%M:%S %p" |
22 | 20 | ) | ||
23 | 20 | 21 | ||
24 | 21 | 22 | ||
25 | 22 | def link_bugs(public_bugnum, privates, ihv): | 23 | def link_bugs(public_bugnum, privates, ihv): |
27 | 23 | assert(public_bugnum.isdigit()) | 24 | assert public_bugnum.isdigit() |
28 | 24 | login = LaunchpadLogin() | 25 | login = LaunchpadLogin() |
29 | 25 | lp = login.lp | 26 | lp = login.lp |
30 | 26 | pub_bug = lp.bugs[public_bugnum] | 27 | pub_bug = lp.bugs[public_bugnum] |
31 | @@ -29,7 +30,7 @@ def link_bugs(public_bugnum, privates, ihv): | |||
32 | 29 | 30 | ||
33 | 30 | # Add X-HWE-Bug: tag to description. | 31 | # Add X-HWE-Bug: tag to description. |
34 | 31 | for priv in privates: | 32 | for priv in privates: |
36 | 32 | assert(priv.isdigit()) | 33 | assert priv.isdigit() |
37 | 33 | bug = lp.bugs[priv] | 34 | bug = lp.bugs[priv] |
38 | 34 | 35 | ||
39 | 35 | if re.search(tag, bug.description) is None: | 36 | if re.search(tag, bug.description) is None: |
40 | @@ -41,30 +42,35 @@ def link_bugs(public_bugnum, privates, ihv): | |||
41 | 41 | 42 | ||
42 | 42 | if ihv == "hwe": | 43 | if ihv == "hwe": |
43 | 43 | hwe_next = lp.projects[HWE_PUBLIC_PROJECT] | 44 | hwe_next = lp.projects[HWE_PUBLIC_PROJECT] |
45 | 44 | sub_url = "%s~%s" % (lp._root_uri, 'canonical-hwe-team') | 45 | sub_url = "%s~%s" % (lp._root_uri, "canonical-hwe-team") |
46 | 45 | pub_bug.subscribe(person=sub_url) | 46 | pub_bug.subscribe(person=sub_url) |
48 | 46 | remote_bug_tag(pub_bug, 'hwe-needs-public-bug') | 47 | remote_bug_tag(pub_bug, "hwe-needs-public-bug") |
49 | 47 | elif ihv == "swe": | 48 | elif ihv == "swe": |
50 | 48 | hwe_next = lp.projects[OEM_PUBLIC_PROJECT] | 49 | hwe_next = lp.projects[OEM_PUBLIC_PROJECT] |
52 | 49 | sub_url = "%s~%s" % (lp._root_uri, 'oem-solutions-engineers') | 50 | sub_url = "%s~%s" % (lp._root_uri, "oem-solutions-engineers") |
53 | 50 | pub_bug.subscribe(person=sub_url) | 51 | pub_bug.subscribe(person=sub_url) |
55 | 51 | remote_bug_tag(pub_bug, 'swe-needs-public-bug') | 52 | remote_bug_tag(pub_bug, "swe-needs-public-bug") |
56 | 52 | else: | 53 | else: |
57 | 53 | if lp.projects[ihv]: | 54 | if lp.projects[ihv]: |
58 | 54 | hwe_next = lp.projects[ihv] | 55 | hwe_next = lp.projects[ihv] |
60 | 55 | remote_bug_tag(pub_bug, 'hwe-needs-public-bug') | 56 | remote_bug_tag(pub_bug, "hwe-needs-public-bug") |
61 | 56 | else: | 57 | else: |
63 | 57 | log.error('Project ' + ihv + ' not defined') | 58 | log.error("Project " + ihv + " not defined") |
64 | 58 | 59 | ||
68 | 59 | add_bug_tags(pub_bug, ['originate-from-' + str(bug.id), | 60 | add_bug_tags( |
69 | 60 | bug.bug_tasks_collection[0].bug_target_name, # OEM codename | 61 | pub_bug, |
70 | 61 | 'oem-priority']) | 62 | [ |
71 | 63 | "originate-from-" + str(bug.id), | ||
72 | 64 | bug.bug_tasks_collection[0].bug_target_name, # OEM codename | ||
73 | 65 | "oem-priority", | ||
74 | 66 | ], | ||
75 | 67 | ) | ||
76 | 62 | 68 | ||
77 | 63 | add_bug_task(pub_bug, hwe_next) | 69 | add_bug_task(pub_bug, hwe_next) |
78 | 64 | 70 | ||
79 | 65 | 71 | ||
80 | 66 | def link_priv_bugs(main_bugnum, privates, ihv): | 72 | def link_priv_bugs(main_bugnum, privates, ihv): |
82 | 67 | assert(main_bugnum.isdigit()) | 73 | assert main_bugnum.isdigit() |
83 | 68 | login = LaunchpadLogin() | 74 | login = LaunchpadLogin() |
84 | 69 | lp = login.lp | 75 | lp = login.lp |
85 | 70 | main_bug = lp.bugs[main_bugnum] | 76 | main_bug = lp.bugs[main_bugnum] |
86 | @@ -73,7 +79,7 @@ def link_priv_bugs(main_bugnum, privates, ihv): | |||
87 | 73 | 79 | ||
88 | 74 | # Add X-HWE-Bug: tag to description. | 80 | # Add X-HWE-Bug: tag to description. |
89 | 75 | for priv in privates: | 81 | for priv in privates: |
91 | 76 | assert(priv.isdigit()) | 82 | assert priv.isdigit() |
92 | 77 | bug = lp.bugs[priv] | 83 | bug = lp.bugs[priv] |
93 | 78 | 84 | ||
94 | 79 | if re.search(tag, bug.description) is None: | 85 | if re.search(tag, bug.description) is None: |
95 | @@ -83,20 +89,20 @@ def link_priv_bugs(main_bugnum, privates, ihv): | |||
96 | 83 | else: | 89 | else: |
97 | 84 | log.warning("Bug already linked to main bug " + tag) | 90 | log.warning("Bug already linked to main bug " + tag) |
98 | 85 | 91 | ||
100 | 86 | add_bug_tags(main_bug, ['originate-from-' + str(bug.id)]) | 92 | add_bug_tags(main_bug, ["originate-from-" + str(bug.id)]) |
101 | 87 | 93 | ||
102 | 88 | 94 | ||
103 | 89 | def add_bug_task(bug, bug_task): | 95 | def add_bug_task(bug, bug_task): |
105 | 90 | assert(type(bug_task) == lazr.restfulclient.resource.Entry) | 96 | assert type(bug_task) == lazr.restfulclient.resource.Entry |
106 | 91 | 97 | ||
107 | 92 | # Check if already have the requested | 98 | # Check if already have the requested |
108 | 93 | for i in bug.bug_tasks: | 99 | for i in bug.bug_tasks: |
109 | 94 | if bug_task.name == i.bug_target_name: | 100 | if bug_task.name == i.bug_target_name: |
111 | 95 | log.warning('Also-affects on {} already complete.'.format(bug_task)) | 101 | log.warning("Also-affects on {} already complete.".format(bug_task)) |
112 | 96 | return | 102 | return |
113 | 97 | bug.addTask(target=bug_task) | 103 | bug.addTask(target=bug_task) |
114 | 98 | bug.lp_save() | 104 | bug.lp_save() |
116 | 99 | log.info('Also-affects on {} successful.'.format(bug_task)) | 105 | log.info("Also-affects on {} successful.".format(bug_task)) |
117 | 100 | 106 | ||
118 | 101 | 107 | ||
119 | 102 | def remote_bug_tag(bug, tag): | 108 | def remote_bug_tag(bug, tag): |
120 | @@ -110,7 +116,7 @@ def remote_bug_tag(bug, tag): | |||
121 | 110 | 116 | ||
122 | 111 | def add_bug_tags(bug, tags): | 117 | def add_bug_tags(bug, tags): |
123 | 112 | """ add tags to the bug. """ | 118 | """ add tags to the bug. """ |
125 | 113 | log.info('Add tags {} to bug {}'.format(tags, bug.web_link)) | 119 | log.info("Add tags {} to bug {}".format(tags, bug.web_link)) |
126 | 114 | new_tags = [] | 120 | new_tags = [] |
127 | 115 | for tag_to_add in tags: | 121 | for tag_to_add in tags: |
128 | 116 | if tag_to_add not in bug.tags: | 122 | if tag_to_add not in bug.tags: |
129 | @@ -119,7 +125,7 @@ def add_bug_tags(bug, tags): | |||
130 | 119 | bug.lp_save() | 125 | bug.lp_save() |
131 | 120 | 126 | ||
132 | 121 | 127 | ||
134 | 122 | if __name__ == '__main__': | 128 | if __name__ == "__main__": |
135 | 123 | description = """bind private bugs with pubilc bug | 129 | description = """bind private bugs with pubilc bug |
136 | 124 | bud-bind -p bugnumber private_bugnumber1 private_bugnumber2""" | 130 | bud-bind -p bugnumber private_bugnumber1 private_bugnumber2""" |
137 | 125 | help = """The expected live cycle of an oem-priority bug is: | 131 | help = """The expected live cycle of an oem-priority bug is: |
138 | @@ -127,11 +133,26 @@ bud-bind -p bugnumber private_bugnumber1 private_bugnumber2""" | |||
139 | 127 | 2. SWE/HWE manually create a public bug. | 133 | 2. SWE/HWE manually create a public bug. |
140 | 128 | 3. Use bug-bind to bind public and private bug.""" | 134 | 3. Use bug-bind to bind public and private bug.""" |
141 | 129 | 135 | ||
147 | 130 | parser = argparse.ArgumentParser(description=description, epilog=help, formatter_class=argparse.RawDescriptionHelpFormatter) | 136 | parser = argparse.ArgumentParser( |
148 | 131 | parser.add_argument('-m', '--main', help='main bug for private bugs') | 137 | description=description, |
149 | 132 | parser.add_argument('-p', '--public', help='The public bug number') | 138 | epilog=help, |
150 | 133 | parser.add_argument('-i', '--ihv', help='Launchpad project name for IHV\nExpecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex"', default='swe') | 139 | formatter_class=argparse.RawDescriptionHelpFormatter, |
151 | 134 | parser.add_argument('-v', '--vebose', help='shows debug messages', action='store_true', default=False) | 140 | ) |
152 | 141 | parser.add_argument("-m", "--main", help="main bug for private bugs") | ||
153 | 142 | parser.add_argument("-p", "--public", help="The public bug number") | ||
154 | 143 | parser.add_argument( | ||
155 | 144 | "-i", | ||
156 | 145 | "--ihv", | ||
157 | 146 | help='Launchpad project name for IHV\nExpecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex"', | ||
158 | 147 | default="swe", | ||
159 | 148 | ) | ||
160 | 149 | parser.add_argument( | ||
161 | 150 | "-v", | ||
162 | 151 | "--vebose", | ||
163 | 152 | help="shows debug messages", | ||
164 | 153 | action="store_true", | ||
165 | 154 | default=False, | ||
166 | 155 | ) | ||
167 | 135 | # TODO | 156 | # TODO |
168 | 136 | # parser.add_argument('-c', '--clean', help='unlnk the bug between public and private', action='store_true', default=False) | 157 | # parser.add_argument('-c', '--clean', help='unlnk the bug between public and private', action='store_true', default=False) |
169 | 137 | 158 | ||
170 | @@ -139,7 +160,9 @@ bud-bind -p bugnumber private_bugnumber1 private_bugnumber2""" | |||
171 | 139 | if args.vebose: | 160 | if args.vebose: |
172 | 140 | log.setLevel(logging.DEBUG) | 161 | log.setLevel(logging.DEBUG) |
173 | 141 | if args.ihv not in ["swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex"]: | 162 | if args.ihv not in ["swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex"]: |
175 | 142 | raise Exception('Expecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex" for ihv') | 163 | raise Exception( |
176 | 164 | 'Expecting "swe", "hwe", "intel", "amd", "nvidia", "lsi", "emulex" for ihv' | ||
177 | 165 | ) | ||
178 | 143 | if len(private_bugs) == 0: | 166 | if len(private_bugs) == 0: |
179 | 144 | parser.error("must provide private bug numbers.") | 167 | parser.error("must provide private bug numbers.") |
180 | 145 | if args.main: | 168 | if args.main: |
181 | diff --git a/copyPackage.py b/copyPackage.py | |||
182 | index 5358674..c6c0b5c 100755 | |||
183 | --- a/copyPackage.py | |||
184 | +++ b/copyPackage.py | |||
185 | @@ -1,7 +1,8 @@ | |||
186 | 1 | #!/usr/bin/python | 1 | #!/usr/bin/python |
187 | 2 | import sys | 2 | import sys |
188 | 3 | from launchpadlib.launchpad import Launchpad | 3 | from launchpadlib.launchpad import Launchpad |
190 | 4 | launchpad = Launchpad.login_with('test', "production") | 4 | |
191 | 5 | launchpad = Launchpad.login_with("test", "production") | ||
192 | 5 | 6 | ||
193 | 6 | 7 | ||
194 | 7 | def getValueWithDefault(prompt, default): | 8 | def getValueWithDefault(prompt, default): |
195 | @@ -10,8 +11,9 @@ def getValueWithDefault(prompt, default): | |||
196 | 10 | return result and result or default | 11 | return result and result or default |
197 | 11 | 12 | ||
198 | 12 | 13 | ||
201 | 13 | from_pocket = getValueWithDefault("From Pocket (Proposed|Updates|Release...)?", | 14 | from_pocket = getValueWithDefault( |
202 | 14 | "Proposed") | 15 | "From Pocket (Proposed|Updates|Release...)?", "Proposed" |
203 | 16 | ) | ||
204 | 15 | 17 | ||
205 | 16 | team = None | 18 | team = None |
206 | 17 | while not team: | 19 | while not team: |
207 | @@ -29,8 +31,7 @@ while not ppa: | |||
208 | 29 | except e: | 31 | except e: |
209 | 30 | print("Invalid ppa name") | 32 | print("Invalid ppa name") |
210 | 31 | 33 | ||
213 | 32 | to_pocket = getValueWithDefault("To Pocket (Proposed|Updates|Release...)?", | 34 | to_pocket = getValueWithDefault("To Pocket (Proposed|Updates|Release...)?", "Release") |
212 | 33 | "Release") | ||
214 | 34 | to_series = getValueWithDefault("To Series?", "precise") | 35 | to_series = getValueWithDefault("To Series?", "precise") |
215 | 35 | 36 | ||
216 | 36 | # Get link to ubuntu archive | 37 | # Get link to ubuntu archive |
217 | @@ -42,33 +43,37 @@ while True: | |||
218 | 42 | 43 | ||
219 | 43 | # View packages in ubuntu archive | 44 | # View packages in ubuntu archive |
220 | 44 | pkgs = archive.getPublishedSources( | 45 | pkgs = archive.getPublishedSources( |
222 | 45 | source_name=package_name, pocket=from_pocket, status="Published") | 46 | source_name=package_name, pocket=from_pocket, status="Published" |
223 | 47 | ) | ||
224 | 46 | 48 | ||
225 | 47 | while True: | 49 | while True: |
226 | 48 | print("\n----") | 50 | print("\n----") |
227 | 49 | names = [p.display_name for p in pkgs] | 51 | names = [p.display_name for p in pkgs] |
228 | 50 | for i, name in enumerate(names): | 52 | for i, name in enumerate(names): |
230 | 51 | print " %d: %s" % (i, name) | 53 | print(" %d: %s" % (i, name)) |
231 | 52 | print("----\n") | 54 | print("----\n") |
234 | 53 | i = raw_input("Enter pkg to transfer (0..%d/[Q]uit/[a]nother)> " | 55 | i = raw_input( |
235 | 54 | % (len(names) - 1)) | 56 | "Enter pkg to transfer (0..%d/[Q]uit/[a]nother)> " % (len(names) - 1) |
236 | 57 | ) | ||
237 | 55 | try: | 58 | try: |
238 | 56 | pkg = pkgs[int(i)] | 59 | pkg = pkgs[int(i)] |
239 | 57 | 60 | ||
240 | 58 | print("Ready to copy package %s" % pkg.display_name) | 61 | print("Ready to copy package %s" % pkg.display_name) |
242 | 59 | if raw_input("Confirm: [Y/n]").lower()[:1] != 'n': | 62 | if raw_input("Confirm: [Y/n]").lower()[:1] != "n": |
243 | 60 | pass | 63 | pass |
250 | 61 | ppa.syncSource(from_archive=archive, | 64 | ppa.syncSource( |
251 | 62 | include_binaries=True, | 65 | from_archive=archive, |
252 | 63 | source_name=pkg.display_name.split()[0], | 66 | include_binaries=True, |
253 | 64 | to_pocket=to_pocket, | 67 | source_name=pkg.display_name.split()[0], |
254 | 65 | to_series=to_series, | 68 | to_pocket=to_pocket, |
255 | 66 | version=pkg.source_package_version) | 69 | to_series=to_series, |
256 | 70 | version=pkg.source_package_version, | ||
257 | 71 | ) | ||
258 | 67 | 72 | ||
259 | 68 | except (ValueError, IndexError): | 73 | except (ValueError, IndexError): |
261 | 69 | if i.lower()[:1] == 'q': | 74 | if i.lower()[:1] == "q": |
262 | 70 | print("Quitting") | 75 | print("Quitting") |
263 | 71 | sys.exit(0) | 76 | sys.exit(0) |
265 | 72 | if i.lower()[:1] == 'a': | 77 | if i.lower()[:1] == "a": |
266 | 73 | break | 78 | break |
267 | 74 | print("invalid input\n") | 79 | print("invalid input\n") |
268 | diff --git a/debian/changelog b/debian/changelog | |||
269 | index da72513..02a4499 100644 | |||
270 | --- a/debian/changelog | |||
271 | +++ b/debian/changelog | |||
272 | @@ -1,3 +1,22 @@ | |||
273 | 1 | oem-scripts (0.98) UNRELEASED; urgency=medium | ||
274 | 2 | |||
275 | 3 | * Black all Python files. | ||
276 | 4 | * debian/control, | ||
277 | 5 | debian/rules: Use black to check all Python files. | ||
278 | 6 | * tests/test_flake8: Ignore W503 because it is incompatible with PEP 8. | ||
279 | 7 | * tests/test_flake8: Ignore E203 because flake8 doesn't deal with it well | ||
280 | 8 | and black will cover it. | ||
281 | 9 | * debian/rules, | ||
282 | 10 | tests/test_black: Move the black check script out of debian/rules so | ||
283 | 11 | people can use it to test Python files directly. | ||
284 | 12 | * debian/control, | ||
285 | 13 | debian/rules, | ||
286 | 14 | tests/test_flake8, | ||
287 | 15 | tests/test_pep8: Remove the pep8 check because flake8 will use | ||
288 | 16 | pycodestyle (formerly called pep8) to check. | ||
289 | 17 | |||
290 | 18 | -- Shih-Yuan Lee (FourDollars) <sylee@canonical.com> Wed, 09 Jun 2021 20:55:21 +0800 | ||
291 | 19 | |||
292 | 1 | oem-scripts (0.97) focal; urgency=medium | 20 | oem-scripts (0.97) focal; urgency=medium |
293 | 2 | 21 | ||
294 | 3 | * pkg-oem-meta: Add linux-oem-20.04c parameter support | 22 | * pkg-oem-meta: Add linux-oem-20.04c parameter support |
295 | diff --git a/debian/control b/debian/control | |||
296 | index f9e9d76..b805787 100644 | |||
297 | --- a/debian/control | |||
298 | +++ b/debian/control | |||
299 | @@ -2,10 +2,10 @@ Source: oem-scripts | |||
300 | 2 | Section: admin | 2 | Section: admin |
301 | 3 | Priority: optional | 3 | Priority: optional |
302 | 4 | Build-Depends: | 4 | Build-Depends: |
303 | 5 | black, | ||
304 | 5 | debhelper (>=11), | 6 | debhelper (>=11), |
305 | 6 | dh-python, | 7 | dh-python, |
306 | 7 | flake8, | 8 | flake8, |
307 | 8 | pep8, | ||
308 | 9 | python3-all, | 9 | python3-all, |
309 | 10 | python3-debian, | 10 | python3-debian, |
310 | 11 | python3-launchpadlib, | 11 | python3-launchpadlib, |
311 | diff --git a/debian/rules b/debian/rules | |||
312 | index a739670..98284ec 100755 | |||
313 | --- a/debian/rules | |||
314 | +++ b/debian/rules | |||
315 | @@ -5,6 +5,6 @@ | |||
316 | 5 | dh $@ --with python3 --buildsystem=pybuild | 5 | dh $@ --with python3 --buildsystem=pybuild |
317 | 6 | 6 | ||
318 | 7 | override_dh_auto_test: | 7 | override_dh_auto_test: |
319 | 8 | ./tests/test_black | ||
320 | 8 | ./tests/test_flake8 | 9 | ./tests/test_flake8 |
321 | 9 | ./tests/test_pep8 | ||
322 | 10 | ./tests/test_shellcheck | 10 | ./tests/test_shellcheck |
323 | diff --git a/get-oem-auth-token b/get-oem-auth-token | |||
324 | index d3e8428..2e8c679 100755 | |||
325 | --- a/get-oem-auth-token | |||
326 | +++ b/get-oem-auth-token | |||
327 | @@ -23,7 +23,7 @@ def prompt_for_credentials(): | |||
328 | 23 | """ | 23 | """ |
329 | 24 | Return username and password collected from stdin. | 24 | Return username and password collected from stdin. |
330 | 25 | """ | 25 | """ |
332 | 26 | print("\rEmail: ", file=sys.stderr, end='') | 26 | print("\rEmail: ", file=sys.stderr, end="") |
333 | 27 | username = input() | 27 | username = input() |
334 | 28 | password = getpass.getpass(stream=sys.stderr) | 28 | password = getpass.getpass(stream=sys.stderr) |
335 | 29 | return username, password | 29 | return username, password |
336 | @@ -41,7 +41,7 @@ def prompt_for_code(): | |||
337 | 41 | """ | 41 | """ |
338 | 42 | Return code collected from stdin. | 42 | Return code collected from stdin. |
339 | 43 | """ | 43 | """ |
341 | 44 | print("\r2FA Code: ", file=sys.stderr, end='') | 44 | print("\r2FA Code: ", file=sys.stderr, end="") |
342 | 45 | return input() | 45 | return input() |
343 | 46 | 46 | ||
344 | 47 | 47 | ||
345 | @@ -52,14 +52,16 @@ def get_session_cookie(browser=get_browser(), args=None): | |||
346 | 52 | """ | 52 | """ |
347 | 53 | 53 | ||
348 | 54 | prop = { | 54 | prop = { |
350 | 55 | 'oem-ibs': { | 55 | "oem-ibs": { |
351 | 56 | "name": "oem-ibs.canonical.com", | 56 | "name": "oem-ibs.canonical.com", |
352 | 57 | "url": "https://oem-ibs.canonical.com/builds/", | 57 | "url": "https://oem-ibs.canonical.com/builds/", |
355 | 58 | "session": "sessionid"}, | 58 | "session": "sessionid", |
356 | 59 | 'oem-share': { | 59 | }, |
357 | 60 | "oem-share": { | ||
358 | 60 | "name": "oem-share.canonical.com", | 61 | "name": "oem-share.canonical.com", |
359 | 61 | "url": "https://oem-share.canonical.com/oem/cesg-builds/", | 62 | "url": "https://oem-share.canonical.com/oem/cesg-builds/", |
361 | 62 | "session": "pysid"} | 63 | "session": "pysid", |
362 | 64 | }, | ||
363 | 63 | } | 65 | } |
364 | 64 | 66 | ||
365 | 65 | bad_creds = False | 67 | bad_creds = False |
366 | @@ -75,8 +77,7 @@ def get_session_cookie(browser=get_browser(), args=None): | |||
367 | 75 | return | 77 | return |
368 | 76 | server = prop[args.server] | 78 | server = prop[args.server] |
369 | 77 | 79 | ||
372 | 78 | print("Authenticating %s with Ubuntu SSO..." % args.server, | 80 | print("Authenticating %s with Ubuntu SSO..." % args.server, file=sys.stderr) |
371 | 79 | file=sys.stderr) | ||
373 | 80 | browser.open(server["url"]) | 81 | browser.open(server["url"]) |
374 | 81 | # apachd2-openid | 82 | # apachd2-openid |
375 | 82 | try: | 83 | try: |
376 | @@ -144,15 +145,16 @@ def get_session_cookie(browser=get_browser(), args=None): | |||
377 | 144 | 145 | ||
378 | 145 | def main(): | 146 | def main(): |
379 | 146 | parser = argparse.ArgumentParser( | 147 | parser = argparse.ArgumentParser( |
384 | 147 | description='Retrieve Canonical internal websites session ID', | 148 | description="Retrieve Canonical internal websites session ID", |
385 | 148 | formatter_class=argparse.ArgumentDefaultsHelpFormatter) | 149 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, |
386 | 149 | parser.add_argument("server", | 150 | ) |
387 | 150 | help="Specify server identifier [oem-ibs|oem-share]") | 151 | parser.add_argument("server", help="Specify server identifier [oem-ibs|oem-share]") |
388 | 151 | parser.add_argument("-u", "--username", help="Specify user's email") | 152 | parser.add_argument("-u", "--username", help="Specify user's email") |
389 | 152 | parser.add_argument("-p", "--password", help="Specify password") | 153 | parser.add_argument("-p", "--password", help="Specify password") |
390 | 153 | parser.add_argument("-c", "--code", help="Specify 2-factor code") | 154 | parser.add_argument("-c", "--code", help="Specify 2-factor code") |
393 | 154 | parser.add_argument("-r", "--retry", help="Sepcify authentication retry", | 155 | parser.add_argument( |
394 | 155 | type=int, default=3) | 156 | "-r", "--retry", help="Sepcify authentication retry", type=int, default=3 |
395 | 157 | ) | ||
396 | 156 | args = parser.parse_args() | 158 | args = parser.parse_args() |
397 | 157 | result = 1 | 159 | result = 1 |
398 | 158 | try: | 160 | try: |
399 | diff --git a/get-oemshare-auth-token b/get-oemshare-auth-token | |||
400 | index 7e1379b..182649f 100755 | |||
401 | --- a/get-oemshare-auth-token | |||
402 | +++ b/get-oemshare-auth-token | |||
403 | @@ -18,7 +18,7 @@ def prompt_for_credentials(): | |||
404 | 18 | """ | 18 | """ |
405 | 19 | Return username and password collected from stdin. | 19 | Return username and password collected from stdin. |
406 | 20 | """ | 20 | """ |
408 | 21 | print("\rEmail: ", file=sys.stderr, end='') | 21 | print("\rEmail: ", file=sys.stderr, end="") |
409 | 22 | username = input() | 22 | username = input() |
410 | 23 | password = getpass.getpass(stream=sys.stderr) | 23 | password = getpass.getpass(stream=sys.stderr) |
411 | 24 | return username, password | 24 | return username, password |
412 | @@ -28,7 +28,7 @@ def prompt_for_code(): | |||
413 | 28 | """ | 28 | """ |
414 | 29 | Return code collected from stdin. | 29 | Return code collected from stdin. |
415 | 30 | """ | 30 | """ |
417 | 31 | print("\r2FA Code: ", file=sys.stderr, end='') | 31 | print("\r2FA Code: ", file=sys.stderr, end="") |
418 | 32 | return input() | 32 | return input() |
419 | 33 | 33 | ||
420 | 34 | 34 | ||
421 | diff --git a/lp-bug b/lp-bug | |||
422 | index 6bfb087..e76abb2 100755 | |||
423 | --- a/lp-bug | |||
424 | +++ b/lp-bug | |||
425 | @@ -30,77 +30,94 @@ from logging import debug, warning, info, critical | |||
426 | 30 | from oem_scripts.LaunchpadLogin import LaunchpadLogin | 30 | from oem_scripts.LaunchpadLogin import LaunchpadLogin |
427 | 31 | from tempfile import TemporaryDirectory | 31 | from tempfile import TemporaryDirectory |
428 | 32 | 32 | ||
431 | 33 | parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, | 33 | parser = argparse.ArgumentParser( |
432 | 34 | epilog=""" | 34 | formatter_class=argparse.RawDescriptionHelpFormatter, |
433 | 35 | epilog=""" | ||
434 | 35 | examples: | 36 | examples: |
435 | 36 | lp-bug copy --output=target_bug_id SOURCE_BUG_ID | 37 | lp-bug copy --output=target_bug_id SOURCE_BUG_ID |
436 | 37 | lp-bug cleanup BUG_ID | 38 | lp-bug cleanup BUG_ID |
438 | 38 | lp-bug cqa-verify [BUG_ID]""") | 39 | lp-bug cqa-verify [BUG_ID]""", |
439 | 40 | ) | ||
440 | 39 | 41 | ||
445 | 40 | parser.add_argument("-d", "--debug", | 42 | parser.add_argument("-d", "--debug", help="print debug messages", action="store_true") |
446 | 41 | help="print debug messages", action="store_true") | 43 | parser.add_argument( |
447 | 42 | parser.add_argument("-q", "--quiet", | 44 | "-q", "--quiet", help="Don't print info messages", action="store_true" |
448 | 43 | help="Don't print info messages", action="store_true") | 45 | ) |
449 | 44 | 46 | ||
450 | 45 | subparsers = parser.add_subparsers(dest="subcommand") | 47 | subparsers = parser.add_subparsers(dest="subcommand") |
451 | 46 | 48 | ||
479 | 47 | copy = subparsers.add_parser('copy', help='[-h] [-o=targetBugID|--output=targetBugID] sourceBugID') | 49 | copy = subparsers.add_parser( |
480 | 48 | copy.add_argument("-o", "--output", | 50 | "copy", help="[-h] [-o=targetBugID|--output=targetBugID] sourceBugID" |
481 | 49 | help="Specify a file name to write the bug number.", | 51 | ) |
482 | 50 | type=argparse.FileType('w', encoding='UTF-8')) | 52 | copy.add_argument( |
483 | 51 | copy.add_argument("-t", "--target", | 53 | "-o", |
484 | 52 | help="Specify the target project, 'oem-priority' by default.", | 54 | "--output", |
485 | 53 | type=str) | 55 | help="Specify a file name to write the bug number.", |
486 | 54 | copy.add_argument("bugID", | 56 | type=argparse.FileType("w", encoding="UTF-8"), |
487 | 55 | help="Specify the bug number on Launchpad to copy from.", type=int) | 57 | ) |
488 | 56 | copy.add_argument("--public", | 58 | copy.add_argument( |
489 | 57 | help="Make the bug public.", action="store_true") | 59 | "-t", |
490 | 58 | 60 | "--target", | |
491 | 59 | cleanup = subparsers.add_parser('cleanup', help='[-h] [--yes] bugID') | 61 | help="Specify the target project, 'oem-priority' by default.", |
492 | 60 | cleanup.add_argument("bugID", | 62 | type=str, |
493 | 61 | help="Specify the bug number on Launchpad to clean up.", type=int) | 63 | ) |
494 | 62 | cleanup.add_argument("--yes", | 64 | copy.add_argument( |
495 | 63 | help="Say yes for all prompts.", action="store_true") | 65 | "bugID", help="Specify the bug number on Launchpad to copy from.", type=int |
496 | 64 | 66 | ) | |
497 | 65 | cqa_verify = subparsers.add_parser('cqa-verify', help='[-h] [--yes] [--dry-run] [bugID]', | 67 | copy.add_argument("--public", help="Make the bug public.", action="store_true") |
498 | 66 | formatter_class=argparse.RawDescriptionHelpFormatter, | 68 | |
499 | 67 | epilog=""" | 69 | cleanup = subparsers.add_parser("cleanup", help="[-h] [--yes] bugID") |
500 | 68 | The 'cqa-verify' subcommand will check the versions in the production archive automatically.""") | 70 | cleanup.add_argument( |
501 | 69 | cqa_verify.add_argument("--yes", | 71 | "bugID", help="Specify the bug number on Launchpad to clean up.", type=int |
502 | 70 | help="Say yes for all prompts.", action="store_true") | 72 | ) |
503 | 71 | cqa_verify.add_argument("--dry-run", | 73 | cleanup.add_argument("--yes", help="Say yes for all prompts.", action="store_true") |
504 | 72 | help="Dry run the process.", action="store_true") | 74 | |
505 | 73 | cqa_verify.add_argument('bugID', nargs='?', type=int) | 75 | cqa_verify = subparsers.add_parser( |
506 | 76 | "cqa-verify", | ||
507 | 77 | help="[-h] [--yes] [--dry-run] [bugID]", | ||
508 | 78 | formatter_class=argparse.RawDescriptionHelpFormatter, | ||
509 | 79 | epilog=""" | ||
510 | 80 | The 'cqa-verify' subcommand will check the versions in the production archive automatically.""", | ||
511 | 81 | ) | ||
512 | 82 | cqa_verify.add_argument("--yes", help="Say yes for all prompts.", action="store_true") | ||
513 | 83 | cqa_verify.add_argument("--dry-run", help="Dry run the process.", action="store_true") | ||
514 | 84 | cqa_verify.add_argument("bugID", nargs="?", type=int) | ||
515 | 74 | 85 | ||
516 | 75 | args = parser.parse_args() | 86 | args = parser.parse_args() |
517 | 76 | 87 | ||
533 | 77 | logging.addLevelName(logging.DEBUG, | 88 | logging.addLevelName( |
534 | 78 | "\033[1;96m%s\033[1;0m" % | 89 | logging.DEBUG, "\033[1;96m%s\033[1;0m" % logging.getLevelName(logging.DEBUG) |
535 | 79 | logging.getLevelName(logging.DEBUG)) | 90 | ) |
536 | 80 | logging.addLevelName(logging.INFO, | 91 | logging.addLevelName( |
537 | 81 | "\033[1;32m%s\033[1;0m" % | 92 | logging.INFO, "\033[1;32m%s\033[1;0m" % logging.getLevelName(logging.INFO) |
538 | 82 | logging.getLevelName(logging.INFO)) | 93 | ) |
539 | 83 | logging.addLevelName(logging.WARNING, | 94 | logging.addLevelName( |
540 | 84 | "\033[1;33m%s\033[1;0m" % | 95 | logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING) |
541 | 85 | logging.getLevelName(logging.WARNING)) | 96 | ) |
542 | 86 | logging.addLevelName(logging.ERROR, | 97 | logging.addLevelName( |
543 | 87 | "\033[1;31m%s\033[1;0m" % | 98 | logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR) |
544 | 88 | logging.getLevelName(logging.ERROR)) | 99 | ) |
545 | 89 | logging.addLevelName(logging.CRITICAL, | 100 | logging.addLevelName( |
546 | 90 | "\033[1;41m%s\033[1;0m" % | 101 | logging.CRITICAL, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.CRITICAL) |
547 | 91 | logging.getLevelName(logging.CRITICAL)) | 102 | ) |
548 | 92 | 103 | ||
549 | 93 | if args.debug: | 104 | if args.debug: |
553 | 94 | logging.basicConfig(format='<%(levelname)s> %(message)s', | 105 | logging.basicConfig( |
554 | 95 | level=logging.DEBUG, | 106 | format="<%(levelname)s> %(message)s", |
555 | 96 | handlers=[logging.StreamHandler(sys.stdout)]) | 107 | level=logging.DEBUG, |
556 | 108 | handlers=[logging.StreamHandler(sys.stdout)], | ||
557 | 109 | ) | ||
558 | 97 | elif not args.quiet: | 110 | elif not args.quiet: |
562 | 98 | logging.basicConfig(format='<%(levelname)s> %(message)s', | 111 | logging.basicConfig( |
563 | 99 | level=logging.INFO, | 112 | format="<%(levelname)s> %(message)s", |
564 | 100 | handlers=[logging.StreamHandler(sys.stdout)]) | 113 | level=logging.INFO, |
565 | 114 | handlers=[logging.StreamHandler(sys.stdout)], | ||
566 | 115 | ) | ||
567 | 101 | else: | 116 | else: |
570 | 102 | logging.basicConfig(format='<%(levelname)s> %(message)s', | 117 | logging.basicConfig( |
571 | 103 | handlers=[logging.StreamHandler(sys.stdout)]) | 118 | format="<%(levelname)s> %(message)s", |
572 | 119 | handlers=[logging.StreamHandler(sys.stdout)], | ||
573 | 120 | ) | ||
574 | 104 | 121 | ||
575 | 105 | 122 | ||
576 | 106 | def _yes_or_ask(yes: bool, message: str) -> bool: | 123 | def _yes_or_ask(yes: bool, message: str) -> bool: |
577 | @@ -111,7 +128,7 @@ def _yes_or_ask(yes: bool, message: str) -> bool: | |||
578 | 111 | res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower() | 128 | res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower() |
579 | 112 | if res not in {"y", "n"}: | 129 | if res not in {"y", "n"}: |
580 | 113 | continue | 130 | continue |
582 | 114 | if res == 'y': | 131 | if res == "y": |
583 | 115 | return True | 132 | return True |
584 | 116 | else: | 133 | else: |
585 | 117 | return False | 134 | return False |
586 | @@ -125,7 +142,7 @@ def copy_bug(lp, bug_number: int, target: str, public: bool, output=None) -> Non | |||
587 | 125 | project = lp.projects["oem-priority"] | 142 | project = lp.projects["oem-priority"] |
588 | 126 | 143 | ||
589 | 127 | if public: | 144 | if public: |
591 | 128 | information_type = 'Public' | 145 | information_type = "Public" |
592 | 129 | else: | 146 | else: |
593 | 130 | information_type = bug.information_type | 147 | information_type = bug.information_type |
594 | 131 | 148 | ||
595 | @@ -134,7 +151,8 @@ def copy_bug(lp, bug_number: int, target: str, public: bool, output=None) -> Non | |||
596 | 134 | target=project, | 151 | target=project, |
597 | 135 | title=bug.title, | 152 | title=bug.title, |
598 | 136 | information_type=information_type, | 153 | information_type=information_type, |
600 | 137 | tags=bug.tags) | 154 | tags=bug.tags, |
601 | 155 | ) | ||
602 | 138 | info(f'LP: #{new_bug.id} - "{new_bug.title}" is created. {new_bug.web_link}') | 156 | info(f'LP: #{new_bug.id} - "{new_bug.title}" is created. {new_bug.web_link}') |
603 | 139 | if output: | 157 | if output: |
604 | 140 | output.write(f"{new_bug.id}\n") | 158 | output.write(f"{new_bug.id}\n") |
605 | @@ -142,7 +160,10 @@ def copy_bug(lp, bug_number: int, target: str, public: bool, output=None) -> Non | |||
606 | 142 | 160 | ||
607 | 143 | def cleanup_bug(lp, bug_number: int, yes: bool) -> None: | 161 | def cleanup_bug(lp, bug_number: int, yes: bool) -> None: |
608 | 144 | bug = lp.bugs[bug_number] | 162 | bug = lp.bugs[bug_number] |
610 | 145 | if not _yes_or_ask(yes, f'Do you want to cleanup all information on LP: #{bug_number} - "{bug.title}"? {bug.web_link}'): | 163 | if not _yes_or_ask( |
611 | 164 | yes, | ||
612 | 165 | f'Do you want to cleanup all information on LP: #{bug_number} - "{bug.title}"? {bug.web_link}', | ||
613 | 166 | ): | ||
614 | 146 | return | 167 | return |
615 | 147 | 168 | ||
616 | 148 | if bug.title != "null": | 169 | if bug.title != "null": |
617 | @@ -159,14 +180,17 @@ def cleanup_bug(lp, bug_number: int, yes: bool) -> None: | |||
618 | 159 | found = False | 180 | found = False |
619 | 160 | 181 | ||
620 | 161 | for bug_task in bug.bug_tasks: | 182 | for bug_task in bug.bug_tasks: |
622 | 162 | if bug_task.bug_target_name == 'null-and-void': | 183 | if bug_task.bug_target_name == "null-and-void": |
623 | 163 | found = True | 184 | found = True |
624 | 164 | 185 | ||
626 | 165 | if not found and bug.information_type == 'Public': | 186 | if not found and bug.information_type == "Public": |
627 | 166 | bug.addTask(target=lp.projects["null-and-void"]) | 187 | bug.addTask(target=lp.projects["null-and-void"]) |
628 | 167 | 188 | ||
629 | 168 | for bug_task in bug.bug_tasks: | 189 | for bug_task in bug.bug_tasks: |
631 | 169 | if bug_task.bug_target_name != 'null-and-void' and bug.information_type == 'Public': | 190 | if ( |
632 | 191 | bug_task.bug_target_name != "null-and-void" | ||
633 | 192 | and bug.information_type == "Public" | ||
634 | 193 | ): | ||
635 | 170 | try: | 194 | try: |
636 | 171 | bug_task.lp_delete() | 195 | bug_task.lp_delete() |
637 | 172 | except lazr.restfulclient.errors.BadRequest as e: | 196 | except lazr.restfulclient.errors.BadRequest as e: |
638 | @@ -183,21 +207,27 @@ def cleanup_bug(lp, bug_number: int, yes: bool) -> None: | |||
639 | 183 | if subscription.canBeUnsubscribedByUser(): | 207 | if subscription.canBeUnsubscribedByUser(): |
640 | 184 | bug.unsubscribe(person=lp.people[subscription.person.name]) | 208 | bug.unsubscribe(person=lp.people[subscription.person.name]) |
641 | 185 | else: | 209 | else: |
643 | 186 | warning(f"{lp.me.name} doesn't have the permission to unsubscribe {subscription.person.name}.") | 210 | warning( |
644 | 211 | f"{lp.me.name} doesn't have the permission to unsubscribe {subscription.person.name}." | ||
645 | 212 | ) | ||
646 | 187 | 213 | ||
648 | 188 | info(f'LP: #{bug.id} has been cleaned. {bug.web_link}') | 214 | info(f"LP: #{bug.id} has been cleaned. {bug.web_link}") |
649 | 189 | 215 | ||
650 | 190 | 216 | ||
652 | 191 | def _run_command(command: list or tuple, returncode=(0,), env=None, silent=False) -> (str, str, int): | 217 | def _run_command( |
653 | 218 | command: list or tuple, returncode=(0,), env=None, silent=False | ||
654 | 219 | ) -> (str, str, int): | ||
655 | 192 | if not silent: | 220 | if not silent: |
656 | 193 | debug("$ " + " ".join(command)) | 221 | debug("$ " + " ".join(command)) |
658 | 194 | proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) | 222 | proc = subprocess.Popen( |
659 | 223 | command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env | ||
660 | 224 | ) | ||
661 | 195 | out, err = proc.communicate() | 225 | out, err = proc.communicate() |
662 | 196 | 226 | ||
663 | 197 | if out: | 227 | if out: |
665 | 198 | out = out.decode('utf-8').strip() | 228 | out = out.decode("utf-8").strip() |
666 | 199 | if err: | 229 | if err: |
668 | 200 | err = err.decode('utf-8').strip() | 230 | err = err.decode("utf-8").strip() |
669 | 201 | 231 | ||
670 | 202 | if proc.returncode not in returncode: | 232 | if proc.returncode not in returncode: |
671 | 203 | critical(f"return {proc.returncode}") | 233 | critical(f"return {proc.returncode}") |
672 | @@ -216,44 +246,46 @@ def _run_command(command: list or tuple, returncode=(0,), env=None, silent=False | |||
673 | 216 | return (out, err, proc.returncode) | 246 | return (out, err, proc.returncode) |
674 | 217 | 247 | ||
675 | 218 | 248 | ||
677 | 219 | pattern = re.compile(r'(.*) \(==(.*)\)') | 249 | pattern = re.compile(r"(.*) \(==(.*)\)") |
678 | 220 | 250 | ||
679 | 221 | 251 | ||
680 | 222 | def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None: | 252 | def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None: |
682 | 223 | cloudberry = lp.projects['cloudberry'] | 253 | cloudberry = lp.projects["cloudberry"] |
683 | 224 | # Only deal with those bugs with 'Fix Committed' and 'request of publish_package' in the title. | 254 | # Only deal with those bugs with 'Fix Committed' and 'request of publish_package' in the title. |
685 | 225 | tasks = cloudberry.searchTasks(status=['Fix Committed'], search_text='request of publish_package') | 255 | tasks = cloudberry.searchTasks( |
686 | 256 | status=["Fix Committed"], search_text="request of publish_package" | ||
687 | 257 | ) | ||
688 | 226 | for task in tasks: | 258 | for task in tasks: |
689 | 227 | bug = task.bug | 259 | bug = task.bug |
690 | 228 | # Only deal with one bug id when it is provided. | 260 | # Only deal with one bug id when it is provided. |
691 | 229 | if bugID and bug.id != bugID: | 261 | if bugID and bug.id != bugID: |
692 | 230 | continue | 262 | continue |
693 | 231 | # Only deal with those bugs with this tag. | 263 | # Only deal with those bugs with this tag. |
695 | 232 | if 'cqa-verified-staging' not in bug.tags: | 264 | if "cqa-verified-staging" not in bug.tags: |
696 | 233 | continue | 265 | continue |
697 | 234 | info(f'LP: #{bug.id} "{bug.title}"\n{bug.description}') | 266 | info(f'LP: #{bug.id} "{bug.title}"\n{bug.description}') |
698 | 235 | debug(bug.tags) | 267 | debug(bug.tags) |
699 | 236 | multiple = False | 268 | multiple = False |
700 | 237 | packages = [] | 269 | packages = [] |
701 | 238 | prod_archive_line = "" | 270 | prod_archive_line = "" |
703 | 239 | lines = bug.description.split('\n') | 271 | lines = bug.description.split("\n") |
704 | 240 | # Parse the package list and the production archive in the bug description. | 272 | # Parse the package list and the production archive in the bug description. |
705 | 241 | for idx, line in enumerate(lines): | 273 | for idx, line in enumerate(lines): |
707 | 242 | if line.startswith('Package: '): | 274 | if line.startswith("Package: "): |
708 | 243 | debug(line) | 275 | debug(line) |
710 | 244 | if line.endswith(','): | 276 | if line.endswith(","): |
711 | 245 | multiple = True | 277 | multiple = True |
712 | 246 | packages.append(line[9:-1]) | 278 | packages.append(line[9:-1]) |
713 | 247 | else: | 279 | else: |
715 | 248 | packages = line[9:].split(',') | 280 | packages = line[9:].split(",") |
716 | 249 | elif multiple is True: | 281 | elif multiple is True: |
717 | 250 | debug(line) | 282 | debug(line) |
719 | 251 | if not line.endswith(','): | 283 | if not line.endswith(","): |
720 | 252 | multiple = False | 284 | multiple = False |
721 | 253 | packages.append(line.strip()) | 285 | packages.append(line.strip()) |
722 | 254 | else: | 286 | else: |
723 | 255 | packages.append(line.strip()[:-1]) | 287 | packages.append(line.strip()[:-1]) |
725 | 256 | elif 'production archive' in line: | 288 | elif "production archive" in line: |
726 | 257 | prod_archive_line = lines[idx + 2] | 289 | prod_archive_line = lines[idx + 2] |
727 | 258 | # Skip the bug when it found no production archive. | 290 | # Skip the bug when it found no production archive. |
728 | 259 | if not prod_archive_line: | 291 | if not prod_archive_line: |
729 | @@ -272,9 +304,9 @@ def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None: | |||
730 | 272 | # Check if the production archive provided the packages and versions. | 304 | # Check if the production archive provided the packages and versions. |
731 | 273 | with TemporaryDirectory() as tmpdir: | 305 | with TemporaryDirectory() as tmpdir: |
732 | 274 | failed = False | 306 | failed = False |
736 | 275 | fingerprint = 'F9FDA6BED73CDC22' | 307 | fingerprint = "F9FDA6BED73CDC22" |
737 | 276 | series = ['focal', 'bionic', 'xenial'] | 308 | series = ["focal", "bionic", "xenial"] |
738 | 277 | codename = '' | 309 | codename = "" |
739 | 278 | for item in series: | 310 | for item in series: |
740 | 279 | if item in prod_archive_line: | 311 | if item in prod_archive_line: |
741 | 280 | codename = item | 312 | codename = item |
742 | @@ -284,23 +316,32 @@ def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None: | |||
743 | 284 | continue | 316 | continue |
744 | 285 | # Setup the temporary apt dir to include the production archive. | 317 | # Setup the temporary apt dir to include the production archive. |
745 | 286 | output, _, returncode = _run_command( | 318 | output, _, returncode = _run_command( |
754 | 287 | ['setup-apt-dir.sh', | 319 | [ |
755 | 288 | '-c', codename, | 320 | "setup-apt-dir.sh", |
756 | 289 | '--disable-updates', | 321 | "-c", |
757 | 290 | '--disable-backports', | 322 | codename, |
758 | 291 | '--apt-dir', tmpdir, | 323 | "--disable-updates", |
759 | 292 | '--extra-key', fingerprint, | 324 | "--disable-backports", |
760 | 293 | '--extra-repo', prod_archive_line.replace("deb ", f"deb [signed-by={tmpdir}/{fingerprint}.pub] ") | 325 | "--apt-dir", |
761 | 294 | ], returncode=(0, 100)) | 326 | tmpdir, |
762 | 327 | "--extra-key", | ||
763 | 328 | fingerprint, | ||
764 | 329 | "--extra-repo", | ||
765 | 330 | prod_archive_line.replace( | ||
766 | 331 | "deb ", f"deb [signed-by={tmpdir}/{fingerprint}.pub] " | ||
767 | 332 | ), | ||
768 | 333 | ], | ||
769 | 334 | returncode=(0, 100), | ||
770 | 335 | ) | ||
771 | 295 | # Skip the bug when it found some error in the production archive. | 336 | # Skip the bug when it found some error in the production archive. |
772 | 296 | if returncode == 100: | 337 | if returncode == 100: |
773 | 297 | warning(output) | 338 | warning(output) |
774 | 298 | continue | 339 | continue |
775 | 299 | # Use the temporary apt dir to compare the package versions. | 340 | # Use the temporary apt dir to compare the package versions. |
776 | 300 | for pkg, ver in packages: | 341 | for pkg, ver in packages: |
780 | 301 | output, _, _ = _run_command(['pkg-list', '--apt-dir', tmpdir, pkg]) | 342 | output, _, _ = _run_command(["pkg-list", "--apt-dir", tmpdir, pkg]) |
781 | 302 | for line in output.split('\n'): | 343 | for line in output.split("\n"): |
782 | 303 | archive_pkg, archive_ver = line.split(' ') | 344 | archive_pkg, archive_ver = line.split(" ") |
783 | 304 | if pkg == archive_pkg: | 345 | if pkg == archive_pkg: |
784 | 305 | if apt_pkg.version_compare(archive_ver, ver) >= 0: | 346 | if apt_pkg.version_compare(archive_ver, ver) >= 0: |
785 | 306 | print(f"{line} >= {ver}") | 347 | print(f"{line} >= {ver}") |
786 | @@ -309,9 +350,12 @@ def cloudberry_cqa_verified(lp, yes: bool, bugID: int) -> None: | |||
787 | 309 | failed = True | 350 | failed = True |
788 | 310 | # Tag "cqa-verified" if no failure. | 351 | # Tag "cqa-verified" if no failure. |
789 | 311 | if not failed: | 352 | if not failed: |
791 | 312 | if not args.dry_run and _yes_or_ask(yes, f'Would you like to tag "cqa-verified" for LP: #{bug.id} "{bug.title}"?'): | 353 | if not args.dry_run and _yes_or_ask( |
792 | 354 | yes, | ||
793 | 355 | f'Would you like to tag "cqa-verified" for LP: #{bug.id} "{bug.title}"?', | ||
794 | 356 | ): | ||
795 | 313 | tags = bug.tags.copy() | 357 | tags = bug.tags.copy() |
797 | 314 | tags.append('cqa-verified') | 358 | tags.append("cqa-verified") |
798 | 315 | if f"oem-scripts-{oem_scripts.__version__}" not in tags: | 359 | if f"oem-scripts-{oem_scripts.__version__}" not in tags: |
799 | 316 | tags.append(f"oem-scripts-{oem_scripts.__version__}") | 360 | tags.append(f"oem-scripts-{oem_scripts.__version__}") |
800 | 317 | bug.tags = tags | 361 | bug.tags = tags |
801 | @@ -322,11 +366,11 @@ if args.subcommand: | |||
802 | 322 | login = LaunchpadLogin() | 366 | login = LaunchpadLogin() |
803 | 323 | lp = login.lp | 367 | lp = login.lp |
804 | 324 | 368 | ||
806 | 325 | if args.subcommand == 'copy': | 369 | if args.subcommand == "copy": |
807 | 326 | copy_bug(lp, args.bugID, output=args.output, target=args.target, public=args.public) | 370 | copy_bug(lp, args.bugID, output=args.output, target=args.target, public=args.public) |
809 | 327 | elif args.subcommand == 'cleanup': | 371 | elif args.subcommand == "cleanup": |
810 | 328 | cleanup_bug(lp, args.bugID, args.yes) | 372 | cleanup_bug(lp, args.bugID, args.yes) |
812 | 329 | elif args.subcommand == 'cqa-verify': | 373 | elif args.subcommand == "cqa-verify": |
813 | 330 | cloudberry_cqa_verified(lp, args.yes, args.bugID) | 374 | cloudberry_cqa_verified(lp, args.yes, args.bugID) |
814 | 331 | else: | 375 | else: |
815 | 332 | parser.print_help() | 376 | parser.print_help() |
816 | diff --git a/mir-bug b/mir-bug | |||
817 | index dee3a25..9683a3b 100755 | |||
818 | --- a/mir-bug | |||
819 | +++ b/mir-bug | |||
820 | @@ -36,75 +36,108 @@ from oem_scripts.logging import setup_logging | |||
821 | 36 | from string import Template | 36 | from string import Template |
822 | 37 | from tempfile import TemporaryDirectory | 37 | from tempfile import TemporaryDirectory |
823 | 38 | 38 | ||
829 | 39 | SUBSCRIBER_LIST = ('oem-solutions-engineers', 'ubuntu-sponsors', 'ubuntu-desktop') | 39 | SUBSCRIBER_LIST = ("oem-solutions-engineers", "ubuntu-sponsors", "ubuntu-desktop") |
830 | 40 | TAG_LIST = ('oem-meta-packages', 'oem-priority', f'oem-scripts-{oem_scripts.__version__:.2f}') | 40 | TAG_LIST = ( |
831 | 41 | 41 | "oem-meta-packages", | |
832 | 42 | parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, | 42 | "oem-priority", |
833 | 43 | epilog=""" | 43 | f"oem-scripts-{oem_scripts.__version__:.2f}", |
834 | 44 | ) | ||
835 | 45 | |||
836 | 46 | parser = argparse.ArgumentParser( | ||
837 | 47 | formatter_class=argparse.RawDescriptionHelpFormatter, | ||
838 | 48 | epilog=""" | ||
839 | 44 | examples: | 49 | examples: |
840 | 45 | mir-bug create sutton.newell ace \"ThinkPad X1 Carbon Gen 8\" | 50 | mir-bug create sutton.newell ace \"ThinkPad X1 Carbon Gen 8\" |
841 | 46 | mir-bug check BUG_NUMBER | 51 | mir-bug check BUG_NUMBER |
842 | 47 | mir-bug update BUG_NUMBER | 52 | mir-bug update BUG_NUMBER |
844 | 48 | mir-bug collect oem-meta-mir-bugs.json""") | 53 | mir-bug collect oem-meta-mir-bugs.json""", |
845 | 54 | ) | ||
846 | 49 | 55 | ||
851 | 50 | parser.add_argument("-d", "--debug", | 56 | parser.add_argument("-d", "--debug", help="print debug messages", action="store_true") |
852 | 51 | help="print debug messages", action="store_true") | 57 | parser.add_argument( |
853 | 52 | parser.add_argument("-q", "--quiet", | 58 | "-q", "--quiet", help="Don't print info messages", action="store_true" |
854 | 53 | help="Don't print info messages", action="store_true") | 59 | ) |
855 | 54 | 60 | ||
856 | 55 | subparsers = parser.add_subparsers(dest="subcommand") | 61 | subparsers = parser.add_subparsers(dest="subcommand") |
857 | 56 | 62 | ||
903 | 57 | create = subparsers.add_parser('create', help='[-h] [-o=bugID|--output=bugID] oemCodename platformCodename deviceName') | 63 | create = subparsers.add_parser( |
904 | 58 | create.add_argument("oemCodename", | 64 | "create", |
905 | 59 | help="Such as somerville, stella, or sutton.simon") | 65 | help="[-h] [-o=bugID|--output=bugID] oemCodename platformCodename deviceName", |
906 | 60 | create.add_argument("platformCodename", | 66 | ) |
907 | 61 | help="Name deined by PM, like ace.") | 67 | create.add_argument("oemCodename", help="Such as somerville, stella, or sutton.simon") |
908 | 62 | create.add_argument("deviceName", | 68 | create.add_argument("platformCodename", help="Name deined by PM, like ace.") |
909 | 63 | help="ThinkPad X1 Carbon Gen 8") | 69 | create.add_argument("deviceName", help="ThinkPad X1 Carbon Gen 8") |
910 | 64 | create.add_argument("-o", "--output", | 70 | create.add_argument( |
911 | 65 | help="Specify a file name to write the bug number.", | 71 | "-o", |
912 | 66 | type=argparse.FileType('w', encoding='UTF-8')) | 72 | "--output", |
913 | 67 | 73 | help="Specify a file name to write the bug number.", | |
914 | 68 | update = subparsers.add_parser('update', help='[-h] [--ready] [--skip] [--tz=UTC-8] [--yes] bugNumber') | 74 | type=argparse.FileType("w", encoding="UTF-8"), |
915 | 69 | update.add_argument("bugNumber", | 75 | ) |
916 | 70 | help="Specify the bug number on Launchpad to update.", type=int) | 76 | |
917 | 71 | update.add_argument("--yes", | 77 | update = subparsers.add_parser( |
918 | 72 | help="Say yes for all prompts.", action="store_true") | 78 | "update", help="[-h] [--ready] [--skip] [--tz=UTC-8] [--yes] bugNumber" |
919 | 73 | update.add_argument("--skip", | 79 | ) |
920 | 74 | help="Skip updating bootstrap branch of Git repository.", action="store_true") | 80 | update.add_argument( |
921 | 75 | update.add_argument("--tz", | 81 | "bugNumber", help="Specify the bug number on Launchpad to update.", type=int |
922 | 76 | help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8") | 82 | ) |
923 | 77 | update.add_argument("--ready", | 83 | update.add_argument("--yes", help="Say yes for all prompts.", action="store_true") |
924 | 78 | action="store_true", | 84 | update.add_argument( |
925 | 79 | help="Update the bug to Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.") | 85 | "--skip", |
926 | 80 | 86 | help="Skip updating bootstrap branch of Git repository.", | |
927 | 81 | check = subparsers.add_parser('check', help='[-h] [--ready] [--skip] [--tz=UTC-8] bugNumber') | 87 | action="store_true", |
928 | 82 | check.add_argument("bugNumber", | 88 | ) |
929 | 83 | help="Specify the bug number on Launchpad to do some sanity checks.", type=int) | 89 | update.add_argument( |
930 | 84 | check.add_argument("--skip", | 90 | "--tz", help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8" |
931 | 85 | help="Skip checking oem branch of Git repository.", action="store_true") | 91 | ) |
932 | 86 | check.add_argument("--tz", | 92 | update.add_argument( |
933 | 87 | help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8") | 93 | "--ready", |
934 | 88 | check.add_argument("--ready", | 94 | action="store_true", |
935 | 89 | action="store_true", | 95 | help="Update the bug to Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.", |
936 | 90 | help="Check if the bug is Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.") | 96 | ) |
937 | 91 | 97 | ||
938 | 92 | collect = subparsers.add_parser('collect', help='[-h] [--ubuntu-certified] jsonFile') | 98 | check = subparsers.add_parser( |
939 | 93 | collect.add_argument("json", | 99 | "check", help="[-h] [--ready] [--skip] [--tz=UTC-8] bugNumber" |
940 | 94 | help="Specify the json file name to write.", | 100 | ) |
941 | 95 | type=argparse.FileType('w', encoding='UTF-8')) | 101 | check.add_argument( |
942 | 96 | collect.add_argument("--ubuntu-certified", | 102 | "bugNumber", |
943 | 97 | action="store_true", | 103 | help="Specify the bug number on Launchpad to do some sanity checks.", |
944 | 98 | help="Only collect those bugs with the 'ubuntu-certified' tag.") | 104 | type=int, |
945 | 99 | collect.add_argument("--verification-needed", | 105 | ) |
946 | 100 | action="store_true", | 106 | check.add_argument( |
947 | 101 | help="Only collect those bugs with the 'verification-needed' tag.") | 107 | "--skip", help="Skip checking oem branch of Git repository.", action="store_true" |
948 | 108 | ) | ||
949 | 109 | check.add_argument( | ||
950 | 110 | "--tz", help="Specify the value for TZ. (UTC-8 by default)", default="UTC-8" | ||
951 | 111 | ) | ||
952 | 112 | check.add_argument( | ||
953 | 113 | "--ready", | ||
954 | 114 | action="store_true", | ||
955 | 115 | help="Check if the bug is Fix Committed, also affects 'Ubuntu', and subscribe 'ubuntu-sponsors' and 'ubuntu-desktop'.", | ||
956 | 116 | ) | ||
957 | 117 | |||
958 | 118 | collect = subparsers.add_parser("collect", help="[-h] [--ubuntu-certified] jsonFile") | ||
959 | 119 | collect.add_argument( | ||
960 | 120 | "json", | ||
961 | 121 | help="Specify the json file name to write.", | ||
962 | 122 | type=argparse.FileType("w", encoding="UTF-8"), | ||
963 | 123 | ) | ||
964 | 124 | collect.add_argument( | ||
965 | 125 | "--ubuntu-certified", | ||
966 | 126 | action="store_true", | ||
967 | 127 | help="Only collect those bugs with the 'ubuntu-certified' tag.", | ||
968 | 128 | ) | ||
969 | 129 | collect.add_argument( | ||
970 | 130 | "--verification-needed", | ||
971 | 131 | action="store_true", | ||
972 | 132 | help="Only collect those bugs with the 'verification-needed' tag.", | ||
973 | 133 | ) | ||
974 | 102 | 134 | ||
975 | 103 | args = parser.parse_args() | 135 | args = parser.parse_args() |
976 | 104 | 136 | ||
977 | 105 | setup_logging(debug=args.debug, quiet=args.quiet) | 137 | setup_logging(debug=args.debug, quiet=args.quiet) |
978 | 106 | 138 | ||
980 | 107 | mir_bug_description_template = Template(f"""[Availability] | 139 | mir_bug_description_template = Template( |
981 | 140 | f"""[Availability] | ||
982 | 108 | This is a meta package for https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM that means the package doesn't exist in Debian or Ubuntu archive yet. | 141 | This is a meta package for https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM that means the package doesn't exist in Debian or Ubuntu archive yet. |
983 | 109 | The source code of the $metaPkgName for focal: | 142 | The source code of the $metaPkgName for focal: |
984 | 110 | git clone -b $branchName https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-$oemCodenameNogroup-projects-meta | 143 | git clone -b $branchName https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-$oemCodenameNogroup-projects-meta |
985 | @@ -131,26 +164,27 @@ Canonical OEM Enablement Team will take care of the maintenance. | |||
986 | 131 | [Background information] | 164 | [Background information] |
987 | 132 | Please check https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM for details. | 165 | Please check https://wiki.ubuntu.com/MIRTeam/Exceptions/OEM for details. |
988 | 133 | 166 | ||
990 | 134 | Please use "oem-metapackage-mir-check" in lp:ubuntu-archive-tools to verify this MIR against the reference package in the archive.""") | 167 | Please use "oem-metapackage-mir-check" in lp:ubuntu-archive-tools to verify this MIR against the reference package in the archive.""" |
991 | 168 | ) | ||
992 | 135 | 169 | ||
994 | 136 | pattern = re.compile(r'.*\[MIR\]\W*oem-([^-]*)-(.*)-meta\W*') | 170 | pattern = re.compile(r".*\[MIR\]\W*oem-([^-]*)-(.*)-meta\W*") |
995 | 137 | 171 | ||
996 | 138 | 172 | ||
997 | 139 | def create_bug(lp, oemCodename, platformCodename, deviceName): | 173 | def create_bug(lp, oemCodename, platformCodename, deviceName): |
998 | 140 | info("Creating bug...") | 174 | info("Creating bug...") |
1000 | 141 | tempList = oemCodename.split('.') | 175 | tempList = oemCodename.split(".") |
1001 | 142 | oemCodenameNogroup = tempList[0] | 176 | oemCodenameNogroup = tempList[0] |
1002 | 143 | if len(tempList) == 2: | 177 | if len(tempList) == 2: |
1003 | 144 | oemGroupName = tempList[1] | 178 | oemGroupName = tempList[1] |
1004 | 145 | else: | 179 | else: |
1006 | 146 | oemGroupName = '' | 180 | oemGroupName = "" |
1007 | 147 | 181 | ||
1008 | 148 | # metaPkgName's examples | 182 | # metaPkgName's examples |
1009 | 149 | # oem-somerville-metapod-meta | 183 | # oem-somerville-metapod-meta |
1010 | 150 | # oem-sutton.newell-ace-meta | 184 | # oem-sutton.newell-ace-meta |
1011 | 151 | metaPkgName = "oem-" + oemCodename + "-" + platformCodename + "-meta" | 185 | metaPkgName = "oem-" + oemCodename + "-" + platformCodename + "-meta" |
1012 | 152 | 186 | ||
1014 | 153 | if oemGroupName.strip() != '': | 187 | if oemGroupName.strip() != "": |
1015 | 154 | branchName = oemGroupName + "." + platformCodename + "-focal-ubuntu" | 188 | branchName = oemGroupName + "." + platformCodename + "-focal-ubuntu" |
1016 | 155 | else: | 189 | else: |
1017 | 156 | branchName = platformCodename + "-focal-ubuntu" | 190 | branchName = platformCodename + "-focal-ubuntu" |
1018 | @@ -162,8 +196,15 @@ def create_bug(lp, oemCodename, platformCodename, deviceName): | |||
1019 | 162 | metaPkgName=metaPkgName, | 196 | metaPkgName=metaPkgName, |
1020 | 163 | branchName=branchName, | 197 | branchName=branchName, |
1021 | 164 | oemCodenameNogroup=oemCodenameNogroup, | 198 | oemCodenameNogroup=oemCodenameNogroup, |
1024 | 165 | deviceName=deviceName) | 199 | deviceName=deviceName, |
1025 | 166 | bug = lp.bugs.createBug(description=bd, target=project, title=bt, information_type='Public', tags=TAG_LIST) | 200 | ) |
1026 | 201 | bug = lp.bugs.createBug( | ||
1027 | 202 | description=bd, | ||
1028 | 203 | target=project, | ||
1029 | 204 | title=bt, | ||
1030 | 205 | information_type="Public", | ||
1031 | 206 | tags=TAG_LIST, | ||
1032 | 207 | ) | ||
1033 | 167 | 208 | ||
1034 | 168 | info("meta package public bug: " + bug.web_link) | 209 | info("meta package public bug: " + bug.web_link) |
1035 | 169 | 210 | ||
1036 | @@ -171,23 +212,32 @@ def create_bug(lp, oemCodename, platformCodename, deviceName): | |||
1037 | 171 | args.output.write(f"{bug.id}\n") | 212 | args.output.write(f"{bug.id}\n") |
1038 | 172 | 213 | ||
1039 | 173 | for task in bug.bug_tasks: | 214 | for task in bug.bug_tasks: |
1042 | 174 | task.status = 'Confirmed' | 215 | task.status = "Confirmed" |
1043 | 175 | task.importance = 'Critical' | 216 | task.importance = "Critical" |
1044 | 176 | # Assign to reporter by default | 217 | # Assign to reporter by default |
1045 | 177 | task.assignee = lp.me | 218 | task.assignee = lp.me |
1046 | 178 | task.lp_save() | 219 | task.lp_save() |
1047 | 179 | 220 | ||
1048 | 180 | # Subscribe the oem-solutions-engineers | 221 | # Subscribe the oem-solutions-engineers |
1050 | 181 | bug.subscribe(person=lp.people['oem-solutions-engineers']) | 222 | bug.subscribe(person=lp.people["oem-solutions-engineers"]) |
1051 | 182 | bug.lp_save() | 223 | bug.lp_save() |
1052 | 183 | 224 | ||
1053 | 184 | 225 | ||
1054 | 185 | def collect_bugs(lp, output): | 226 | def collect_bugs(lp, output): |
1055 | 186 | info("Collecting bugs...") | 227 | info("Collecting bugs...") |
1057 | 187 | project = lp.projects['oem-priority'] | 228 | project = lp.projects["oem-priority"] |
1058 | 188 | tasks = project.searchTasks( | 229 | tasks = project.searchTasks( |
1061 | 189 | status=['New', 'Incomplete', 'Triaged', 'Opinion', 'Confirmed', 'In Progress', 'Fix Committed'], | 230 | status=[ |
1062 | 190 | search_text='[MIR]') | 231 | "New", |
1063 | 232 | "Incomplete", | ||
1064 | 233 | "Triaged", | ||
1065 | 234 | "Opinion", | ||
1066 | 235 | "Confirmed", | ||
1067 | 236 | "In Progress", | ||
1068 | 237 | "Fix Committed", | ||
1069 | 238 | ], | ||
1070 | 239 | search_text="[MIR]", | ||
1071 | 240 | ) | ||
1072 | 191 | try: | 241 | try: |
1073 | 192 | total = int(tasks.total_size) | 242 | total = int(tasks.total_size) |
1074 | 193 | except TypeError: # When the total size becomes more than 50, it won't return 'int' but 'ScalarValue' instead. | 243 | except TypeError: # When the total size becomes more than 50, it won't return 'int' but 'ScalarValue' instead. |
1075 | @@ -197,28 +247,38 @@ def collect_bugs(lp, output): | |||
1076 | 197 | for counter, task in enumerate(tasks, 1): | 247 | for counter, task in enumerate(tasks, 1): |
1077 | 198 | bug = task.bug | 248 | bug = task.bug |
1078 | 199 | 249 | ||
1081 | 200 | if '[MIR]' not in bug.title or 'oem' not in bug.title or 'meta' not in bug.title: | 250 | if ( |
1082 | 201 | info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT MATCHED**") | 251 | "[MIR]" not in bug.title |
1083 | 252 | or "oem" not in bug.title | ||
1084 | 253 | or "meta" not in bug.title | ||
1085 | 254 | ): | ||
1086 | 255 | info( | ||
1087 | 256 | f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT MATCHED**" | ||
1088 | 257 | ) | ||
1089 | 202 | continue | 258 | continue |
1090 | 203 | 259 | ||
1093 | 204 | if args.ubuntu_certified and 'ubuntu-certified' not in bug.tags: | 260 | if args.ubuntu_certified and "ubuntu-certified" not in bug.tags: |
1094 | 205 | info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT CERTIFIED**") | 261 | info( |
1095 | 262 | f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT CERTIFIED**" | ||
1096 | 263 | ) | ||
1097 | 206 | continue | 264 | continue |
1098 | 207 | 265 | ||
1099 | 208 | if args.verification_needed: | 266 | if args.verification_needed: |
1100 | 209 | verification_needed = False | 267 | verification_needed = False |
1101 | 210 | for tag in bug.tags: | 268 | for tag in bug.tags: |
1103 | 211 | if tag.startswith('verification-needed'): | 269 | if tag.startswith("verification-needed"): |
1104 | 212 | verification_needed = True | 270 | verification_needed = True |
1105 | 213 | if not verification_needed: | 271 | if not verification_needed: |
1107 | 214 | info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT VERIFICATION NEEDED**") | 272 | info( |
1108 | 273 | f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status}) **NOT VERIFICATION NEEDED**" | ||
1109 | 274 | ) | ||
1110 | 215 | continue | 275 | continue |
1111 | 216 | 276 | ||
1112 | 217 | result = pattern.match(bug.title) | 277 | result = pattern.match(bug.title) |
1113 | 218 | git = None | 278 | git = None |
1114 | 219 | if result: | 279 | if result: |
1117 | 220 | if '.' in result.group(1): | 280 | if "." in result.group(1): |
1118 | 221 | project, group = result.group(1).split('.') | 281 | project, group = result.group(1).split(".") |
1119 | 222 | else: | 282 | else: |
1120 | 223 | project = result.group(1) | 283 | project = result.group(1) |
1121 | 224 | group = None | 284 | group = None |
1122 | @@ -234,32 +294,38 @@ def collect_bugs(lp, output): | |||
1123 | 234 | 294 | ||
1124 | 235 | ubuntu_status = None | 295 | ubuntu_status = None |
1125 | 236 | for bug_task in bug.bug_tasks: | 296 | for bug_task in bug.bug_tasks: |
1127 | 237 | if bug_task.bug_target_name == 'ubuntu': | 297 | if bug_task.bug_target_name == "ubuntu": |
1128 | 238 | ubuntu_status = bug_task.status | 298 | ubuntu_status = bug_task.status |
1129 | 239 | 299 | ||
1130 | 240 | attachments = [] | 300 | attachments = [] |
1131 | 241 | for attachment in bug.attachments: | 301 | for attachment in bug.attachments: |
1133 | 242 | attachments.append({'title': attachment.title, 'data_link': attachment.data_link, 'type': attachment.type}) | 302 | attachments.append( |
1134 | 303 | { | ||
1135 | 304 | "title": attachment.title, | ||
1136 | 305 | "data_link": attachment.data_link, | ||
1137 | 306 | "type": attachment.type, | ||
1138 | 307 | } | ||
1139 | 308 | ) | ||
1140 | 243 | clip = { | 309 | clip = { |
1154 | 244 | 'bug': "https://bugs.launchpad.net/bugs/%s" % bug.id, | 310 | "bug": "https://bugs.launchpad.net/bugs/%s" % bug.id, |
1155 | 245 | 'link': bug.self_link, | 311 | "link": bug.self_link, |
1156 | 246 | 'title': bug.title, | 312 | "title": bug.title, |
1157 | 247 | 'importance': task.importance, | 313 | "importance": task.importance, |
1158 | 248 | 'tag': bug.tags, | 314 | "tag": bug.tags, |
1159 | 249 | 'description': bug.description, | 315 | "description": bug.description, |
1160 | 250 | 'status': task.status, | 316 | "status": task.status, |
1161 | 251 | 'ubuntu_status': ubuntu_status, | 317 | "ubuntu_status": ubuntu_status, |
1162 | 252 | 'owner': task.owner.name, | 318 | "owner": task.owner.name, |
1163 | 253 | 'assignee': task.assignee.name if task.assignee else 'none', | 319 | "assignee": task.assignee.name if task.assignee else "none", |
1164 | 254 | 'subscriptions': subscriptions, | 320 | "subscriptions": subscriptions, |
1165 | 255 | 'attachments': attachments, | 321 | "attachments": attachments, |
1166 | 256 | 'git': git | 322 | "git": git, |
1167 | 257 | } | 323 | } |
1168 | 258 | bugs.append(clip) | 324 | bugs.append(clip) |
1169 | 259 | info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status})") | 325 | info(f"{counter}/{total} LP: #{bug.id} {bug.title} ({task.status})") |
1170 | 260 | 326 | ||
1171 | 261 | info("total: %d matched" % len(bugs)) | 327 | info("total: %d matched" % len(bugs)) |
1173 | 262 | output.write(json.dumps(bugs, sort_keys=True, separators=(',', ':'))) | 328 | output.write(json.dumps(bugs, sort_keys=True, separators=(",", ":"))) |
1174 | 263 | output.write("\n") | 329 | output.write("\n") |
1175 | 264 | 330 | ||
1176 | 265 | 331 | ||
1177 | @@ -271,7 +337,7 @@ def yes_or_ask(yes: bool, message: str) -> bool: | |||
1178 | 271 | res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower() | 337 | res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower() |
1179 | 272 | if res not in {"y", "n"}: | 338 | if res not in {"y", "n"}: |
1180 | 273 | continue | 339 | continue |
1182 | 274 | if res == 'y': | 340 | if res == "y": |
1183 | 275 | return True | 341 | return True |
1184 | 276 | else: | 342 | else: |
1185 | 277 | return False | 343 | return False |
1186 | @@ -288,7 +354,9 @@ def update_bug(lp, bug_number: int, yes: bool) -> None: | |||
1187 | 288 | bug_modified = True | 354 | bug_modified = True |
1188 | 289 | bug.description = desc | 355 | bug.description = desc |
1189 | 290 | 356 | ||
1191 | 291 | if check_bug_title(bug, pkg_name) is False and yes_or_ask(yes, "Do you want to update the bug title?"): | 357 | if check_bug_title(bug, pkg_name) is False and yes_or_ask( |
1192 | 358 | yes, "Do you want to update the bug title?" | ||
1193 | 359 | ): | ||
1194 | 292 | bug_modified = True | 360 | bug_modified = True |
1195 | 293 | bug.title = f"[MIR] {pkg_name}" | 361 | bug.title = f"[MIR] {pkg_name}" |
1196 | 294 | 362 | ||
1197 | @@ -298,11 +366,13 @@ def update_bug(lp, bug_number: int, yes: bool) -> None: | |||
1198 | 298 | 366 | ||
1199 | 299 | if check_bug_importance(bug) is False: | 367 | if check_bug_importance(bug) is False: |
1200 | 300 | for task in bug.bug_tasks: | 368 | for task in bug.bug_tasks: |
1204 | 301 | if task.importance != 'Critical': | 369 | if task.importance != "Critical": |
1205 | 302 | if task.bug_target_name == 'oem-priority' and \ | 370 | if task.bug_target_name == "oem-priority" and yes_or_ask( |
1206 | 303 | yes_or_ask(yes, f"Do you want to update the importance of {task.bug_target_name} from {task.importance} to Critical?"): | 371 | yes, |
1207 | 372 | f"Do you want to update the importance of {task.bug_target_name} from {task.importance} to Critical?", | ||
1208 | 373 | ): | ||
1209 | 304 | task_modified = True | 374 | task_modified = True |
1211 | 305 | task.importance = 'Critical' | 375 | task.importance = "Critical" |
1212 | 306 | task.lp_save() | 376 | task.lp_save() |
1213 | 307 | 377 | ||
1214 | 308 | update_bug_status(bug, yes) | 378 | update_bug_status(bug, yes) |
1215 | @@ -312,12 +382,14 @@ def update_bug(lp, bug_number: int, yes: bool) -> None: | |||
1216 | 312 | if check_bug_tags(bug) is False: | 382 | if check_bug_tags(bug) is False: |
1217 | 313 | tags = copy(bug.tags) | 383 | tags = copy(bug.tags) |
1218 | 314 | for tag in TAG_LIST: | 384 | for tag in TAG_LIST: |
1220 | 315 | if tag not in bug.tags and yes_or_ask(yes, f"Do you want to add '{tag}' tag?"): | 385 | if tag not in bug.tags and yes_or_ask( |
1221 | 386 | yes, f"Do you want to add '{tag}' tag?" | ||
1222 | 387 | ): | ||
1223 | 316 | bug_modified = True | 388 | bug_modified = True |
1224 | 317 | tags.append(tag) | 389 | tags.append(tag) |
1225 | 318 | for tag in bug.tags: | 390 | for tag in bug.tags: |
1228 | 319 | if tag.startswith('oem-scripts-'): | 391 | if tag.startswith("oem-scripts-"): |
1229 | 320 | if tag[len("oem-scripts-"):] != f'{oem_scripts.__version__:.2f}': | 392 | if tag[len("oem-scripts-") :] != f"{oem_scripts.__version__:.2f}": |
1230 | 321 | if yes_or_ask(yes, f"Do you want to remove '{tag}' tag?"): | 393 | if yes_or_ask(yes, f"Do you want to remove '{tag}' tag?"): |
1231 | 322 | tags.remove(tag) | 394 | tags.remove(tag) |
1232 | 323 | if tags != bug.tags: | 395 | if tags != bug.tags: |
1233 | @@ -329,11 +401,11 @@ def update_bug(lp, bug_number: int, yes: bool) -> None: | |||
1234 | 329 | 401 | ||
1235 | 330 | if bug_modified: | 402 | if bug_modified: |
1236 | 331 | bug.lp_save() | 403 | bug.lp_save() |
1238 | 332 | info(f'LP: #{bug_number} is updated.') | 404 | info(f"LP: #{bug_number} is updated.") |
1239 | 333 | elif task_modified: | 405 | elif task_modified: |
1241 | 334 | info(f'LP: #{bug_number} is updated.') | 406 | info(f"LP: #{bug_number} is updated.") |
1242 | 335 | elif yes: | 407 | elif yes: |
1244 | 336 | info('Everything looks OK.') | 408 | info("Everything looks OK.") |
1245 | 337 | 409 | ||
1246 | 338 | 410 | ||
1247 | 339 | def check_bug(lp, bug_number: int) -> None: | 411 | def check_bug(lp, bug_number: int) -> None: |
1248 | @@ -357,11 +429,14 @@ def check_bug(lp, bug_number: int) -> None: | |||
1249 | 357 | need_fixing = True | 429 | need_fixing = True |
1250 | 358 | if check_and_update_bug_attachments(bug, pkg_name) is False: | 430 | if check_and_update_bug_attachments(bug, pkg_name) is False: |
1251 | 359 | need_fixing = True | 431 | need_fixing = True |
1253 | 360 | if not args.skip and check_and_update_git_repo(bug, pkg_name, bootstrap=False) is False: | 432 | if ( |
1254 | 433 | not args.skip | ||
1255 | 434 | and check_and_update_git_repo(bug, pkg_name, bootstrap=False) is False | ||
1256 | 435 | ): | ||
1257 | 361 | need_fixing = True | 436 | need_fixing = True |
1258 | 362 | 437 | ||
1259 | 363 | if need_fixing is False: | 438 | if need_fixing is False: |
1261 | 364 | info('Everything looks OK.') | 439 | info("Everything looks OK.") |
1262 | 365 | else: | 440 | else: |
1263 | 366 | exit(1) | 441 | exit(1) |
1264 | 367 | 442 | ||
1265 | @@ -373,8 +448,8 @@ def check_bug_description(bug) -> (str, str): | |||
1266 | 373 | critical(f"LP: #{bug.id} '{bug.title}' is NOT MATCHED") | 448 | critical(f"LP: #{bug.id} '{bug.title}' is NOT MATCHED") |
1267 | 374 | exit(1) | 449 | exit(1) |
1268 | 375 | 450 | ||
1271 | 376 | if '.' in result.group(1): | 451 | if "." in result.group(1): |
1272 | 377 | project, group = result.group(1).split('.') | 452 | project, group = result.group(1).split(".") |
1273 | 378 | platform = result.group(2) | 453 | platform = result.group(2) |
1274 | 379 | branchName = group + "." + platform + "-focal-ubuntu" | 454 | branchName = group + "." + platform + "-focal-ubuntu" |
1275 | 380 | else: | 455 | else: |
1276 | @@ -384,10 +459,10 @@ def check_bug_description(bug) -> (str, str): | |||
1277 | 384 | branchName = platform + "-focal-ubuntu" | 459 | branchName = platform + "-focal-ubuntu" |
1278 | 385 | 460 | ||
1279 | 386 | metaPkgName = f"oem-{result.group(1)}-{result.group(2)}-meta" | 461 | metaPkgName = f"oem-{result.group(1)}-{result.group(2)}-meta" |
1281 | 387 | prog = re.compile(r'\W*We want to improve the hardware support for ([^.]*).\W*') | 462 | prog = re.compile(r"\W*We want to improve the hardware support for ([^.]*).\W*") |
1282 | 388 | deviceName = None | 463 | deviceName = None |
1283 | 389 | 464 | ||
1285 | 390 | for line in bug.description.split('\n'): | 465 | for line in bug.description.split("\n"): |
1286 | 391 | result = prog.match(line) | 466 | result = prog.match(line) |
1287 | 392 | if not result: | 467 | if not result: |
1288 | 393 | continue | 468 | continue |
1289 | @@ -403,11 +478,12 @@ def check_bug_description(bug) -> (str, str): | |||
1290 | 403 | metaPkgName=metaPkgName, | 478 | metaPkgName=metaPkgName, |
1291 | 404 | branchName=branchName, | 479 | branchName=branchName, |
1292 | 405 | oemCodenameNogroup=project, | 480 | oemCodenameNogroup=project, |
1294 | 406 | deviceName=deviceName) | 481 | deviceName=deviceName, |
1295 | 482 | ) | ||
1296 | 407 | 483 | ||
1297 | 408 | if bug.description != desc: | 484 | if bug.description != desc: |
1298 | 409 | d = difflib.Differ() | 485 | d = difflib.Differ() |
1300 | 410 | diff = d.compare(bug.description.split('\n'), desc.split('\n')) | 486 | diff = d.compare(bug.description.split("\n"), desc.split("\n")) |
1301 | 411 | error("The description needs to update.") | 487 | error("The description needs to update.") |
1302 | 412 | if not args.quiet: | 488 | if not args.quiet: |
1303 | 413 | for i, line in enumerate(diff): | 489 | for i, line in enumerate(diff): |
1304 | @@ -431,15 +507,19 @@ def check_bug_importance(bug) -> bool: | |||
1305 | 431 | info("Checking bug importance...") | 507 | info("Checking bug importance...") |
1306 | 432 | result = True | 508 | result = True |
1307 | 433 | for task in bug.bug_tasks: | 509 | for task in bug.bug_tasks: |
1310 | 434 | if task.bug_target_name == 'oem-priority' and task.importance != 'Critical': | 510 | if task.bug_target_name == "oem-priority" and task.importance != "Critical": |
1311 | 435 | error(f"The '{task.bug_target_name}' importance is expected to be 'Critical' instead of '{task.importance}'.") | 511 | error( |
1312 | 512 | f"The '{task.bug_target_name}' importance is expected to be 'Critical' instead of '{task.importance}'." | ||
1313 | 513 | ) | ||
1314 | 436 | result = False | 514 | result = False |
1315 | 437 | return result | 515 | return result |
1316 | 438 | 516 | ||
1317 | 439 | 517 | ||
1318 | 440 | def _expected_status(target_name: str, status: str, expected: str) -> bool: | 518 | def _expected_status(target_name: str, status: str, expected: str) -> bool: |
1319 | 441 | if status != expected: | 519 | if status != expected: |
1321 | 442 | error(f"The '{target_name}' status is expected to be '{expected}' instead of '{status}'.") | 520 | error( |
1322 | 521 | f"The '{target_name}' status is expected to be '{expected}' instead of '{status}'." | ||
1323 | 522 | ) | ||
1324 | 443 | return False | 523 | return False |
1325 | 444 | return True | 524 | return True |
1326 | 445 | 525 | ||
1327 | @@ -449,20 +529,32 @@ def check_bug_status(bug, pkg_name: str) -> bool: | |||
1328 | 449 | result = True | 529 | result = True |
1329 | 450 | saw_ubuntu_task = False | 530 | saw_ubuntu_task = False |
1330 | 451 | for task in bug.bug_tasks: | 531 | for task in bug.bug_tasks: |
1332 | 452 | if task.bug_target_name == 'oem-priority': | 532 | if task.bug_target_name == "oem-priority": |
1333 | 453 | if args.ready: | 533 | if args.ready: |
1335 | 454 | if _expected_status(task.bug_target_name, task.status, 'Fix Committed') is False: | 534 | if ( |
1336 | 535 | _expected_status(task.bug_target_name, task.status, "Fix Committed") | ||
1337 | 536 | is False | ||
1338 | 537 | ): | ||
1339 | 455 | result = False | 538 | result = False |
1340 | 456 | else: | 539 | else: |
1342 | 457 | if _expected_status(task.bug_target_name, task.status, 'In Progress') is False: | 540 | if ( |
1343 | 541 | _expected_status(task.bug_target_name, task.status, "In Progress") | ||
1344 | 542 | is False | ||
1345 | 543 | ): | ||
1346 | 458 | result = False | 544 | result = False |
1348 | 459 | elif task.bug_target_name == 'ubuntu': | 545 | elif task.bug_target_name == "ubuntu": |
1349 | 460 | saw_ubuntu_task = True | 546 | saw_ubuntu_task = True |
1350 | 461 | if args.ready: | 547 | if args.ready: |
1352 | 462 | if _expected_status(task.bug_target_name, task.status, 'Confirmed') is False: | 548 | if ( |
1353 | 549 | _expected_status(task.bug_target_name, task.status, "Confirmed") | ||
1354 | 550 | is False | ||
1355 | 551 | ): | ||
1356 | 463 | result = False | 552 | result = False |
1357 | 464 | else: | 553 | else: |
1359 | 465 | if _expected_status(task.bug_target_name, task.status, 'Incomplete') is False: | 554 | if ( |
1360 | 555 | _expected_status(task.bug_target_name, task.status, "Incomplete") | ||
1361 | 556 | is False | ||
1362 | 557 | ): | ||
1363 | 466 | result = False | 558 | result = False |
1364 | 467 | elif f"{pkg_name} (Ubuntu)" not in task.bug_target_name: | 559 | elif f"{pkg_name} (Ubuntu)" not in task.bug_target_name: |
1365 | 468 | critical(f"It is unexpected to have '{task.bug_target_name}' task") | 560 | critical(f"It is unexpected to have '{task.bug_target_name}' task") |
1366 | @@ -472,17 +564,25 @@ def check_bug_status(bug, pkg_name: str) -> bool: | |||
1367 | 472 | return result | 564 | return result |
1368 | 473 | 565 | ||
1369 | 474 | 566 | ||
1371 | 475 | def _ok_to_change_status(target_name: str, orig_status: str, new_status: str, yes: bool) -> bool: | 567 | def _ok_to_change_status( |
1372 | 568 | target_name: str, orig_status: str, new_status: str, yes: bool | ||
1373 | 569 | ) -> bool: | ||
1374 | 476 | if orig_status == new_status: | 570 | if orig_status == new_status: |
1375 | 477 | return False | 571 | return False |
1377 | 478 | if yes_or_ask(yes, f"Would you like to change the '{target_name}' status from '{orig_status}' to '{new_status}'?"): | 572 | if yes_or_ask( |
1378 | 573 | yes, | ||
1379 | 574 | f"Would you like to change the '{target_name}' status from '{orig_status}' to '{new_status}'?", | ||
1380 | 575 | ): | ||
1381 | 479 | return True | 576 | return True |
1382 | 480 | return False | 577 | return False |
1383 | 481 | 578 | ||
1384 | 482 | 579 | ||
1385 | 483 | def _change_task_status(task, new_status: str, yes: bool) -> bool: | 580 | def _change_task_status(task, new_status: str, yes: bool) -> bool: |
1388 | 484 | if _expected_status(task.bug_target_name, task.status, new_status) is False and \ | 581 | if _expected_status( |
1389 | 485 | _ok_to_change_status(task.bug_target_name, task.status, new_status, yes): | 582 | task.bug_target_name, task.status, new_status |
1390 | 583 | ) is False and _ok_to_change_status( | ||
1391 | 584 | task.bug_target_name, task.status, new_status, yes | ||
1392 | 585 | ): | ||
1393 | 486 | task.status = new_status | 586 | task.status = new_status |
1394 | 487 | task.lp_save() | 587 | task.lp_save() |
1395 | 488 | 588 | ||
1396 | @@ -491,35 +591,42 @@ def update_bug_status(bug, yes: bool) -> None: | |||
1397 | 491 | info("Updating bug status...") | 591 | info("Updating bug status...") |
1398 | 492 | saw_ubuntu_task = False | 592 | saw_ubuntu_task = False |
1399 | 493 | for bug_task in bug.bug_tasks: | 593 | for bug_task in bug.bug_tasks: |
1401 | 494 | if bug_task.bug_target_name == 'oem-priority': | 594 | if bug_task.bug_target_name == "oem-priority": |
1402 | 495 | if args.ready: | 595 | if args.ready: |
1404 | 496 | _change_task_status(bug_task, 'Fix Committed', yes) | 596 | _change_task_status(bug_task, "Fix Committed", yes) |
1405 | 497 | else: | 597 | else: |
1408 | 498 | _change_task_status(bug_task, 'In Progress', yes) | 598 | _change_task_status(bug_task, "In Progress", yes) |
1409 | 499 | elif bug_task.bug_target_name == 'ubuntu': | 599 | elif bug_task.bug_target_name == "ubuntu": |
1410 | 500 | saw_ubuntu_task = True | 600 | saw_ubuntu_task = True |
1411 | 501 | if args.ready: | 601 | if args.ready: |
1414 | 502 | _change_task_status(bug_task, 'Confirmed', yes) | 602 | _change_task_status(bug_task, "Confirmed", yes) |
1415 | 503 | elif yes_or_ask(yes, f"Would you like to delete the '{bug_task.bug_target_name}' bug_task? (Don't affect '{bug_task.bug_target_display_name}')"): | 603 | elif yes_or_ask( |
1416 | 604 | yes, | ||
1417 | 605 | f"Would you like to delete the '{bug_task.bug_target_name}' bug_task? (Don't affect '{bug_task.bug_target_display_name}')", | ||
1418 | 606 | ): | ||
1419 | 504 | try: | 607 | try: |
1420 | 505 | bug_task.lp_delete() | 608 | bug_task.lp_delete() |
1421 | 506 | except lazr.restfulclient.errors.BadRequest as e: | 609 | except lazr.restfulclient.errors.BadRequest as e: |
1423 | 507 | warning(f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead.") | 610 | warning( |
1424 | 611 | f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead." | ||
1425 | 612 | ) | ||
1426 | 508 | debug(e) | 613 | debug(e) |
1428 | 509 | _change_task_status(bug_task, 'Incomplete', yes) | 614 | _change_task_status(bug_task, "Incomplete", yes) |
1429 | 510 | except lazr.restfulclient.errors.Unauthorized as e: | 615 | except lazr.restfulclient.errors.Unauthorized as e: |
1431 | 511 | warning(f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead.") | 616 | warning( |
1432 | 617 | f"{bug_task.bug_target_name} can not be deleted, so changing the status to Incomplete instead." | ||
1433 | 618 | ) | ||
1434 | 512 | debug(e) | 619 | debug(e) |
1436 | 513 | _change_task_status(bug_task, 'Incomplete', yes) | 620 | _change_task_status(bug_task, "Incomplete", yes) |
1437 | 514 | else: | 621 | else: |
1439 | 515 | _change_task_status(bug_task, 'Incomplete', yes) | 622 | _change_task_status(bug_task, "Incomplete", yes) |
1440 | 516 | else: | 623 | else: |
1441 | 517 | warning(f"{bug_task.bug_target_name} {bug_task.status}") | 624 | warning(f"{bug_task.bug_target_name} {bug_task.status}") |
1442 | 518 | if args.ready and saw_ubuntu_task is False: | 625 | if args.ready and saw_ubuntu_task is False: |
1443 | 519 | bug.addTask(target=lp.projects["Ubuntu"]) | 626 | bug.addTask(target=lp.projects["Ubuntu"]) |
1444 | 520 | for bug_task in bug.bug_tasks: | 627 | for bug_task in bug.bug_tasks: |
1447 | 521 | if bug_task.bug_target_name == 'ubuntu': | 628 | if bug_task.bug_target_name == "ubuntu": |
1448 | 522 | _change_task_status(bug_task, 'Confirmed', yes) | 629 | _change_task_status(bug_task, "Confirmed", yes) |
1449 | 523 | 630 | ||
1450 | 524 | 631 | ||
1451 | 525 | def check_and_update_bug_subscriptions(lp, bug, update=False, yes=False) -> bool: | 632 | def check_and_update_bug_subscriptions(lp, bug, update=False, yes=False) -> bool: |
1452 | @@ -532,29 +639,41 @@ def check_and_update_bug_subscriptions(lp, bug, update=False, yes=False) -> bool | |||
1453 | 532 | for subscription in bug.subscriptions: | 639 | for subscription in bug.subscriptions: |
1454 | 533 | subscriptions.append(subscription.person.name) | 640 | subscriptions.append(subscription.person.name) |
1455 | 534 | if not args.ready: | 641 | if not args.ready: |
1457 | 535 | for subscriber in ('ubuntu-sponsors', 'ubuntu-desktop'): | 642 | for subscriber in ("ubuntu-sponsors", "ubuntu-desktop"): |
1458 | 536 | if subscriber == subscription.person.name: | 643 | if subscriber == subscription.person.name: |
1459 | 537 | error(f"'{subscriber}' should not be in the subscriptions.") | 644 | error(f"'{subscriber}' should not be in the subscriptions.") |
1461 | 538 | if update and yes_or_ask(yes, f"Do you want to unsubscribe '{subscriber}'?"): | 645 | if update and yes_or_ask( |
1462 | 646 | yes, f"Do you want to unsubscribe '{subscriber}'?" | ||
1463 | 647 | ): | ||
1464 | 539 | if subscription.canBeUnsubscribedByUser(): | 648 | if subscription.canBeUnsubscribedByUser(): |
1465 | 540 | bug.unsubscribe(person=lp.people[subscriber]) | 649 | bug.unsubscribe(person=lp.people[subscriber]) |
1466 | 541 | else: | 650 | else: |
1468 | 542 | warning(f"{lp.me.name} doesn't have the permission to unsubscribe {subscriber}.") | 651 | warning( |
1469 | 652 | f"{lp.me.name} doesn't have the permission to unsubscribe {subscriber}." | ||
1470 | 653 | ) | ||
1471 | 543 | result = False | 654 | result = False |
1472 | 544 | else: | 655 | else: |
1473 | 545 | result = False | 656 | result = False |
1474 | 546 | if args.ready: | 657 | if args.ready: |
1476 | 547 | for subscriber in ('oem-solutions-engineers', 'ubuntu-sponsors', 'ubuntu-desktop'): | 658 | for subscriber in ( |
1477 | 659 | "oem-solutions-engineers", | ||
1478 | 660 | "ubuntu-sponsors", | ||
1479 | 661 | "ubuntu-desktop", | ||
1480 | 662 | ): | ||
1481 | 548 | if subscriber not in subscriptions: | 663 | if subscriber not in subscriptions: |
1482 | 549 | error(f"'{subscriber}' is not in the subscriptions.") | 664 | error(f"'{subscriber}' is not in the subscriptions.") |
1484 | 550 | if update and yes_or_ask(yes, f"Do you want to subscribe '{subscriber}'?"): | 665 | if update and yes_or_ask( |
1485 | 666 | yes, f"Do you want to subscribe '{subscriber}'?" | ||
1486 | 667 | ): | ||
1487 | 551 | bug.subscribe(person=lp.people[subscriber]) | 668 | bug.subscribe(person=lp.people[subscriber]) |
1488 | 552 | else: | 669 | else: |
1489 | 553 | result = False | 670 | result = False |
1490 | 554 | else: | 671 | else: |
1492 | 555 | if 'oem-solutions-engineers' not in subscriptions: | 672 | if "oem-solutions-engineers" not in subscriptions: |
1493 | 556 | error(f"'oem-solutions-engineers' is not in the subscriptions.") | 673 | error(f"'oem-solutions-engineers' is not in the subscriptions.") |
1495 | 557 | if update and yes_or_ask(yes, f"Do you want to subscribe 'oem-solutions-engineers'?"): | 674 | if update and yes_or_ask( |
1496 | 675 | yes, f"Do you want to subscribe 'oem-solutions-engineers'?" | ||
1497 | 676 | ): | ||
1498 | 558 | bug.subscribe(person=lp.people[subscriber]) | 677 | bug.subscribe(person=lp.people[subscriber]) |
1499 | 559 | else: | 678 | else: |
1500 | 560 | result = False | 679 | result = False |
1501 | @@ -573,13 +692,15 @@ def check_bug_tags(bug) -> bool: | |||
1502 | 573 | 692 | ||
1503 | 574 | def _run_command(command: list or tuple, returncode=(0,), env=None) -> (str, str): | 693 | def _run_command(command: list or tuple, returncode=(0,), env=None) -> (str, str): |
1504 | 575 | debug("$ " + " ".join(command)) | 694 | debug("$ " + " ".join(command)) |
1506 | 576 | proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) | 695 | proc = subprocess.Popen( |
1507 | 696 | command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env | ||
1508 | 697 | ) | ||
1509 | 577 | out, err = proc.communicate() | 698 | out, err = proc.communicate() |
1510 | 578 | 699 | ||
1511 | 579 | if out: | 700 | if out: |
1513 | 580 | out = out.decode('utf-8').strip() | 701 | out = out.decode("utf-8").strip() |
1514 | 581 | if err: | 702 | if err: |
1516 | 582 | err = err.decode('utf-8').strip() | 703 | err = err.decode("utf-8").strip() |
1517 | 583 | 704 | ||
1518 | 584 | if proc.returncode not in returncode: | 705 | if proc.returncode not in returncode: |
1519 | 585 | critical(f"return {proc.returncode}") | 706 | critical(f"return {proc.returncode}") |
1520 | @@ -597,7 +718,9 @@ def _run_command(command: list or tuple, returncode=(0,), env=None) -> (str, str | |||
1521 | 597 | return (out, err, proc.returncode) | 718 | return (out, err, proc.returncode) |
1522 | 598 | 719 | ||
1523 | 599 | 720 | ||
1525 | 600 | def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False) -> bool: | 721 | def check_and_update_bug_attachments( |
1526 | 722 | bug, pkg_name: str, update=False, yes=False | ||
1527 | 723 | ) -> bool: | ||
1528 | 601 | if update: | 724 | if update: |
1529 | 602 | info("Checking and updating attachments...") | 725 | info("Checking and updating attachments...") |
1530 | 603 | else: | 726 | else: |
1531 | @@ -608,8 +731,8 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False | |||
1532 | 608 | critical(f"{pkg_name} failed.") | 731 | critical(f"{pkg_name} failed.") |
1533 | 609 | exit(1) | 732 | exit(1) |
1534 | 610 | 733 | ||
1537 | 611 | if '.' in result.group(1): | 734 | if "." in result.group(1): |
1538 | 612 | project, group = result.group(1).split('.') | 735 | project, group = result.group(1).split(".") |
1539 | 613 | else: | 736 | else: |
1540 | 614 | project = result.group(1) | 737 | project = result.group(1) |
1541 | 615 | group = None | 738 | group = None |
1542 | @@ -620,7 +743,16 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False | |||
1543 | 620 | else: | 743 | else: |
1544 | 621 | branch = f"{platform}-focal-ubuntu" | 744 | branch = f"{platform}-focal-ubuntu" |
1545 | 622 | 745 | ||
1547 | 623 | git_command = ("git", "clone", "--depth", "1", "-b", branch, f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", pkg_name) | 746 | git_command = ( |
1548 | 747 | "git", | ||
1549 | 748 | "clone", | ||
1550 | 749 | "--depth", | ||
1551 | 750 | "1", | ||
1552 | 751 | "-b", | ||
1553 | 752 | branch, | ||
1554 | 753 | f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", | ||
1555 | 754 | pkg_name, | ||
1556 | 755 | ) | ||
1557 | 624 | 756 | ||
1558 | 625 | debdiff = None | 757 | debdiff = None |
1559 | 626 | content = None | 758 | content = None |
1560 | @@ -628,33 +760,49 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False | |||
1561 | 628 | 760 | ||
1562 | 629 | with TemporaryDirectory() as tmpdir: | 761 | with TemporaryDirectory() as tmpdir: |
1563 | 630 | os.chdir(tmpdir) | 762 | os.chdir(tmpdir) |
1565 | 631 | _run_command(['wget', 'https://bazaar.launchpad.net/~ubuntu-archive/ubuntu-archive-tools/trunk/download/head:/oem-metapackage-mir-check']) | 763 | _run_command( |
1566 | 764 | [ | ||
1567 | 765 | "wget", | ||
1568 | 766 | "https://bazaar.launchpad.net/~ubuntu-archive/ubuntu-archive-tools/trunk/download/head:/oem-metapackage-mir-check", | ||
1569 | 767 | ] | ||
1570 | 768 | ) | ||
1571 | 632 | _run_command(git_command) | 769 | _run_command(git_command) |
1572 | 633 | git_dir = os.path.join(tmpdir, pkg_name) | 770 | git_dir = os.path.join(tmpdir, pkg_name) |
1573 | 634 | os.chdir(git_dir) | 771 | os.chdir(git_dir) |
1575 | 635 | _run_command(['dpkg-buildpackage', '-S', '-us', '-uc']) | 772 | _run_command(["dpkg-buildpackage", "-S", "-us", "-uc"]) |
1576 | 636 | os.chdir(tmpdir) | 773 | os.chdir(tmpdir) |
1579 | 637 | dsc = glob(f'{pkg_name}*.dsc')[0] | 774 | dsc = glob(f"{pkg_name}*.dsc")[0] |
1580 | 638 | prog = re.compile(fr'{pkg_name}_(.*).dsc') | 775 | prog = re.compile(fr"{pkg_name}_(.*).dsc") |
1581 | 639 | result = prog.match(dsc) | 776 | result = prog.match(dsc) |
1582 | 640 | version = result.group(1) | 777 | version = result.group(1) |
1583 | 641 | debdiff = f"{pkg_name}_{version}.debdiff" | 778 | debdiff = f"{pkg_name}_{version}.debdiff" |
1584 | 642 | # It should generate some debdiff so the return code should be 1 unless comparing to oem-qemu-meta itself. | 779 | # It should generate some debdiff so the return code should be 1 unless comparing to oem-qemu-meta itself. |
1585 | 643 | debug(f"TZ={args.tz}") | 780 | debug(f"TZ={args.tz}") |
1587 | 644 | content, _, _ = _run_command(['bash', 'oem-metapackage-mir-check', dsc], returncode=(1,), env=dict(os.environ, TZ=args.tz)) | 781 | content, _, _ = _run_command( |
1588 | 782 | ["bash", "oem-metapackage-mir-check", dsc], | ||
1589 | 783 | returncode=(1,), | ||
1590 | 784 | env=dict(os.environ, TZ=args.tz), | ||
1591 | 785 | ) | ||
1592 | 645 | content += "\n" | 786 | content += "\n" |
1594 | 646 | with open(debdiff, 'w') as f: | 787 | with open(debdiff, "w") as f: |
1595 | 647 | f.write(content) | 788 | f.write(content) |
1596 | 648 | 789 | ||
1597 | 649 | for attachment in bug.attachments: | 790 | for attachment in bug.attachments: |
1601 | 650 | if 'debdiff' in attachment.title: | 791 | if "debdiff" in attachment.title: |
1602 | 651 | _run_command(['wget', attachment.data_link, '-O', 'data']) | 792 | _run_command(["wget", attachment.data_link, "-O", "data"]) |
1603 | 652 | out, err, returncode = _run_command(['colordiff', '-ur', 'data', debdiff], returncode=(0, 1)) | 793 | out, err, returncode = _run_command( |
1604 | 794 | ["colordiff", "-ur", "data", debdiff], returncode=(0, 1) | ||
1605 | 795 | ) | ||
1606 | 653 | if returncode == 1: | 796 | if returncode == 1: |
1608 | 654 | warning(f"{attachment.title} - {attachment.web_link} has unexpected content.") | 797 | warning( |
1609 | 798 | f"{attachment.title} - {attachment.web_link} has unexpected content." | ||
1610 | 799 | ) | ||
1611 | 655 | info(f"{out}") | 800 | info(f"{out}") |
1612 | 656 | found = True | 801 | found = True |
1614 | 657 | if update and yes_or_ask(yes, f"Do you want to remove {attachment.title} - {attachment.web_link}?"): | 802 | if update and yes_or_ask( |
1615 | 803 | yes, | ||
1616 | 804 | f"Do you want to remove {attachment.title} - {attachment.web_link}?", | ||
1617 | 805 | ): | ||
1618 | 658 | try: | 806 | try: |
1619 | 659 | attachment.removeFromBug() | 807 | attachment.removeFromBug() |
1620 | 660 | except lazr.restfulclient.errors.NotFound as e: | 808 | except lazr.restfulclient.errors.NotFound as e: |
1621 | @@ -669,32 +817,54 @@ def check_and_update_bug_attachments(bug, pkg_name: str, update=False, yes=False | |||
1622 | 669 | error(f"There is no {debdiff}.") | 817 | error(f"There is no {debdiff}.") |
1623 | 670 | info(content) | 818 | info(content) |
1624 | 671 | if update and yes_or_ask(yes, f"Do you want to attach {debdiff}?"): | 819 | if update and yes_or_ask(yes, f"Do you want to attach {debdiff}?"): |
1626 | 672 | bug.addAttachment(filename=debdiff, data=content.encode("utf-8"), comment=f"Attach {debdiff} by oem-scripts {oem_scripts.__version__:.2f}.", is_patch=True) | 820 | bug.addAttachment( |
1627 | 821 | filename=debdiff, | ||
1628 | 822 | data=content.encode("utf-8"), | ||
1629 | 823 | comment=f"Attach {debdiff} by oem-scripts {oem_scripts.__version__:.2f}.", | ||
1630 | 824 | is_patch=True, | ||
1631 | 825 | ) | ||
1632 | 673 | return True | 826 | return True |
1633 | 674 | else: | 827 | else: |
1634 | 675 | return False | 828 | return False |
1635 | 676 | 829 | ||
1636 | 677 | 830 | ||
1637 | 678 | def _get_items_from_git(project: str, branch: str, pkg_name: str) -> tuple: | 831 | def _get_items_from_git(project: str, branch: str, pkg_name: str) -> tuple: |
1639 | 679 | git_command = ("git", "clone", "--depth", "1", "-b", branch, f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", pkg_name) | 832 | git_command = ( |
1640 | 833 | "git", | ||
1641 | 834 | "clone", | ||
1642 | 835 | "--depth", | ||
1643 | 836 | "1", | ||
1644 | 837 | "-b", | ||
1645 | 838 | branch, | ||
1646 | 839 | f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", | ||
1647 | 840 | pkg_name, | ||
1648 | 841 | ) | ||
1649 | 680 | with TemporaryDirectory() as tmpdir: | 842 | with TemporaryDirectory() as tmpdir: |
1650 | 681 | os.chdir(tmpdir) | 843 | os.chdir(tmpdir) |
1651 | 682 | _run_command(git_command) | 844 | _run_command(git_command) |
1652 | 683 | git_dir = os.path.join(tmpdir, pkg_name) | 845 | git_dir = os.path.join(tmpdir, pkg_name) |
1653 | 684 | 846 | ||
1658 | 685 | if project == 'somerville': | 847 | if project == "somerville": |
1659 | 686 | prog = re.compile(r"alias pci:\*sv00001028sd0000([0-9A-F]{4})[^ ]* meta (.*)") | 848 | prog = re.compile( |
1660 | 687 | elif project == 'stella': | 849 | r"alias pci:\*sv00001028sd0000([0-9A-F]{4})[^ ]* meta (.*)" |
1661 | 688 | prog = re.compile(r"alias pci:\*sv0000103Csd0000([0-9A-F]{4})[^ ]* meta (.*)") | 850 | ) |
1662 | 851 | elif project == "stella": | ||
1663 | 852 | prog = re.compile( | ||
1664 | 853 | r"alias pci:\*sv0000103Csd0000([0-9A-F]{4})[^ ]* meta (.*)" | ||
1665 | 854 | ) | ||
1666 | 689 | else: | 855 | else: |
1668 | 690 | prog = re.compile(r"alias dmi:\*bvn([0-9a-zA-Z]+):bvr([0-9a-zA-Z]{3})\* meta (.*)") | 856 | prog = re.compile( |
1669 | 857 | r"alias dmi:\*bvn([0-9a-zA-Z]+):bvr([0-9a-zA-Z]{3})\* meta (.*)" | ||
1670 | 858 | ) | ||
1671 | 691 | 859 | ||
1672 | 692 | ids = [] | 860 | ids = [] |
1674 | 693 | with open(os.path.join(git_dir, 'debian', 'modaliases'), 'r') as modaliases: | 861 | with open(os.path.join(git_dir, "debian", "modaliases"), "r") as modaliases: |
1675 | 694 | for line in modaliases: | 862 | for line in modaliases: |
1676 | 695 | result = prog.match(line.strip()) | 863 | result = prog.match(line.strip()) |
1677 | 696 | if result.group(result.lastindex) != pkg_name: | 864 | if result.group(result.lastindex) != pkg_name: |
1679 | 697 | error("Something wrong in debian/modaliases. Please fix it manually first.") | 865 | error( |
1680 | 866 | "Something wrong in debian/modaliases. Please fix it manually first." | ||
1681 | 867 | ) | ||
1682 | 698 | return False | 868 | return False |
1683 | 699 | if result.lastindex == 3: | 869 | if result.lastindex == 3: |
1684 | 700 | ids.append((result.group(1), result.group(2))) | 870 | ids.append((result.group(1), result.group(2))) |
1685 | @@ -702,21 +872,25 @@ def _get_items_from_git(project: str, branch: str, pkg_name: str) -> tuple: | |||
1686 | 702 | ids.append(result.group(1)) | 872 | ids.append(result.group(1)) |
1687 | 703 | kernel_flavour = None | 873 | kernel_flavour = None |
1688 | 704 | kernel_meta = None | 874 | kernel_meta = None |
1690 | 705 | with open(os.path.join(git_dir, 'debian', 'control'), 'r') as control: | 875 | with open(os.path.join(git_dir, "debian", "control"), "r") as control: |
1691 | 706 | for line in control: | 876 | for line in control: |
1701 | 707 | if line.startswith('XB-Ubuntu-OEM-Kernel-Flavour:'): | 877 | if line.startswith("XB-Ubuntu-OEM-Kernel-Flavour:"): |
1702 | 708 | kernel_flavour = line[len('XB-Ubuntu-OEM-Kernel-Flavour:'):].strip() | 878 | kernel_flavour = line[ |
1703 | 709 | elif line.startswith('Depends:'): | 879 | len("XB-Ubuntu-OEM-Kernel-Flavour:") : |
1704 | 710 | if 'linux-oem-20.04b' in line: | 880 | ].strip() |
1705 | 711 | kernel_meta = 'linux-oem-20.04b' | 881 | elif line.startswith("Depends:"): |
1706 | 712 | elif 'linux-oem-20.04' in line: | 882 | if "linux-oem-20.04b" in line: |
1707 | 713 | kernel_meta = 'linux-oem-20.04' | 883 | kernel_meta = "linux-oem-20.04b" |
1708 | 714 | elif 'linux-generic-hwe-20.04' in line: | 884 | elif "linux-oem-20.04" in line: |
1709 | 715 | kernel_meta = 'linux-generic-hwe-20.04' | 885 | kernel_meta = "linux-oem-20.04" |
1710 | 886 | elif "linux-generic-hwe-20.04" in line: | ||
1711 | 887 | kernel_meta = "linux-generic-hwe-20.04" | ||
1712 | 716 | return kernel_flavour, kernel_meta, ids | 888 | return kernel_flavour, kernel_meta, ids |
1713 | 717 | 889 | ||
1714 | 718 | 890 | ||
1716 | 719 | def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, update=False, username=None) -> bool: | 891 | def check_and_update_git_repo( |
1717 | 892 | bug, pkg_name: str, yes=False, bootstrap=True, update=False, username=None | ||
1718 | 893 | ) -> bool: | ||
1719 | 720 | if update: | 894 | if update: |
1720 | 721 | if bootstrap: | 895 | if bootstrap: |
1721 | 722 | info("Checking and updating git repo for bootstrap branch...") | 896 | info("Checking and updating git repo for bootstrap branch...") |
1722 | @@ -734,17 +908,17 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd | |||
1723 | 734 | critical(f"{pkg_name} failed.") | 908 | critical(f"{pkg_name} failed.") |
1724 | 735 | exit(1) | 909 | exit(1) |
1725 | 736 | 910 | ||
1728 | 737 | if '.' in result.group(1): | 911 | if "." in result.group(1): |
1729 | 738 | project, group = result.group(1).split('.') | 912 | project, group = result.group(1).split(".") |
1730 | 739 | else: | 913 | else: |
1731 | 740 | project = result.group(1) | 914 | project = result.group(1) |
1732 | 741 | group = None | 915 | group = None |
1733 | 742 | platform = result.group(2) | 916 | platform = result.group(2) |
1734 | 743 | 917 | ||
1735 | 744 | if bootstrap: | 918 | if bootstrap: |
1737 | 745 | suffix = 'ubuntu' | 919 | suffix = "ubuntu" |
1738 | 746 | else: | 920 | else: |
1740 | 747 | suffix = 'oem' | 921 | suffix = "oem" |
1741 | 748 | 922 | ||
1742 | 749 | if group: | 923 | if group: |
1743 | 750 | branch = f"{group}.{platform}-focal-{suffix}" | 924 | branch = f"{group}.{platform}-focal-{suffix}" |
1744 | @@ -752,23 +926,27 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd | |||
1745 | 752 | branch = f"{platform}-focal-{suffix}" | 926 | branch = f"{platform}-focal-{suffix}" |
1746 | 753 | 927 | ||
1747 | 754 | kernel_flavour, kernel_meta, ids = _get_items_from_git(project, branch, pkg_name) | 928 | kernel_flavour, kernel_meta, ids = _get_items_from_git(project, branch, pkg_name) |
1749 | 755 | if kernel_flavour == 'default': | 929 | if kernel_flavour == "default": |
1750 | 756 | if kernel_meta is None: | 930 | if kernel_meta is None: |
1753 | 757 | kernel_meta = 'linux-generic-hwe-20.04' | 931 | kernel_meta = "linux-generic-hwe-20.04" |
1754 | 758 | elif kernel_meta == 'linux-generic-hwe-20.04': | 932 | elif kernel_meta == "linux-generic-hwe-20.04": |
1755 | 759 | pass | 933 | pass |
1756 | 760 | else: | 934 | else: |
1758 | 761 | critical(f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}.") | 935 | critical( |
1759 | 936 | f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}." | ||
1760 | 937 | ) | ||
1761 | 762 | exit(1) | 938 | exit(1) |
1763 | 763 | elif kernel_flavour == 'oem': | 939 | elif kernel_flavour == "oem": |
1764 | 764 | if kernel_meta is None: | 940 | if kernel_meta is None: |
1767 | 765 | kernel_meta = 'linux-oem-20.04' | 941 | kernel_meta = "linux-oem-20.04" |
1768 | 766 | elif kernel_meta == 'linux-oem-20.04b': | 942 | elif kernel_meta == "linux-oem-20.04b": |
1769 | 767 | pass | 943 | pass |
1771 | 768 | elif kernel_meta == 'linux-oem-20.04': | 944 | elif kernel_meta == "linux-oem-20.04": |
1772 | 769 | pass | 945 | pass |
1773 | 770 | else: | 946 | else: |
1775 | 771 | critical(f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}.") | 947 | critical( |
1776 | 948 | f"{pkg_name}'s {branch} branch has unexpected kernel_flavour=${kernel_flavour}, kernel_meta={kernel_meta}." | ||
1777 | 949 | ) | ||
1778 | 772 | exit(1) | 950 | exit(1) |
1779 | 773 | 951 | ||
1780 | 774 | if ids == []: | 952 | if ids == []: |
1781 | @@ -778,27 +956,61 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd | |||
1782 | 778 | with TemporaryDirectory() as tmpdir: | 956 | with TemporaryDirectory() as tmpdir: |
1783 | 779 | os.chdir(tmpdir) | 957 | os.chdir(tmpdir) |
1784 | 780 | # Generated the meta package by pkg-oem-meta | 958 | # Generated the meta package by pkg-oem-meta |
1787 | 781 | if project == 'somerville': | 959 | if project == "somerville": |
1788 | 782 | command = ['pkg-somerville-meta', '-s', 'focal', '-k', kernel_meta, '-p', platform, '--public-bug', str(bug.id)] | 960 | command = [ |
1789 | 961 | "pkg-somerville-meta", | ||
1790 | 962 | "-s", | ||
1791 | 963 | "focal", | ||
1792 | 964 | "-k", | ||
1793 | 965 | kernel_meta, | ||
1794 | 966 | "-p", | ||
1795 | 967 | platform, | ||
1796 | 968 | "--public-bug", | ||
1797 | 969 | str(bug.id), | ||
1798 | 970 | ] | ||
1799 | 783 | command.extend(ids) | 971 | command.extend(ids) |
1802 | 784 | elif project == 'stella': | 972 | elif project == "stella": |
1803 | 785 | command = ['pkg-stella-meta', '-s', 'focal', '-k', kernel_meta, '-g', group, '-p', platform, '--public-bug', str(bug.id)] | 973 | command = [ |
1804 | 974 | "pkg-stella-meta", | ||
1805 | 975 | "-s", | ||
1806 | 976 | "focal", | ||
1807 | 977 | "-k", | ||
1808 | 978 | kernel_meta, | ||
1809 | 979 | "-g", | ||
1810 | 980 | group, | ||
1811 | 981 | "-p", | ||
1812 | 982 | platform, | ||
1813 | 983 | "--public-bug", | ||
1814 | 984 | str(bug.id), | ||
1815 | 985 | ] | ||
1816 | 786 | command.extend(ids) | 986 | command.extend(ids) |
1819 | 787 | elif project == 'sutton': | 987 | elif project == "sutton": |
1820 | 788 | command = ['pkg-sutton-meta', '-s', 'focal', '-k', kernel_meta, '-g', group, '-p', platform, '--public-bug', str(bug.id)] | 988 | command = [ |
1821 | 989 | "pkg-sutton-meta", | ||
1822 | 990 | "-s", | ||
1823 | 991 | "focal", | ||
1824 | 992 | "-k", | ||
1825 | 993 | kernel_meta, | ||
1826 | 994 | "-g", | ||
1827 | 995 | group, | ||
1828 | 996 | "-p", | ||
1829 | 997 | platform, | ||
1830 | 998 | "--public-bug", | ||
1831 | 999 | str(bug.id), | ||
1832 | 1000 | ] | ||
1833 | 789 | for bvn, bvr in ids: | 1001 | for bvn, bvr in ids: |
1834 | 790 | command.append(f"bvn{bvn}:bvr{bvr}") | 1002 | command.append(f"bvn{bvn}:bvr{bvr}") |
1835 | 791 | _run_command(command) | 1003 | _run_command(command) |
1836 | 792 | new_dir = os.path.join(tmpdir, pkg_name) | 1004 | new_dir = os.path.join(tmpdir, pkg_name) |
1837 | 793 | os.chdir(new_dir) | 1005 | os.chdir(new_dir) |
1838 | 794 | if bootstrap: | 1006 | if bootstrap: |
1840 | 795 | _run_command(['git', 'checkout', branch]) | 1007 | _run_command(["git", "checkout", branch]) |
1841 | 796 | 1008 | ||
1842 | 797 | os.chdir(tmpdir) | 1009 | os.chdir(tmpdir) |
1844 | 798 | os.rename(new_dir, new_dir + '.new') | 1010 | os.rename(new_dir, new_dir + ".new") |
1845 | 799 | new_dir += ".new" | 1011 | new_dir += ".new" |
1848 | 800 | shutil.rmtree(os.path.join(new_dir, '.git')) | 1012 | shutil.rmtree(os.path.join(new_dir, ".git")) |
1849 | 801 | os.remove(os.path.join(new_dir, 'debian', 'changelog')) | 1013 | os.remove(os.path.join(new_dir, "debian", "changelog")) |
1850 | 802 | 1014 | ||
1851 | 803 | if username: | 1015 | if username: |
1852 | 804 | git_repo = f"git+ssh://{username}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta" | 1016 | git_repo = f"git+ssh://{username}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta" |
1853 | @@ -811,25 +1023,27 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd | |||
1854 | 811 | 1023 | ||
1855 | 812 | if bootstrap: | 1024 | if bootstrap: |
1856 | 813 | lines = None | 1025 | lines = None |
1859 | 814 | changelog = os.path.join(tmpdir, pkg_name, 'debian', 'changelog') | 1026 | changelog = os.path.join(tmpdir, pkg_name, "debian", "changelog") |
1860 | 815 | with open(changelog, 'r') as f: | 1027 | with open(changelog, "r") as f: |
1861 | 816 | lines = f.readlines() | 1028 | lines = f.readlines() |
1862 | 817 | 1029 | ||
1863 | 818 | lines[0] = f"{pkg_name} (20.04~ubuntu1) UNRELEASED; urgency=medium\n" | 1030 | lines[0] = f"{pkg_name} (20.04~ubuntu1) UNRELEASED; urgency=medium\n" |
1864 | 819 | 1031 | ||
1866 | 820 | if f"(LP: #{bug.id})" not in lines[2] and lines[2].startswith(" * Meta package for"): | 1032 | if f"(LP: #{bug.id})" not in lines[2] and lines[2].startswith( |
1867 | 1033 | " * Meta package for" | ||
1868 | 1034 | ): | ||
1869 | 821 | lines[2] = " " + lines[2].strip() + f" (LP: #{bug.id})\n" | 1035 | lines[2] = " " + lines[2].strip() + f" (LP: #{bug.id})\n" |
1870 | 822 | 1036 | ||
1872 | 823 | with open(changelog, 'w') as f: | 1037 | with open(changelog, "w") as f: |
1873 | 824 | f.writelines(lines) | 1038 | f.writelines(lines) |
1874 | 825 | 1039 | ||
1875 | 826 | # Remove deprecated autopkgtest file | 1040 | # Remove deprecated autopkgtest file |
1877 | 827 | deprecated_test = os.path.join(tmpdir, pkg_name, 'debian', 'tests', pkg_name) | 1041 | deprecated_test = os.path.join(tmpdir, pkg_name, "debian", "tests", pkg_name) |
1878 | 828 | if os.path.exists(deprecated_test): | 1042 | if os.path.exists(deprecated_test): |
1879 | 829 | _run_command(["git", "rm", f"debian/tests/{pkg_name}"]) | 1043 | _run_command(["git", "rm", f"debian/tests/{pkg_name}"]) |
1880 | 830 | 1044 | ||
1881 | 831 | # Remove deprecated debian/compat | 1045 | # Remove deprecated debian/compat |
1883 | 832 | deprecated_compat = os.path.join(tmpdir, pkg_name, 'debian', 'compat') | 1046 | deprecated_compat = os.path.join(tmpdir, pkg_name, "debian", "compat") |
1884 | 833 | if os.path.exists(deprecated_compat): | 1047 | if os.path.exists(deprecated_compat): |
1885 | 834 | _run_command(["git", "rm", f"debian/compat"]) | 1048 | _run_command(["git", "rm", f"debian/compat"]) |
1886 | 835 | 1049 | ||
1887 | @@ -841,22 +1055,39 @@ def check_and_update_git_repo(bug, pkg_name: str, yes=False, bootstrap=True, upd | |||
1888 | 841 | 1055 | ||
1889 | 842 | _run_command(["git", "add", "."]) | 1056 | _run_command(["git", "add", "."]) |
1890 | 843 | out, _, _ = _run_command(["git", "diff", "--color=always", "--cached"]) | 1057 | out, _, _ = _run_command(["git", "diff", "--color=always", "--cached"]) |
1892 | 844 | if out != b'': | 1058 | if out != b"": |
1893 | 845 | warning("$ git diff") | 1059 | warning("$ git diff") |
1894 | 846 | print(out) | 1060 | print(out) |
1895 | 847 | if update is True: | 1061 | if update is True: |
1896 | 848 | if args.skip: | 1062 | if args.skip: |
1898 | 849 | warning(f"The update of the '{branch}' branch of {pkg_name}'s Git repository is skipped on demand.") | 1063 | warning( |
1899 | 1064 | f"The update of the '{branch}' branch of {pkg_name}'s Git repository is skipped on demand." | ||
1900 | 1065 | ) | ||
1901 | 850 | return False | 1066 | return False |
1905 | 851 | elif yes_or_ask(yes, f"Do you want to commit and push the changes above into the '{branch}' branch of {pkg_name}'s Git repository?"): | 1067 | elif yes_or_ask( |
1906 | 852 | _run_command(['git', 'commit', '-a', '-m', f"Updated by oem-scripts {oem_scripts.__version__:.2f}."]) | 1068 | yes, |
1907 | 853 | _run_command(['git', 'push']) | 1069 | f"Do you want to commit and push the changes above into the '{branch}' branch of {pkg_name}'s Git repository?", |
1908 | 1070 | ): | ||
1909 | 1071 | _run_command( | ||
1910 | 1072 | [ | ||
1911 | 1073 | "git", | ||
1912 | 1074 | "commit", | ||
1913 | 1075 | "-a", | ||
1914 | 1076 | "-m", | ||
1915 | 1077 | f"Updated by oem-scripts {oem_scripts.__version__:.2f}.", | ||
1916 | 1078 | ] | ||
1917 | 1079 | ) | ||
1918 | 1080 | _run_command(["git", "push"]) | ||
1919 | 854 | return True | 1081 | return True |
1920 | 855 | else: | 1082 | else: |
1921 | 856 | if bootstrap: | 1083 | if bootstrap: |
1923 | 857 | error(f"The '{branch}' branch of {pkg_name} in Git repository needs to update.") | 1084 | error( |
1924 | 1085 | f"The '{branch}' branch of {pkg_name} in Git repository needs to update." | ||
1925 | 1086 | ) | ||
1926 | 858 | else: | 1087 | else: |
1928 | 859 | warning(f"The '{branch}' branch of {pkg_name} in Git repository may need to update.") | 1088 | warning( |
1929 | 1089 | f"The '{branch}' branch of {pkg_name} in Git repository may need to update." | ||
1930 | 1090 | ) | ||
1931 | 860 | return False | 1091 | return False |
1932 | 861 | else: | 1092 | else: |
1933 | 862 | info(f"The '{branch}' branch of {pkg_name} in Git repository looks fine.") | 1093 | info(f"The '{branch}' branch of {pkg_name} in Git repository looks fine.") |
1934 | @@ -867,13 +1098,13 @@ if args.subcommand: | |||
1935 | 867 | login = LaunchpadLogin() | 1098 | login = LaunchpadLogin() |
1936 | 868 | lp = login.lp | 1099 | lp = login.lp |
1937 | 869 | 1100 | ||
1939 | 870 | if args.subcommand == 'create': | 1101 | if args.subcommand == "create": |
1940 | 871 | create_bug(lp, args.oemCodename, args.platformCodename, args.deviceName) | 1102 | create_bug(lp, args.oemCodename, args.platformCodename, args.deviceName) |
1942 | 872 | elif args.subcommand == 'update': | 1103 | elif args.subcommand == "update": |
1943 | 873 | update_bug(lp, args.bugNumber, args.yes) | 1104 | update_bug(lp, args.bugNumber, args.yes) |
1945 | 874 | elif args.subcommand == 'check': | 1105 | elif args.subcommand == "check": |
1946 | 875 | check_bug(lp, args.bugNumber) | 1106 | check_bug(lp, args.bugNumber) |
1948 | 876 | elif args.subcommand == 'collect': | 1107 | elif args.subcommand == "collect": |
1949 | 877 | collect_bugs(lp, args.json) | 1108 | collect_bugs(lp, args.json) |
1950 | 878 | else: | 1109 | else: |
1951 | 879 | parser.print_help() | 1110 | parser.print_help() |
1952 | diff --git a/oem-getiso b/oem-getiso | |||
1953 | index fc7723d..932e769 100755 | |||
1954 | --- a/oem-getiso | |||
1955 | +++ b/oem-getiso | |||
1956 | @@ -11,11 +11,10 @@ import sys | |||
1957 | 11 | import re | 11 | import re |
1958 | 12 | from optparse import OptionParser | 12 | from optparse import OptionParser |
1959 | 13 | 13 | ||
1961 | 14 | __DEBUG__ = (os.getenv('DEBUG') == 'true') | 14 | __DEBUG__ = os.getenv("DEBUG") == "true" |
1962 | 15 | 15 | ||
1963 | 16 | 16 | ||
1964 | 17 | class ImageURI(object): | 17 | class ImageURI(object): |
1965 | 18 | |||
1966 | 19 | def __init__(self, project, date, buildorder, image_type): | 18 | def __init__(self, project, date, buildorder, image_type): |
1967 | 20 | self.project = project | 19 | self.project = project |
1968 | 21 | self.date = date | 20 | self.date = date |
1969 | @@ -24,13 +23,12 @@ class ImageURI(object): | |||
1970 | 24 | self.image_type = image_type | 23 | self.image_type = image_type |
1971 | 25 | 24 | ||
1972 | 26 | # default value | 25 | # default value |
1974 | 27 | self.siteurl = 'https://oem-share.canonical.com' | 26 | self.siteurl = "https://oem-share.canonical.com" |
1975 | 28 | self.ispublished = False | 27 | self.ispublished = False |
1976 | 29 | 28 | ||
1977 | 30 | @property | 29 | @property |
1978 | 31 | def rootdir(self): | 30 | def rootdir(self): |
1981 | 32 | self._rootdir = self.ispublished and\ | 31 | self._rootdir = self.ispublished and "/partners/" or "/oem/cesg-builds/" |
1980 | 33 | '/partners/' or '/oem/cesg-builds/' | ||
1982 | 34 | return self._rootdir | 32 | return self._rootdir |
1983 | 35 | 33 | ||
1984 | 36 | @property | 34 | @property |
1985 | @@ -44,23 +42,31 @@ class ImageURI(object): | |||
1986 | 44 | @property | 42 | @property |
1987 | 45 | def imagepath(self): | 43 | def imagepath(self): |
1988 | 46 | if self.ispublished: | 44 | if self.ispublished: |
1992 | 47 | return os.path.join(self.project, 'images', | 45 | return os.path.join( |
1993 | 48 | '{}-{}'.format(self.date, self.buildorder), | 46 | self.project, |
1994 | 49 | self.image_type, self.isoname) | 47 | "images", |
1995 | 48 | "{}-{}".format(self.date, self.buildorder), | ||
1996 | 49 | self.image_type, | ||
1997 | 50 | self.isoname, | ||
1998 | 51 | ) | ||
1999 | 50 | else: | 52 | else: |
2002 | 51 | return os.path.join(self.project, self.date, self.buildorder, | 53 | return os.path.join( |
2003 | 52 | 'images', self.image_type, self.isoname) | 54 | self.project, |
2004 | 55 | self.date, | ||
2005 | 56 | self.buildorder, | ||
2006 | 57 | "images", | ||
2007 | 58 | self.image_type, | ||
2008 | 59 | self.isoname, | ||
2009 | 60 | ) | ||
2010 | 53 | 61 | ||
2011 | 54 | @property | 62 | @property |
2012 | 55 | def isoname(self): | 63 | def isoname(self): |
2013 | 56 | if self.manifest_ver: | 64 | if self.manifest_ver: |
2015 | 57 | project = self.project + '-' + self.manifest_ver | 65 | project = self.project + "-" + self.manifest_ver |
2016 | 58 | else: | 66 | else: |
2017 | 59 | project = self.project | 67 | project = self.project |
2022 | 60 | image_type = 'iso' \ | 68 | image_type = "iso" if re.match(r"^dell-bto", self.project) else self.image_type |
2023 | 61 | if re.match(r'^dell-bto', self.project) else self.image_type | 69 | return "{}-{}-{}-{}.iso".format(project, image_type, self.date, self.buildorder) |
2020 | 62 | return '{}-{}-{}-{}.iso'.format(project, image_type, | ||
2021 | 63 | self.date, self.buildorder) | ||
2024 | 64 | 70 | ||
2025 | 65 | @classmethod | 71 | @classmethod |
2026 | 66 | def from_url(cls, url): | 72 | def from_url(cls, url): |
2027 | @@ -88,33 +94,41 @@ class ImageURI(object): | |||
2028 | 88 | 'dell-bto-oneiric-pebble-beach-X05-iso-20111226-0.iso' | 94 | 'dell-bto-oneiric-pebble-beach-X05-iso-20111226-0.iso' |
2029 | 89 | >>> o = ImageURI.from_url('https://oem-share.canonical.com/partners/dell-bto-oneiric-audi-13-intel/images/20120316-3/iso/dell-bto-oneiric-audi-13-intel-A04-iso-20120316-3.iso') | 95 | >>> o = ImageURI.from_url('https://oem-share.canonical.com/partners/dell-bto-oneiric-audi-13-intel/images/20120316-3/iso/dell-bto-oneiric-audi-13-intel-A04-iso-20120316-3.iso') |
2030 | 90 | """ | 96 | """ |
2031 | 97 | |||
2032 | 91 | def _parse(url): | 98 | def _parse(url): |
2033 | 92 | from urllib.parse import urlparse | 99 | from urllib.parse import urlparse |
2036 | 93 | if not re.match(r'^https.*', url): | 100 | |
2037 | 94 | logging.error('{0} is not a valid image URL'.format(url)) | 101 | if not re.match(r"^https.*", url): |
2038 | 102 | logging.error("{0} is not a valid image URL".format(url)) | ||
2039 | 95 | raise IndexError | 103 | raise IndexError |
2042 | 96 | comps = urlparse(url).path.split('/') | 104 | comps = urlparse(url).path.split("/") |
2043 | 97 | published = (comps[1] == 'partners') | 105 | published = comps[1] == "partners" |
2044 | 98 | if published: | 106 | if published: |
2045 | 99 | (proj, image_type) = (comps[2], comps[5]) | 107 | (proj, image_type) = (comps[2], comps[5]) |
2047 | 100 | (date, buildorder) = comps[4].split('-') | 108 | (date, buildorder) = comps[4].split("-") |
2048 | 101 | else: | 109 | else: |
2052 | 102 | (proj, date, buildorder, image_type) = \ | 110 | (proj, date, buildorder, image_type) = ( |
2053 | 103 | (comps[3], comps[4], comps[5], comps[7]) | 111 | comps[3], |
2054 | 104 | if 'dell-bto' in proj: | 112 | comps[4], |
2055 | 113 | comps[5], | ||
2056 | 114 | comps[7], | ||
2057 | 115 | ) | ||
2058 | 116 | if "dell-bto" in proj: | ||
2059 | 105 | manifest_ver_index = published and 6 or 8 | 117 | manifest_ver_index = published and 6 or 8 |
2060 | 106 | try: | 118 | try: |
2062 | 107 | manifest_ver = comps[manifest_ver_index].split('-')[-4] | 119 | manifest_ver = comps[manifest_ver_index].split("-")[-4] |
2063 | 108 | except IndexError: | 120 | except IndexError: |
2066 | 109 | logging.error('published:{0} manifest_ver_index:{1}' | 121 | logging.error( |
2067 | 110 | .format(published, manifest_ver_index)) | 122 | "published:{0} manifest_ver_index:{1}".format( |
2068 | 123 | published, manifest_ver_index | ||
2069 | 124 | ) | ||
2070 | 125 | ) | ||
2071 | 111 | raise IndexError | 126 | raise IndexError |
2072 | 112 | else: | 127 | else: |
2073 | 113 | manifest_ver = None | 128 | manifest_ver = None |
2078 | 114 | return (proj, date, buildorder, published, | 129 | return (proj, date, buildorder, published, manifest_ver, image_type) |
2079 | 115 | manifest_ver, image_type) | 130 | |
2080 | 116 | (project, date, buildorder, published, manifest_ver, image_type) = \ | 131 | (project, date, buildorder, published, manifest_ver, image_type) = _parse(url) |
2077 | 117 | _parse(url) | ||
2081 | 118 | obj = cls(project, date, buildorder, image_type) | 132 | obj = cls(project, date, buildorder, image_type) |
2082 | 119 | obj.manifest_ver = manifest_ver | 133 | obj.manifest_ver = manifest_ver |
2083 | 120 | obj.ispublished = published | 134 | obj.ispublished = published |
2084 | @@ -141,25 +155,24 @@ class ImageURI(object): | |||
2085 | 141 | 155 | ||
2086 | 142 | def __testself(): | 156 | def __testself(): |
2087 | 143 | import doctest | 157 | import doctest |
2088 | 158 | |||
2089 | 144 | doctest.testmod() | 159 | doctest.testmod() |
2090 | 145 | 160 | ||
2091 | 146 | 161 | ||
2092 | 147 | class RsyncURI(object): | 162 | class RsyncURI(object): |
2093 | 148 | |||
2094 | 149 | def __init__(self, imguri, username=None, siteurl=None): | 163 | def __init__(self, imguri, username=None, siteurl=None): |
2095 | 150 | self.imguri = imguri | 164 | self.imguri = imguri |
2099 | 151 | self.siteurl = siteurl or 'oem-share.canonical.com' | 165 | self.siteurl = siteurl or "oem-share.canonical.com" |
2100 | 152 | self.username = username or os.getenv('USER') | 166 | self.username = username or os.getenv("USER") |
2101 | 153 | self.rootdir = '/srv/oem-share.canonical.com/www' | 167 | self.rootdir = "/srv/oem-share.canonical.com/www" |
2102 | 154 | 168 | ||
2103 | 155 | @property | 169 | @property |
2104 | 156 | def isolink(self): | 170 | def isolink(self): |
2106 | 157 | return '{}@{}:{}'.format(self.username, self.siteurl, self.imagepath) | 171 | return "{}@{}:{}".format(self.username, self.siteurl, self.imagepath) |
2107 | 158 | 172 | ||
2108 | 159 | @property | 173 | @property |
2109 | 160 | def imagepath(self): | 174 | def imagepath(self): |
2112 | 161 | return self.rootdir + os.path.join(self.imguri.rootdir, | 175 | return self.rootdir + os.path.join(self.imguri.rootdir, self.imguri.imagepath) |
2111 | 162 | self.imguri.imagepath) | ||
2113 | 163 | 176 | ||
2114 | 164 | @classmethod | 177 | @classmethod |
2115 | 165 | def from_imguri(cls, imguri): | 178 | def from_imguri(cls, imguri): |
2116 | @@ -176,7 +189,6 @@ class RsyncURI(object): | |||
2117 | 176 | 189 | ||
2118 | 177 | 190 | ||
2119 | 178 | class _Downloader(object): | 191 | class _Downloader(object): |
2120 | 179 | |||
2121 | 180 | def __init__(self, url): | 192 | def __init__(self, url): |
2122 | 181 | """ | 193 | """ |
2123 | 182 | >>> o = _Downloader('https://oem-share.canonical.com/partners/somerville-oneiric-amd64/images/20111116-1/iso/somerville-oneiric-amd64-iso-20111116-1.iso') | 194 | >>> o = _Downloader('https://oem-share.canonical.com/partners/somerville-oneiric-amd64/images/20111116-1/iso/somerville-oneiric-amd64-iso-20111116-1.iso') |
2124 | @@ -185,8 +197,7 @@ class _Downloader(object): | |||
2125 | 185 | self.imguri = ImageURI.from_url(url) | 197 | self.imguri = ImageURI.from_url(url) |
2126 | 186 | 198 | ||
2127 | 187 | def find_lastdownloadediso(self): | 199 | def find_lastdownloadediso(self): |
2130 | 188 | proj_isos = filter(lambda iso: self.imguri.project in iso, | 200 | proj_isos = filter(lambda iso: self.imguri.project in iso, glob.glob("*.iso")) |
2129 | 189 | glob.glob('*.iso')) | ||
2131 | 190 | try: | 201 | try: |
2132 | 191 | return sorted_isos(proj_isos)[-1] | 202 | return sorted_isos(proj_isos)[-1] |
2133 | 192 | except IndexError: | 203 | except IndexError: |
2134 | @@ -205,12 +216,12 @@ class _Downloader(object): | |||
2135 | 205 | 216 | ||
2136 | 206 | def run(self): | 217 | def run(self): |
2137 | 207 | self.lastiso = self.find_lastdownloadediso() | 218 | self.lastiso = self.find_lastdownloadediso() |
2139 | 208 | print('Found last downloaded file:{}'.format(self.lastiso)) | 219 | print("Found last downloaded file:{}".format(self.lastiso)) |
2140 | 209 | self.isolink = RsyncURI.from_imguri(self.imguri).isolink | 220 | self.isolink = RsyncURI.from_imguri(self.imguri).isolink |
2141 | 210 | # if os.path.exists(self.imguri.isoname): | 221 | # if os.path.exists(self.imguri.isoname): |
2142 | 211 | # sys.exit("Image {} already be downloaded.".format( | 222 | # sys.exit("Image {} already be downloaded.".format( |
2143 | 212 | # self.imguri.isoname)) | 223 | # self.imguri.isoname)) |
2145 | 213 | print('Starting to download file:{}'.format(self.imguri.isoname)) | 224 | print("Starting to download file:{}".format(self.imguri.isoname)) |
2146 | 214 | self.proc = self.do_download() | 225 | self.proc = self.do_download() |
2147 | 215 | ret = self.proc.wait() | 226 | ret = self.proc.wait() |
2148 | 216 | self.post_download() | 227 | self.post_download() |
2149 | @@ -222,10 +233,7 @@ class RsyncDownloader(_Downloader): | |||
2150 | 222 | 233 | ||
2151 | 223 | def do_download(self): | 234 | def do_download(self): |
2152 | 224 | self.filename = self.lastiso and self.lastiso or self.imguri.isoname | 235 | self.filename = self.lastiso and self.lastiso or self.imguri.isoname |
2157 | 225 | cmd = ['rsync', '-Pv', | 236 | cmd = ["rsync", "-Pv", self.isolink, self.filename] |
2154 | 226 | self.isolink, | ||
2155 | 227 | self.filename | ||
2156 | 228 | ] | ||
2158 | 229 | 237 | ||
2159 | 230 | return subprocess.Popen(cmd) | 238 | return subprocess.Popen(cmd) |
2160 | 231 | 239 | ||
2161 | @@ -235,24 +243,27 @@ class RsyncDownloader(_Downloader): | |||
2162 | 235 | 243 | ||
2163 | 236 | 244 | ||
2164 | 237 | class ZsyncDownloader(_Downloader): | 245 | class ZsyncDownloader(_Downloader): |
2165 | 238 | |||
2166 | 239 | def __init__(self, url, auth_token): | 246 | def __init__(self, url, auth_token): |
2167 | 240 | super(ZsyncDownloader, self).__init__(url) | 247 | super(ZsyncDownloader, self).__init__(url) |
2168 | 241 | self.auth_token = auth_token | 248 | self.auth_token = auth_token |
2169 | 242 | 249 | ||
2170 | 243 | def do_download(self): | 250 | def do_download(self): |
2175 | 244 | cmd = ['zsync_curl', | 251 | cmd = [ |
2176 | 245 | self.imguri.isolink + '.zsync', | 252 | "zsync_curl", |
2177 | 246 | '-c pysid=' + self.auth_token | 253 | self.imguri.isolink + ".zsync", |
2178 | 247 | ] | 254 | "-c pysid=" + self.auth_token, |
2179 | 255 | ] | ||
2180 | 248 | if self.lastiso: | 256 | if self.lastiso: |
2182 | 249 | cmd.append('-i ' + self.lastiso) | 257 | cmd.append("-i " + self.lastiso) |
2183 | 250 | 258 | ||
2184 | 251 | return subprocess.Popen(" ".join(cmd), shell=True) | 259 | return subprocess.Popen(" ".join(cmd), shell=True) |
2185 | 252 | 260 | ||
2186 | 253 | def post_download(self): | 261 | def post_download(self): |
2189 | 254 | if self.lastiso and self.lastiso != self.imguri.isoname and \ | 262 | if ( |
2190 | 255 | os.path.exists(self.lastiso): | 263 | self.lastiso |
2191 | 264 | and self.lastiso != self.imguri.isoname | ||
2192 | 265 | and os.path.exists(self.lastiso) | ||
2193 | 266 | ): | ||
2194 | 256 | os.remove(self.lastiso) | 267 | os.remove(self.lastiso) |
2195 | 257 | if os.path.exists(self.imguri.isoname + ".zs-old"): | 268 | if os.path.exists(self.imguri.isoname + ".zs-old"): |
2196 | 258 | os.remove(self.imguri.isoname + ".zs-old") | 269 | os.remove(self.imguri.isoname + ".zs-old") |
2197 | @@ -267,19 +278,21 @@ def sorted_isos(isos): | |||
2198 | 267 | >>> sorted_isos(isos) | 278 | >>> sorted_isos(isos) |
2199 | 268 | ['watauga2-precise-amd64-norecovery-iso-20130121-0.iso', 'watauga2-precise-amd64-norecovery-iso-20130121-1.iso'] | 279 | ['watauga2-precise-amd64-norecovery-iso-20130121-0.iso', 'watauga2-precise-amd64-norecovery-iso-20130121-1.iso'] |
2200 | 269 | """ | 280 | """ |
2201 | 281 | |||
2202 | 270 | def _f(e): | 282 | def _f(e): |
2208 | 271 | e = e.replace('.iso', '') | 283 | e = e.replace(".iso", "") |
2209 | 272 | if e.startswith('dell'): | 284 | if e.startswith("dell"): |
2210 | 273 | e = e.replace('dell-bto-oneiric-', '') | 285 | e = e.replace("dell-bto-oneiric-", "") |
2211 | 274 | comps = e.split('-iso-') | 286 | comps = e.split("-iso-") |
2212 | 275 | comps = comps[1].split('-') | 287 | comps = comps[1].split("-") |
2213 | 276 | (date, order) = (comps[0], comps[1]) | 288 | (date, order) = (comps[0], comps[1]) |
2214 | 277 | else: | 289 | else: |
2215 | 278 | # Date and build number are guaranteed to be the last 2 fields | 290 | # Date and build number are guaranteed to be the last 2 fields |
2216 | 279 | # in the name of an ISO produced by Offspring | 291 | # in the name of an ISO produced by Offspring |
2218 | 280 | comps = e.rsplit('-', 2) | 292 | comps = e.rsplit("-", 2) |
2219 | 281 | (date, order) = (comps[1], comps[2]) | 293 | (date, order) = (comps[1], comps[2]) |
2221 | 282 | return int('{}{}'.format(date, order)) | 294 | return int("{}{}".format(date, order)) |
2222 | 295 | |||
2223 | 283 | return sorted(isos, key=_f) | 296 | return sorted(isos, key=_f) |
2224 | 284 | 297 | ||
2225 | 285 | 298 | ||
2226 | @@ -289,7 +302,8 @@ def iso_of_bugdesc(desc): | |||
2227 | 289 | ('somerville-oneiric-amd64', '20111116', '1') | 302 | ('somerville-oneiric-amd64', '20111116', '1') |
2228 | 290 | """ | 303 | """ |
2229 | 291 | import re | 304 | import re |
2231 | 292 | ret = re.findall('Image:\\s+((\\S+)-(\\d+)-(\\d)).*', desc) | 305 | |
2232 | 306 | ret = re.findall("Image:\\s+((\\S+)-(\\d+)-(\\d)).*", desc) | ||
2233 | 293 | if ret: | 307 | if ret: |
2234 | 294 | return (ret[0][1], ret[0][2], ret[0][3]) | 308 | return (ret[0][1], ret[0][2], ret[0][3]) |
2235 | 295 | else: | 309 | else: |
2236 | @@ -302,9 +316,10 @@ def isourl_of_bug(q): | |||
2237 | 302 | 'https://oem-share.canonical.com/oem/cesg-builds/stella-anaheim-precise-amd64/20130116/0/images/iso/stella-anaheim-precise-amd64-iso-20130116-0.iso' | 316 | 'https://oem-share.canonical.com/oem/cesg-builds/stella-anaheim-precise-amd64/20130116/0/images/iso/stella-anaheim-precise-amd64-iso-20130116-0.iso' |
2238 | 303 | """ | 317 | """ |
2239 | 304 | from launchpadlib.launchpad import Launchpad | 318 | from launchpadlib.launchpad import Launchpad |
2240 | 319 | |||
2241 | 305 | cachedir = os.path.join(os.environ["HOME"], ".launchpadlib/cache") | 320 | cachedir = os.path.join(os.environ["HOME"], ".launchpadlib/cache") |
2242 | 306 | try: | 321 | try: |
2244 | 307 | lp = Launchpad.login_with("oem-getiso", 'production', cachedir) | 322 | lp = Launchpad.login_with("oem-getiso", "production", cachedir) |
2245 | 308 | found_iso = iso_of_bugdesc(lp.bugs[q].description) | 323 | found_iso = iso_of_bugdesc(lp.bugs[q].description) |
2246 | 309 | except KeyboardInterrupt: | 324 | except KeyboardInterrupt: |
2247 | 310 | print("Terminated by user reqeust!") | 325 | print("Terminated by user reqeust!") |
2248 | @@ -317,20 +332,29 @@ def isourl_of_bug(q): | |||
2249 | 317 | return img.isolink | 332 | return img.isolink |
2250 | 318 | 333 | ||
2251 | 319 | 334 | ||
2255 | 320 | if __name__ == '__main__': | 335 | if __name__ == "__main__": |
2256 | 321 | usage = "usage: %prog --method [rsync|zsync --auth_token " \ | 336 | usage = ( |
2257 | 322 | "<auth_token>] iso_url|bug_number" | 337 | "usage: %prog --method [rsync|zsync --auth_token " |
2258 | 338 | "<auth_token>] iso_url|bug_number" | ||
2259 | 339 | ) | ||
2260 | 323 | parser = OptionParser(usage=usage) | 340 | parser = OptionParser(usage=usage) |
2261 | 324 | parser.add_option( | 341 | parser.add_option( |
2264 | 325 | "--method", dest="method", default="rsync", metavar="METHOD", | 342 | "--method", |
2265 | 326 | help="The METHOD of download. Defaults to 'rsync'.") | 343 | dest="method", |
2266 | 344 | default="rsync", | ||
2267 | 345 | metavar="METHOD", | ||
2268 | 346 | help="The METHOD of download. Defaults to 'rsync'.", | ||
2269 | 347 | ) | ||
2270 | 327 | parser.add_option( | 348 | parser.add_option( |
2273 | 328 | "--auth_token", dest="auth_token", metavar="AUTH_TOKEN", | 349 | "--auth_token", |
2274 | 329 | help="The authetication token needed to access oem-share.") | 350 | dest="auth_token", |
2275 | 351 | metavar="AUTH_TOKEN", | ||
2276 | 352 | help="The authetication token needed to access oem-share.", | ||
2277 | 353 | ) | ||
2278 | 330 | (options, args) = parser.parse_args() | 354 | (options, args) = parser.parse_args() |
2279 | 331 | 355 | ||
2280 | 332 | if __DEBUG__: | 356 | if __DEBUG__: |
2282 | 333 | print('self testing...') | 357 | print("self testing...") |
2283 | 334 | __testself() | 358 | __testself() |
2284 | 335 | sys.exit() | 359 | sys.exit() |
2285 | 336 | 360 | ||
2286 | @@ -353,8 +377,7 @@ if __name__ == '__main__': | |||
2287 | 353 | if options.auth_token is None: | 377 | if options.auth_token is None: |
2288 | 354 | sys.exit(parser.get_usage()) | 378 | sys.exit(parser.get_usage()) |
2289 | 355 | if not os.path.exists("/usr/bin/zsync_curl"): | 379 | if not os.path.exists("/usr/bin/zsync_curl"): |
2292 | 356 | sys.exit( | 380 | sys.exit("The zync-curl package must be installed to use this method.") |
2291 | 357 | "The zync-curl package must be installed to use this method.") | ||
2293 | 358 | runner = ZsyncDownloader(url, options.auth_token) | 381 | runner = ZsyncDownloader(url, options.auth_token) |
2294 | 359 | else: | 382 | else: |
2295 | 360 | sys.exit(parser.get_usage()) | 383 | sys.exit(parser.get_usage()) |
2296 | diff --git a/oem-meta-packages b/oem-meta-packages | |||
2297 | index 91b07ef..c98dee8 100755 | |||
2298 | --- a/oem-meta-packages | |||
2299 | +++ b/oem-meta-packages | |||
2300 | @@ -40,7 +40,8 @@ from string import Template | |||
2301 | 40 | from tempfile import TemporaryDirectory | 40 | from tempfile import TemporaryDirectory |
2302 | 41 | 41 | ||
2303 | 42 | 42 | ||
2305 | 43 | staging_copy_template = Template(f'''Operation: copy_package | 43 | staging_copy_template = Template( |
2306 | 44 | f'''Operation: copy_package | ||
2307 | 44 | Source: $source | 45 | Source: $source |
2308 | 45 | Destination: $destination | 46 | Destination: $destination |
2309 | 46 | Package: $packages | 47 | Package: $packages |
2310 | @@ -60,7 +61,8 @@ And then verify the production archive. | |||
2311 | 60 | """ | 61 | """ |
2312 | 61 | deb http://oem.archive.canonical.com $distribution $component | 62 | deb http://oem.archive.canonical.com $distribution $component |
2313 | 62 | deb-src http://oem.archive.canonical.com $distribution $component | 63 | deb-src http://oem.archive.canonical.com $distribution $component |
2315 | 63 | """''') | 64 | """''' |
2316 | 65 | ) | ||
2317 | 64 | 66 | ||
2318 | 65 | 67 | ||
2319 | 66 | class DataJSONEncoder(json.JSONEncoder): | 68 | class DataJSONEncoder(json.JSONEncoder): |
2320 | @@ -90,10 +92,11 @@ class PkgInfo: | |||
2321 | 90 | proposed_version: str | 92 | proposed_version: str |
2322 | 91 | 93 | ||
2323 | 92 | 94 | ||
2328 | 93 | pattern = re.compile(r'oem-([^-]*)-(.*)-meta') | 95 | pattern = re.compile(r"oem-([^-]*)-(.*)-meta") |
2329 | 94 | staging_pattern = re.compile(r'.*>(.*)/</a>') | 96 | staging_pattern = re.compile(r".*>(.*)/</a>") |
2330 | 95 | parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, | 97 | parser = argparse.ArgumentParser( |
2331 | 96 | epilog=""" | 98 | formatter_class=argparse.RawDescriptionHelpFormatter, |
2332 | 99 | epilog=""" | ||
2333 | 97 | You need to have the [oem-scripts] section in ~/.config/oem-scripts/config.ini. | 100 | You need to have the [oem-scripts] section in ~/.config/oem-scripts/config.ini. |
2334 | 98 | Executing `launchpad-api get people/+me` will generate it. | 101 | Executing `launchpad-api get people/+me` will generate it. |
2335 | 99 | 102 | ||
2336 | @@ -113,35 +116,38 @@ url = SomewhereToProvideTheInformationForUsernameAndPassword | |||
2337 | 113 | username = UserName | 116 | username = UserName |
2338 | 114 | password = PassWord | 117 | password = PassWord |
2339 | 115 | archive = https://cesg.canonical.com/canonical | 118 | archive = https://cesg.canonical.com/canonical |
2362 | 116 | fingerprint = 54F1860295829CE3""") | 119 | fingerprint = 54F1860295829CE3""", |
2363 | 117 | 120 | ) | |
2364 | 118 | 121 | ||
2365 | 119 | parser.add_argument("-d", "--debug", | 122 | |
2366 | 120 | help="print debug messages", action="store_true") | 123 | parser.add_argument("-d", "--debug", help="print debug messages", action="store_true") |
2367 | 121 | parser.add_argument("-q", "--quiet", | 124 | parser.add_argument( |
2368 | 122 | help="Don't print info messages", action="store_true") | 125 | "-q", "--quiet", help="Don't print info messages", action="store_true" |
2369 | 123 | parser.add_argument("-v", "--verbose", action="store_true", | 126 | ) |
2370 | 124 | help="print verbose messages") | 127 | parser.add_argument( |
2371 | 125 | parser.add_argument("--dry-run", | 128 | "-v", "--verbose", action="store_true", help="print verbose messages" |
2372 | 126 | help="Dry run the process.", action="store_true") | 129 | ) |
2373 | 127 | parser.add_argument("--yes", | 130 | parser.add_argument("--dry-run", help="Dry run the process.", action="store_true") |
2374 | 128 | help="Say yes for all prompts.", action="store_true") | 131 | parser.add_argument("--yes", help="Say yes for all prompts.", action="store_true") |
2375 | 129 | parser.add_argument("--skip", | 132 | parser.add_argument( |
2376 | 130 | help="Skip some projects or some meta packages.", type=str, default="") | 133 | "--skip", help="Skip some projects or some meta packages.", type=str, default="" |
2377 | 131 | parser.add_argument("--only", | 134 | ) |
2378 | 132 | help="Specified the meta package. Skip others.", type=str) | 135 | parser.add_argument("--only", help="Specified the meta package. Skip others.", type=str) |
2379 | 133 | parser.add_argument("--since", | 136 | parser.add_argument( |
2380 | 134 | help="Begin from the specified meta package. Skip previous meta packages.", type=str) | 137 | "--since", |
2381 | 135 | parser.add_argument("--apt-dir", | 138 | help="Begin from the specified meta package. Skip previous meta packages.", |
2382 | 136 | type=str, | 139 | type=str, |
2383 | 137 | help="specify the dir for apt") | 140 | ) |
2384 | 141 | parser.add_argument("--apt-dir", type=str, help="specify the dir for apt") | ||
2385 | 138 | 142 | ||
2386 | 139 | 143 | ||
2387 | 140 | subparsers = parser.add_subparsers(dest="subcommand") | 144 | subparsers = parser.add_subparsers(dest="subcommand") |
2388 | 141 | 145 | ||
2392 | 142 | collect = subparsers.add_parser('collect', help='[-h] --json platforms.json | --meta oem-qemu-meta -o|--output meta-info.json', | 146 | collect = subparsers.add_parser( |
2393 | 143 | formatter_class=argparse.RawDescriptionHelpFormatter, | 147 | "collect", |
2394 | 144 | epilog=""" | 148 | help="[-h] --json platforms.json | --meta oem-qemu-meta -o|--output meta-info.json", |
2395 | 149 | formatter_class=argparse.RawDescriptionHelpFormatter, | ||
2396 | 150 | epilog=""" | ||
2397 | 145 | For example, | 151 | For example, |
2398 | 146 | oem-meta-packages collect --json platforms.json --output meta-info.json | 152 | oem-meta-packages collect --json platforms.json --output meta-info.json |
2399 | 147 | 153 | ||
2400 | @@ -179,24 +185,37 @@ platforms.json: | |||
2401 | 179 | 185 | ||
2402 | 180 | oem-meta-packages collect --meta oem-qemu-meta --output meta-info.json | 186 | oem-meta-packages collect --meta oem-qemu-meta --output meta-info.json |
2403 | 181 | 187 | ||
2422 | 182 | Collect the information of OEM metapackages in PPAs and devel/staging archives.""") | 188 | Collect the information of OEM metapackages in PPAs and devel/staging archives.""", |
2423 | 183 | 189 | ) | |
2424 | 184 | collect.add_argument("--json", | 190 | |
2425 | 185 | help="Specify the json file to read the platforms information.", | 191 | collect.add_argument( |
2426 | 186 | type=argparse.FileType('r', encoding='UTF-8')) | 192 | "--json", |
2427 | 187 | collect.add_argument("--meta", | 193 | help="Specify the json file to read the platforms information.", |
2428 | 188 | help="Specify the meta package to collect the information.") | 194 | type=argparse.FileType("r", encoding="UTF-8"), |
2429 | 189 | collect.add_argument("-o", "--output", required=True, | 195 | ) |
2430 | 190 | help="Specify a filename to write the meta information.", | 196 | collect.add_argument( |
2431 | 191 | type=argparse.FileType('w', encoding='UTF-8')) | 197 | "--meta", help="Specify the meta package to collect the information." |
2432 | 192 | 198 | ) | |
2433 | 193 | list_pkg = subparsers.add_parser('list', help='[-h]', | 199 | collect.add_argument( |
2434 | 194 | formatter_class=argparse.RawDescriptionHelpFormatter, | 200 | "-o", |
2435 | 195 | epilog="List all OEM meta packages in Ubuntu archive.") | 201 | "--output", |
2436 | 196 | 202 | required=True, | |
2437 | 197 | subscribe = subparsers.add_parser('subscribe', help='[-h]', | 203 | help="Specify a filename to write the meta information.", |
2438 | 198 | formatter_class=argparse.RawDescriptionHelpFormatter, | 204 | type=argparse.FileType("w", encoding="UTF-8"), |
2439 | 199 | epilog=""" | 205 | ) |
2440 | 206 | |||
2441 | 207 | list_pkg = subparsers.add_parser( | ||
2442 | 208 | "list", | ||
2443 | 209 | help="[-h]", | ||
2444 | 210 | formatter_class=argparse.RawDescriptionHelpFormatter, | ||
2445 | 211 | epilog="List all OEM meta packages in Ubuntu archive.", | ||
2446 | 212 | ) | ||
2447 | 213 | |||
2448 | 214 | subscribe = subparsers.add_parser( | ||
2449 | 215 | "subscribe", | ||
2450 | 216 | help="[-h]", | ||
2451 | 217 | formatter_class=argparse.RawDescriptionHelpFormatter, | ||
2452 | 218 | epilog=""" | ||
2453 | 200 | Make all bugs of all oem meta packages be subscribed by oem-solutions-engineers. | 219 | Make all bugs of all oem meta packages be subscribed by oem-solutions-engineers. |
2454 | 201 | (search current apt source for package lists) | 220 | (search current apt source for package lists) |
2455 | 202 | 221 | ||
2456 | @@ -205,56 +224,81 @@ Make all bugs of all oem meta packages be subscribed by oem-solutions-engineers. | |||
2457 | 205 | (search current apt source for package lists) | 224 | (search current apt source for package lists) |
2458 | 206 | 225 | ||
2459 | 207 | Check "To all bugs in oem-qemu-meta in Ubuntu:" on https://launchpad.net/ubuntu/+source/oem-qemu-meta/+subscribe for example. | 226 | Check "To all bugs in oem-qemu-meta in Ubuntu:" on https://launchpad.net/ubuntu/+source/oem-qemu-meta/+subscribe for example. |
2465 | 208 | """) | 227 | """, |
2466 | 209 | 228 | ) | |
2467 | 210 | unsubscribe = subparsers.add_parser('unsubscribe', help='[-h] pkgName', | 229 | |
2468 | 211 | formatter_class=argparse.RawDescriptionHelpFormatter, | 230 | unsubscribe = subparsers.add_parser( |
2469 | 212 | epilog=""" | 231 | "unsubscribe", |
2470 | 232 | help="[-h] pkgName", | ||
2471 | 233 | formatter_class=argparse.RawDescriptionHelpFormatter, | ||
2472 | 234 | epilog=""" | ||
2473 | 213 | Unsubscribe oem-solutions-engineers from oem-qemu-meta | 235 | Unsubscribe oem-solutions-engineers from oem-qemu-meta |
2474 | 214 | 236 | ||
2475 | 215 | For example, | 237 | For example, |
2476 | 216 | oem-meta-packages unsubscribe oem-qemu-meta | 238 | oem-meta-packages unsubscribe oem-qemu-meta |
2484 | 217 | """) | 239 | """, |
2485 | 218 | unsubscribe.add_argument("pkgName", type=str, | 240 | ) |
2486 | 219 | help="Specify the package name to unsubscribe.") | 241 | unsubscribe.add_argument( |
2487 | 220 | 242 | "pkgName", type=str, help="Specify the package name to unsubscribe." | |
2488 | 221 | staging_copy = subparsers.add_parser('staging-copy', help='[-h] [--ignore-staging-lock] --json meta-info.json | --meta oem-qemu-meta', | 243 | ) |
2489 | 222 | formatter_class=argparse.RawDescriptionHelpFormatter, | 244 | |
2490 | 223 | epilog=""" | 245 | staging_copy = subparsers.add_parser( |
2491 | 246 | "staging-copy", | ||
2492 | 247 | help="[-h] [--ignore-staging-lock] --json meta-info.json | --meta oem-qemu-meta", | ||
2493 | 248 | formatter_class=argparse.RawDescriptionHelpFormatter, | ||
2494 | 249 | epilog=""" | ||
2495 | 224 | For example, | 250 | For example, |
2496 | 225 | oem-meta-packages --dry-run staging-copy --meta oem-qemu-meta | 251 | oem-meta-packages --dry-run staging-copy --meta oem-qemu-meta |
2497 | 226 | or | 252 | or |
2498 | 227 | oem-meta-packages --dry-run staging-copy --json meta-info.json (generated by the 'collect' subcommand.) | 253 | oem-meta-packages --dry-run staging-copy --json meta-info.json (generated by the 'collect' subcommand.) |
2499 | 228 | 254 | ||
2513 | 229 | Copy the meta package from the devel archive into the staging archive.""") | 255 | Copy the meta package from the devel archive into the staging archive.""", |
2514 | 230 | 256 | ) | |
2515 | 231 | staging_copy.add_argument("--json", | 257 | |
2516 | 232 | help="Specify the json file to read the meta information.", | 258 | staging_copy.add_argument( |
2517 | 233 | type=argparse.FileType('r', encoding='UTF-8')) | 259 | "--json", |
2518 | 234 | staging_copy.add_argument("--meta", | 260 | help="Specify the json file to read the meta information.", |
2519 | 235 | help="Specify the meta package to copy.") | 261 | type=argparse.FileType("r", encoding="UTF-8"), |
2520 | 236 | staging_copy.add_argument("--ignore-staging-lock", | 262 | ) |
2521 | 237 | help="Ignore the staging-lock tag.", action="store_true") | 263 | staging_copy.add_argument("--meta", help="Specify the meta package to copy.") |
2522 | 238 | 264 | staging_copy.add_argument( | |
2523 | 239 | update = subparsers.add_parser('update', help='[-h] [--autopkgtest] --json meta-info.json | --meta oem-qemu-meta', | 265 | "--ignore-staging-lock", help="Ignore the staging-lock tag.", action="store_true" |
2524 | 240 | formatter_class=argparse.RawDescriptionHelpFormatter, | 266 | ) |
2525 | 241 | epilog=""" | 267 | |
2526 | 268 | update = subparsers.add_parser( | ||
2527 | 269 | "update", | ||
2528 | 270 | help="[-h] [--autopkgtest] --json meta-info.json | --meta oem-qemu-meta", | ||
2529 | 271 | formatter_class=argparse.RawDescriptionHelpFormatter, | ||
2530 | 272 | epilog=""" | ||
2531 | 242 | For example, | 273 | For example, |
2532 | 243 | oem-meta-packages --dry-run update --meta oem-qemu-meta --kernel linux-oem-20.04 | 274 | oem-meta-packages --dry-run update --meta oem-qemu-meta --kernel linux-oem-20.04 |
2533 | 244 | or | 275 | or |
2534 | 245 | oem-meta-packages --dry-run update --json meta-info.json (generated by the 'collect' subcommand.) | 276 | oem-meta-packages --dry-run update --json meta-info.json (generated by the 'collect' subcommand.) |
2535 | 246 | 277 | ||
2547 | 247 | Update the market name and the kernel flavour of the OEM meta package to the default kernel flavour, i.e. linux-generic-hwe-20.04.""") | 278 | Update the market name and the kernel flavour of the OEM meta package to the default kernel flavour, i.e. linux-generic-hwe-20.04.""", |
2548 | 248 | update.add_argument("--autopkgtest", action="store_true", | 279 | ) |
2549 | 249 | help="Run autopkgtest when checking the git repository.") | 280 | update.add_argument( |
2550 | 250 | update.add_argument("--json", type=argparse.FileType('r', encoding='UTF-8'), | 281 | "--autopkgtest", |
2551 | 251 | help="Specify the json file to read the meta information.") | 282 | action="store_true", |
2552 | 252 | update.add_argument("--meta", type=str, | 283 | help="Run autopkgtest when checking the git repository.", |
2553 | 253 | help="Specify the meta package to update.") | 284 | ) |
2554 | 254 | update.add_argument("--kernel", type=str, default="linux-generic-hwe-20.04", | 285 | update.add_argument( |
2555 | 255 | help="Specify the kernel meta to update. linux-generic-hwe-20.04|linux-oem-20.04b|linux-oem-20.04") | 286 | "--json", |
2556 | 256 | update.add_argument("--factory", action="store_true", | 287 | type=argparse.FileType("r", encoding="UTF-8"), |
2557 | 257 | help="Make the factory meta to depend on the kernel meta directly.") | 288 | help="Specify the json file to read the meta information.", |
2558 | 289 | ) | ||
2559 | 290 | update.add_argument("--meta", type=str, help="Specify the meta package to update.") | ||
2560 | 291 | update.add_argument( | ||
2561 | 292 | "--kernel", | ||
2562 | 293 | type=str, | ||
2563 | 294 | default="linux-generic-hwe-20.04", | ||
2564 | 295 | help="Specify the kernel meta to update. linux-generic-hwe-20.04|linux-oem-20.04b|linux-oem-20.04", | ||
2565 | 296 | ) | ||
2566 | 297 | update.add_argument( | ||
2567 | 298 | "--factory", | ||
2568 | 299 | action="store_true", | ||
2569 | 300 | help="Make the factory meta to depend on the kernel meta directly.", | ||
2570 | 301 | ) | ||
2571 | 258 | 302 | ||
2572 | 259 | args = parser.parse_args() | 303 | args = parser.parse_args() |
2573 | 260 | 304 | ||
2574 | @@ -263,18 +307,20 @@ setup_logging(debug=args.debug, quiet=args.quiet) | |||
2575 | 263 | if args.subcommand: | 307 | if args.subcommand: |
2576 | 264 | login = LaunchpadLogin() | 308 | login = LaunchpadLogin() |
2577 | 265 | lp = login.lp | 309 | lp = login.lp |
2579 | 266 | oem_archive = lp.people['oem-archive'] | 310 | oem_archive = lp.people["oem-archive"] |
2580 | 267 | 311 | ||
2581 | 268 | if args.apt_dir: | 312 | if args.apt_dir: |
2582 | 269 | apt_pkg.init_config() | 313 | apt_pkg.init_config() |
2583 | 270 | if args.debug: | 314 | if args.debug: |
2584 | 271 | old = apt_pkg.config.dump() | 315 | old = apt_pkg.config.dump() |
2585 | 272 | apt_pkg.config.set("Dir", args.apt_dir) | 316 | apt_pkg.config.set("Dir", args.apt_dir) |
2587 | 273 | apt_pkg.config.set("Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status")) | 317 | apt_pkg.config.set( |
2588 | 318 | "Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status") | ||
2589 | 319 | ) | ||
2590 | 274 | if args.debug: | 320 | if args.debug: |
2591 | 275 | new = apt_pkg.config.dump() | 321 | new = apt_pkg.config.dump() |
2592 | 276 | d = difflib.Differ() | 322 | d = difflib.Differ() |
2594 | 277 | diff = d.compare(old.split('\n'), new.split('\n')) | 323 | diff = d.compare(old.split("\n"), new.split("\n")) |
2595 | 278 | for line in diff: | 324 | for line in diff: |
2596 | 279 | debug(line.strip()) | 325 | debug(line.strip()) |
2597 | 280 | apt_pkg.init_system() | 326 | apt_pkg.init_system() |
2598 | @@ -288,7 +334,7 @@ def yes_or_ask(yes: bool, message: str) -> bool: | |||
2599 | 288 | res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower() | 334 | res = input(f"> \033[1;34m{message}\033[1;0m (y/n) ").lower() |
2600 | 289 | if res not in {"y", "n"}: | 335 | if res not in {"y", "n"}: |
2601 | 290 | continue | 336 | continue |
2603 | 291 | if res == 'y': | 337 | if res == "y": |
2604 | 292 | return True | 338 | return True |
2605 | 293 | else: | 339 | else: |
2606 | 294 | return False | 340 | return False |
2607 | @@ -302,21 +348,25 @@ def _debug_obj(pkg) -> None: | |||
2608 | 302 | debug(dir(pkg)) | 348 | debug(dir(pkg)) |
2609 | 303 | 349 | ||
2610 | 304 | for attr in dir(pkg): | 350 | for attr in dir(pkg): |
2612 | 305 | if not attr.startswith('__'): | 351 | if not attr.startswith("__"): |
2613 | 306 | if not isinstance(pkg.__getattribute__(attr), types.BuiltinFunctionType): | 352 | if not isinstance(pkg.__getattribute__(attr), types.BuiltinFunctionType): |
2614 | 307 | debug(f"{attr}: {pkg.__getattribute__(attr)}") | 353 | debug(f"{attr}: {pkg.__getattribute__(attr)}") |
2615 | 308 | 354 | ||
2616 | 309 | 355 | ||
2618 | 310 | def _run_command(command: list or tuple, returncode=(0,), env=None, silent=False) -> (str, str, int): | 356 | def _run_command( |
2619 | 357 | command: list or tuple, returncode=(0,), env=None, silent=False | ||
2620 | 358 | ) -> (str, str, int): | ||
2621 | 311 | if not silent: | 359 | if not silent: |
2622 | 312 | debug("$ " + " ".join(command)) | 360 | debug("$ " + " ".join(command)) |
2624 | 313 | proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) | 361 | proc = subprocess.Popen( |
2625 | 362 | command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env | ||
2626 | 363 | ) | ||
2627 | 314 | out, err = proc.communicate() | 364 | out, err = proc.communicate() |
2628 | 315 | 365 | ||
2629 | 316 | if out: | 366 | if out: |
2631 | 317 | out = out.decode('utf-8').strip() | 367 | out = out.decode("utf-8").strip() |
2632 | 318 | if err: | 368 | if err: |
2634 | 319 | err = err.decode('utf-8').strip() | 369 | err = err.decode("utf-8").strip() |
2635 | 320 | 370 | ||
2636 | 321 | if proc.returncode not in returncode: | 371 | if proc.returncode not in returncode: |
2637 | 322 | critical(f"return {proc.returncode}") | 372 | critical(f"return {proc.returncode}") |
2638 | @@ -339,7 +389,11 @@ def get_oem_meta_packages(cache) -> list: | |||
2639 | 339 | pkg_list = [] | 389 | pkg_list = [] |
2640 | 340 | for pkg in cache.packages: | 390 | for pkg in cache.packages: |
2641 | 341 | name = pkg.name | 391 | name = pkg.name |
2643 | 342 | if not name.startswith('oem-') or not name.endswith('-meta') or '-factory-' in name: | 392 | if ( |
2644 | 393 | not name.startswith("oem-") | ||
2645 | 394 | or not name.endswith("-meta") | ||
2646 | 395 | or "-factory-" in name | ||
2647 | 396 | ): | ||
2648 | 343 | continue | 397 | continue |
2649 | 344 | pkg_list.append(name) | 398 | pkg_list.append(name) |
2650 | 345 | return sorted(pkg_list) | 399 | return sorted(pkg_list) |
2651 | @@ -347,8 +401,8 @@ def get_oem_meta_packages(cache) -> list: | |||
2652 | 347 | 401 | ||
2653 | 348 | def _grouping_market_names(market_names: list, maxsplit=1) -> str: | 402 | def _grouping_market_names(market_names: list, maxsplit=1) -> str: |
2654 | 349 | # Remove empty item | 403 | # Remove empty item |
2657 | 350 | while '' in market_names: | 404 | while "" in market_names: |
2658 | 351 | market_names.remove('') | 405 | market_names.remove("") |
2659 | 352 | tmp = collections.defaultdict(list) | 406 | tmp = collections.defaultdict(list) |
2660 | 353 | space_in_model = False | 407 | space_in_model = False |
2661 | 354 | try: | 408 | try: |
2662 | @@ -356,28 +410,28 @@ def _grouping_market_names(market_names: list, maxsplit=1) -> str: | |||
2663 | 356 | if maxsplit == 1: | 410 | if maxsplit == 1: |
2664 | 357 | name, model = market_name.split(maxsplit=maxsplit) | 411 | name, model = market_name.split(maxsplit=maxsplit) |
2665 | 358 | tmp[name].append(model) | 412 | tmp[name].append(model) |
2667 | 359 | if ' ' in model: | 413 | if " " in model: |
2668 | 360 | space_in_model = True | 414 | space_in_model = True |
2669 | 361 | elif maxsplit == 2: | 415 | elif maxsplit == 2: |
2670 | 362 | brand, name, model = market_name.split(maxsplit=maxsplit) | 416 | brand, name, model = market_name.split(maxsplit=maxsplit) |
2673 | 363 | tmp[brand + ' ' + name].append(model) | 417 | tmp[brand + " " + name].append(model) |
2674 | 364 | if ' ' in model: | 418 | if " " in model: |
2675 | 365 | space_in_model = True | 419 | space_in_model = True |
2676 | 366 | except ValueError: | 420 | except ValueError: |
2678 | 367 | return ', '.join(sorted(market_names)) | 421 | return ", ".join(sorted(market_names)) |
2679 | 368 | 422 | ||
2680 | 369 | if space_in_model: | 423 | if space_in_model: |
2682 | 370 | return ', '.join(f"{name} {', '.join(models)}" for name, models in tmp.items()) | 424 | return ", ".join(f"{name} {', '.join(models)}" for name, models in tmp.items()) |
2683 | 371 | else: | 425 | else: |
2685 | 372 | return ', '.join(f"{name} {'/'.join(models)}" for name, models in tmp.items()) | 426 | return ", ".join(f"{name} {'/'.join(models)}" for name, models in tmp.items()) |
2686 | 373 | 427 | ||
2687 | 374 | 428 | ||
2688 | 375 | def deal_with_description(git_dir, old, new) -> bool: | 429 | def deal_with_description(git_dir, old, new) -> bool: |
2689 | 376 | if not old or not new: | 430 | if not old or not new: |
2690 | 377 | return False | 431 | return False |
2691 | 378 | os.chdir(git_dir) | 432 | os.chdir(git_dir) |
2694 | 379 | file_path = os.path.join(git_dir, 'debian', 'control') | 433 | file_path = os.path.join(git_dir, "debian", "control") |
2695 | 380 | with open(file_path, 'r') as control: | 434 | with open(file_path, "r") as control: |
2696 | 381 | lines = control.readlines() | 435 | lines = control.readlines() |
2697 | 382 | changed = False | 436 | changed = False |
2698 | 383 | for i, line in enumerate(lines): | 437 | for i, line in enumerate(lines): |
2699 | @@ -385,7 +439,7 @@ def deal_with_description(git_dir, old, new) -> bool: | |||
2700 | 385 | changed = True | 439 | changed = True |
2701 | 386 | lines[i] = line.replace(old, new) | 440 | lines[i] = line.replace(old, new) |
2702 | 387 | info(f'"{old}" will be replaced by "{new}".') | 441 | info(f'"{old}" will be replaced by "{new}".') |
2704 | 388 | with open(file_path, 'w') as control: | 442 | with open(file_path, "w") as control: |
2705 | 389 | control.writelines(lines) | 443 | control.writelines(lines) |
2706 | 390 | _run_command(["git", "add", "debian/control"]) | 444 | _run_command(["git", "add", "debian/control"]) |
2707 | 391 | return changed | 445 | return changed |
2708 | @@ -395,39 +449,41 @@ def deal_with_kernel_flavour(pkg_name, branch, git_dir) -> bool: | |||
2709 | 395 | os.chdir(git_dir) | 449 | os.chdir(git_dir) |
2710 | 396 | idx = -1 | 450 | idx = -1 |
2711 | 397 | kernel_flavour = None | 451 | kernel_flavour = None |
2714 | 398 | file_path = os.path.join(git_dir, 'debian', 'control') | 452 | file_path = os.path.join(git_dir, "debian", "control") |
2715 | 399 | with open(file_path, 'r') as control: | 453 | with open(file_path, "r") as control: |
2716 | 400 | lines = control.readlines() | 454 | lines = control.readlines() |
2717 | 401 | for i, line in enumerate(lines): | 455 | for i, line in enumerate(lines): |
2720 | 402 | if line.startswith('XB-Ubuntu-OEM-Kernel-Flavour:'): | 456 | if line.startswith("XB-Ubuntu-OEM-Kernel-Flavour:"): |
2721 | 403 | kernel_flavour = line[len('XB-Ubuntu-OEM-Kernel-Flavour:'):].strip() | 457 | kernel_flavour = line[len("XB-Ubuntu-OEM-Kernel-Flavour:") :].strip() |
2722 | 404 | idx = i | 458 | idx = i |
2723 | 405 | break | 459 | break |
2724 | 406 | 460 | ||
2725 | 407 | if not kernel_flavour: | 461 | if not kernel_flavour: |
2727 | 408 | critical(f"There is no XB-Ubuntu-OEM-Kernel-Flavour in debian/control of {branch} for {pkg_name}.") | 462 | critical( |
2728 | 463 | f"There is no XB-Ubuntu-OEM-Kernel-Flavour in debian/control of {branch} for {pkg_name}." | ||
2729 | 464 | ) | ||
2730 | 409 | exit(1) | 465 | exit(1) |
2731 | 410 | 466 | ||
2732 | 411 | debug(f"XB-Ubuntu-OEM-Kernel-Flavour: {kernel_flavour}") | 467 | debug(f"XB-Ubuntu-OEM-Kernel-Flavour: {kernel_flavour}") |
2733 | 412 | 468 | ||
2736 | 413 | if args.kernel == 'linux-generic-hwe-20.04': | 469 | if args.kernel == "linux-generic-hwe-20.04": |
2737 | 414 | if kernel_flavour == 'default': | 470 | if kernel_flavour == "default": |
2738 | 415 | return False | 471 | return False |
2742 | 416 | kernel_flavour = 'default' | 472 | kernel_flavour = "default" |
2743 | 417 | elif args.kernel == 'linux-oem-20.04': | 473 | elif args.kernel == "linux-oem-20.04": |
2744 | 418 | if kernel_flavour == 'oem': | 474 | if kernel_flavour == "oem": |
2745 | 419 | return False | 475 | return False |
2749 | 420 | kernel_flavour = 'oem' | 476 | kernel_flavour = "oem" |
2750 | 421 | elif args.kernel == 'linux-oem-20.04b': | 477 | elif args.kernel == "linux-oem-20.04b": |
2751 | 422 | if kernel_flavour == 'oem': | 478 | if kernel_flavour == "oem": |
2752 | 423 | return False | 479 | return False |
2754 | 424 | kernel_flavour = 'oem' | 480 | kernel_flavour = "oem" |
2755 | 425 | else: | 481 | else: |
2756 | 426 | print(f"{args.kernel} is not supported.") | 482 | print(f"{args.kernel} is not supported.") |
2757 | 427 | exit(1) | 483 | exit(1) |
2758 | 428 | 484 | ||
2759 | 429 | lines[idx] = f"XB-Ubuntu-OEM-Kernel-Flavour: {kernel_flavour}\n" | 485 | lines[idx] = f"XB-Ubuntu-OEM-Kernel-Flavour: {kernel_flavour}\n" |
2761 | 430 | with open(file_path, 'w') as control: | 486 | with open(file_path, "w") as control: |
2762 | 431 | control.writelines(lines) | 487 | control.writelines(lines) |
2763 | 432 | _run_command(["git", "add", "debian/control"]) | 488 | _run_command(["git", "add", "debian/control"]) |
2764 | 433 | return True | 489 | return True |
2765 | @@ -436,12 +492,12 @@ def deal_with_kernel_flavour(pkg_name, branch, git_dir) -> bool: | |||
2766 | 436 | def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool: | 492 | def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool: |
2767 | 437 | os.chdir(git_dir) | 493 | os.chdir(git_dir) |
2768 | 438 | idx = -1 | 494 | idx = -1 |
2770 | 439 | file_path = os.path.join(git_dir, 'debian', 'control') | 495 | file_path = os.path.join(git_dir, "debian", "control") |
2771 | 440 | changed = False | 496 | changed = False |
2773 | 441 | with open(file_path, 'r') as control: | 497 | with open(file_path, "r") as control: |
2774 | 442 | lines = control.readlines() | 498 | lines = control.readlines() |
2775 | 443 | for i, line in enumerate(lines): | 499 | for i, line in enumerate(lines): |
2777 | 444 | if line.startswith('Depends:'): | 500 | if line.startswith("Depends:"): |
2778 | 445 | idx = i | 501 | idx = i |
2779 | 446 | break | 502 | break |
2780 | 447 | 503 | ||
2781 | @@ -452,24 +508,42 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool: | |||
2782 | 452 | debug(lines[idx].strip()) | 508 | debug(lines[idx].strip()) |
2783 | 453 | 509 | ||
2784 | 454 | # this only works for updating auto-generated code | 510 | # this only works for updating auto-generated code |
2788 | 455 | if args.kernel == 'linux-generic-hwe-20.04' and ', linux-generic-hwe-20.04,' not in lines[idx]: | 511 | if ( |
2789 | 456 | lines[idx] = lines[idx].replace(', linux-oem-20.04,', ', linux-generic-hwe-20.04,') | 512 | args.kernel == "linux-generic-hwe-20.04" |
2790 | 457 | lines[idx] = lines[idx].replace(', linux-oem-20.04b | linux-oem-20.04,', ', linux-generic-hwe-20.04,') | 513 | and ", linux-generic-hwe-20.04," not in lines[idx] |
2791 | 514 | ): | ||
2792 | 515 | lines[idx] = lines[idx].replace( | ||
2793 | 516 | ", linux-oem-20.04,", ", linux-generic-hwe-20.04," | ||
2794 | 517 | ) | ||
2795 | 518 | lines[idx] = lines[idx].replace( | ||
2796 | 519 | ", linux-oem-20.04b | linux-oem-20.04,", ", linux-generic-hwe-20.04," | ||
2797 | 520 | ) | ||
2798 | 458 | changed = True | 521 | changed = True |
2802 | 459 | elif args.kernel == 'linux-oem-20.04' and ', linux-oem-20.04,' not in lines[idx]: | 522 | elif args.kernel == "linux-oem-20.04" and ", linux-oem-20.04," not in lines[idx]: |
2803 | 460 | lines[idx] = lines[idx].replace(', linux-generic-hwe-20.04,', ', linux-oem-20.04,') | 523 | lines[idx] = lines[idx].replace( |
2804 | 461 | lines[idx] = lines[idx].replace(', linux-oem-20.04b | linux-oem-20.04,', ', linux-oem-20.04,') | 524 | ", linux-generic-hwe-20.04,", ", linux-oem-20.04," |
2805 | 525 | ) | ||
2806 | 526 | lines[idx] = lines[idx].replace( | ||
2807 | 527 | ", linux-oem-20.04b | linux-oem-20.04,", ", linux-oem-20.04," | ||
2808 | 528 | ) | ||
2809 | 462 | changed = True | 529 | changed = True |
2813 | 463 | elif args.kernel == 'linux-oem-20.04b' and ', linux-oem-20.04b | linux-oem-20.04,' not in lines[idx]: | 530 | elif ( |
2814 | 464 | lines[idx] = lines[idx].replace(', linux-generic-hwe-20.04,', ', linux-oem-20.04b | linux-oem-20.04,') | 531 | args.kernel == "linux-oem-20.04b" |
2815 | 465 | lines[idx] = lines[idx].replace(', linux-oem-20.04,', ', linux-oem-20.04b | linux-oem-20.04,') | 532 | and ", linux-oem-20.04b | linux-oem-20.04," not in lines[idx] |
2816 | 533 | ): | ||
2817 | 534 | lines[idx] = lines[idx].replace( | ||
2818 | 535 | ", linux-generic-hwe-20.04,", ", linux-oem-20.04b | linux-oem-20.04," | ||
2819 | 536 | ) | ||
2820 | 537 | lines[idx] = lines[idx].replace( | ||
2821 | 538 | ", linux-oem-20.04,", ", linux-oem-20.04b | linux-oem-20.04," | ||
2822 | 539 | ) | ||
2823 | 466 | changed = True | 540 | changed = True |
2824 | 467 | 541 | ||
2825 | 468 | if args.factory: | 542 | if args.factory: |
2826 | 469 | factory_idx = -1 | 543 | factory_idx = -1 |
2827 | 470 | # Find the factory depends. | 544 | # Find the factory depends. |
2828 | 471 | for i, line in enumerate(lines): | 545 | for i, line in enumerate(lines): |
2830 | 472 | if i > idx and line.startswith('Depends:'): | 546 | if i > idx and line.startswith("Depends:"): |
2831 | 473 | factory_idx = i | 547 | factory_idx = i |
2832 | 474 | depends_line = lines[factory_idx].strip() | 548 | depends_line = lines[factory_idx].strip() |
2833 | 475 | break | 549 | break |
2834 | @@ -478,7 +552,11 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool: | |||
2835 | 478 | critical("It can not find factory 'Depends' in debian/control.") | 552 | critical("It can not find factory 'Depends' in debian/control.") |
2836 | 479 | exit(1) | 553 | exit(1) |
2837 | 480 | 554 | ||
2839 | 481 | for kernel in ('linux-generic-hwe-20.04', 'linux-oem-20.04', 'linux-oem-20.04b'): | 555 | for kernel in ( |
2840 | 556 | "linux-generic-hwe-20.04", | ||
2841 | 557 | "linux-oem-20.04", | ||
2842 | 558 | "linux-oem-20.04b", | ||
2843 | 559 | ): | ||
2844 | 482 | if depends_line.endswith(kernel) or f"{kernel}," in depends_line: | 560 | if depends_line.endswith(kernel) or f"{kernel}," in depends_line: |
2845 | 483 | if kernel != args.kernel: | 561 | if kernel != args.kernel: |
2846 | 484 | lines[factory_idx] = lines[factory_idx].replace(kernel, args.kernel) | 562 | lines[factory_idx] = lines[factory_idx].replace(kernel, args.kernel) |
2847 | @@ -488,7 +566,7 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool: | |||
2848 | 488 | lines[factory_idx] = depends_line + f", {args.kernel}\n" | 566 | lines[factory_idx] = depends_line + f", {args.kernel}\n" |
2849 | 489 | changed = True | 567 | changed = True |
2850 | 490 | 568 | ||
2852 | 491 | with open(file_path, 'w') as control: | 569 | with open(file_path, "w") as control: |
2853 | 492 | control.writelines(lines) | 570 | control.writelines(lines) |
2854 | 493 | 571 | ||
2855 | 494 | if changed: | 572 | if changed: |
2856 | @@ -497,12 +575,14 @@ def deal_with_kernel_depends(pkg_name, branch, git_dir) -> bool: | |||
2857 | 497 | return changed | 575 | return changed |
2858 | 498 | 576 | ||
2859 | 499 | 577 | ||
2861 | 500 | def deal_with_debian_tests(pkg_name: str, git_dir: str, branch: str, bootstrap: bool) -> bool: | 578 | def deal_with_debian_tests( |
2862 | 579 | pkg_name: str, git_dir: str, branch: str, bootstrap: bool | ||
2863 | 580 | ) -> bool: | ||
2864 | 501 | os.chdir(git_dir) | 581 | os.chdir(git_dir) |
2865 | 502 | 582 | ||
2866 | 503 | changed = False | 583 | changed = False |
2867 | 504 | 584 | ||
2869 | 505 | tests_folder = os.path.join(git_dir, 'debian', 'tests') | 585 | tests_folder = os.path.join(git_dir, "debian", "tests") |
2870 | 506 | if not os.path.exists(tests_folder): | 586 | if not os.path.exists(tests_folder): |
2871 | 507 | os.mkdir(tests_folder) | 587 | os.mkdir(tests_folder) |
2872 | 508 | 588 | ||
2873 | @@ -510,17 +590,17 @@ def deal_with_debian_tests(pkg_name: str, git_dir: str, branch: str, bootstrap: | |||
2874 | 510 | Depends: @ | 590 | Depends: @ |
2875 | 511 | Restrictions: needs-root | 591 | Restrictions: needs-root |
2876 | 512 | """ | 592 | """ |
2878 | 513 | control = os.path.join(git_dir, 'debian', 'tests', 'control') | 593 | control = os.path.join(git_dir, "debian", "tests", "control") |
2879 | 514 | 594 | ||
2880 | 515 | if os.path.exists(control): | 595 | if os.path.exists(control): |
2882 | 516 | with open(control, 'r') as f: | 596 | with open(control, "r") as f: |
2883 | 517 | if f.read() != control_content: | 597 | if f.read() != control_content: |
2885 | 518 | with open(control, 'w') as fp: | 598 | with open(control, "w") as fp: |
2886 | 519 | fp.write(control_content) | 599 | fp.write(control_content) |
2887 | 520 | _run_command(["git", "add", "debian/tests/control"]) | 600 | _run_command(["git", "add", "debian/tests/control"]) |
2888 | 521 | changed = True | 601 | changed = True |
2889 | 522 | else: | 602 | else: |
2891 | 523 | with open(control, 'w') as fp: | 603 | with open(control, "w") as fp: |
2892 | 524 | fp.write(control_content) | 604 | fp.write(control_content) |
2893 | 525 | _run_command(["git", "add", "debian/tests/control"]) | 605 | _run_command(["git", "add", "debian/tests/control"]) |
2894 | 526 | changed = True | 606 | changed = True |
2895 | @@ -538,29 +618,37 @@ apt-get update | |||
2896 | 538 | apt-get full-upgrade --yes | 618 | apt-get full-upgrade --yes |
2897 | 539 | """ | 619 | """ |
2898 | 540 | if not bootstrap: | 620 | if not bootstrap: |
2901 | 541 | if 'oem' in args.kernel: | 621 | if "oem" in args.kernel: |
2902 | 542 | grub_flavour = 'oem' | 622 | grub_flavour = "oem" |
2903 | 543 | else: | 623 | else: |
2907 | 544 | grub_flavour = 'generic' | 624 | grub_flavour = "generic" |
2908 | 545 | meta_content += '\ngrep ^GRUB_FLAVOUR_ORDER=' + grub_flavour + '$ /etc/default/grub.d/oem-flavour.cfg\n' | 625 | meta_content += ( |
2909 | 546 | meta_content += '\ndpkg-query -W -f=\'${Status}\' ' + args.kernel + ' | grep "install ok installed"\n' | 626 | "\ngrep ^GRUB_FLAVOUR_ORDER=" |
2910 | 627 | + grub_flavour | ||
2911 | 628 | + "$ /etc/default/grub.d/oem-flavour.cfg\n" | ||
2912 | 629 | ) | ||
2913 | 630 | meta_content += ( | ||
2914 | 631 | "\ndpkg-query -W -f='${Status}' " | ||
2915 | 632 | + args.kernel | ||
2916 | 633 | + ' | grep "install ok installed"\n' | ||
2917 | 634 | ) | ||
2918 | 547 | meta_content += f"\napt-get autoremove --purge --yes {pkg_name}\n" | 635 | meta_content += f"\napt-get autoremove --purge --yes {pkg_name}\n" |
2921 | 548 | meta = os.path.join(git_dir, 'debian', 'tests', 'meta') | 636 | meta = os.path.join(git_dir, "debian", "tests", "meta") |
2922 | 549 | old_meta = os.path.join(git_dir, 'debian', 'tests', pkg_name) | 637 | old_meta = os.path.join(git_dir, "debian", "tests", pkg_name) |
2923 | 550 | 638 | ||
2924 | 551 | if os.path.exists(old_meta): | 639 | if os.path.exists(old_meta): |
2925 | 552 | _run_command(["git", "rm", "-f", f"debian/tests/{pkg_name}"]) | 640 | _run_command(["git", "rm", "-f", f"debian/tests/{pkg_name}"]) |
2926 | 553 | changed = True | 641 | changed = True |
2927 | 554 | 642 | ||
2928 | 555 | if os.path.exists(meta): | 643 | if os.path.exists(meta): |
2930 | 556 | with open(meta, 'r') as f: | 644 | with open(meta, "r") as f: |
2931 | 557 | if f.read() != meta_content: | 645 | if f.read() != meta_content: |
2933 | 558 | with open(meta, 'w') as fp: | 646 | with open(meta, "w") as fp: |
2934 | 559 | fp.write(meta_content) | 647 | fp.write(meta_content) |
2935 | 560 | _run_command(["git", "add", "debian/tests/meta"]) | 648 | _run_command(["git", "add", "debian/tests/meta"]) |
2936 | 561 | changed = True | 649 | changed = True |
2937 | 562 | else: | 650 | else: |
2939 | 563 | with open(meta, 'w') as fp: | 651 | with open(meta, "w") as fp: |
2940 | 564 | fp.write(meta_content) | 652 | fp.write(meta_content) |
2941 | 565 | _run_command(["git", "add", "debian/tests/meta"]) | 653 | _run_command(["git", "add", "debian/tests/meta"]) |
2942 | 566 | changed = True | 654 | changed = True |
2943 | @@ -570,29 +658,30 @@ apt-get full-upgrade --yes | |||
2944 | 570 | 658 | ||
2945 | 571 | def deal_with_gbp_conf(git_dir, branch) -> bool: | 659 | def deal_with_gbp_conf(git_dir, branch) -> bool: |
2946 | 572 | os.chdir(git_dir) | 660 | os.chdir(git_dir) |
2948 | 573 | file_path = os.path.join(git_dir, 'debian', 'gbp.conf') | 661 | file_path = os.path.join(git_dir, "debian", "gbp.conf") |
2949 | 574 | gbp_conf = f"""[DEFAULT] | 662 | gbp_conf = f"""[DEFAULT] |
2950 | 575 | pristine-tar = False | 663 | pristine-tar = False |
2951 | 576 | debian-branch = {branch} | 664 | debian-branch = {branch} |
2952 | 577 | debian-tag = {branch}_%(version)s | 665 | debian-tag = {branch}_%(version)s |
2953 | 578 | """ | 666 | """ |
2954 | 579 | if os.path.exists(file_path): | 667 | if os.path.exists(file_path): |
2956 | 580 | with open(file_path, 'r') as f: | 668 | with open(file_path, "r") as f: |
2957 | 581 | if f.read() == gbp_conf: | 669 | if f.read() == gbp_conf: |
2958 | 582 | return False | 670 | return False |
2960 | 583 | with open(file_path, 'w') as f: | 671 | with open(file_path, "w") as f: |
2961 | 584 | f.write(gbp_conf) | 672 | f.write(gbp_conf) |
2962 | 585 | _run_command(["git", "add", "debian/gbp.conf"]) | 673 | _run_command(["git", "add", "debian/gbp.conf"]) |
2963 | 586 | return True | 674 | return True |
2964 | 587 | 675 | ||
2965 | 588 | 676 | ||
2966 | 589 | def deal_with_maintainer_scripts(pkg_name, branch, git_dir) -> bool: | 677 | def deal_with_maintainer_scripts(pkg_name, branch, git_dir) -> bool: |
2969 | 590 | postinst_path = os.path.join(git_dir, 'debian', 'postinst') | 678 | postinst_path = os.path.join(git_dir, "debian", "postinst") |
2970 | 591 | postrm_path = os.path.join(git_dir, 'debian', 'postrm') | 679 | postrm_path = os.path.join(git_dir, "debian", "postrm") |
2971 | 592 | modified = False | 680 | modified = False |
2972 | 593 | 681 | ||
2975 | 594 | with open(postinst_path, 'w') as f: | 682 | with open(postinst_path, "w") as f: |
2976 | 595 | f.write(f'''#!/bin/sh | 683 | f.write( |
2977 | 684 | f"""#!/bin/sh | ||
2978 | 596 | 685 | ||
2979 | 597 | set -e | 686 | set -e |
2980 | 598 | 687 | ||
2981 | @@ -609,14 +698,16 @@ case "$1" in | |||
2982 | 609 | esac | 698 | esac |
2983 | 610 | 699 | ||
2984 | 611 | #DEBHELPER# | 700 | #DEBHELPER# |
2986 | 612 | ''') | 701 | """ |
2987 | 702 | ) | ||
2988 | 613 | _run_command(["git", "add", "debian/postinst"]) | 703 | _run_command(["git", "add", "debian/postinst"]) |
2989 | 614 | output, _, _ = _run_command(["git", "status", "--porcelain", "debian/postinst"]) | 704 | output, _, _ = _run_command(["git", "status", "--porcelain", "debian/postinst"]) |
2990 | 615 | if output: | 705 | if output: |
2991 | 616 | modified = True | 706 | modified = True |
2992 | 617 | 707 | ||
2995 | 618 | with open(postrm_path, 'w') as f: | 708 | with open(postrm_path, "w") as f: |
2996 | 619 | f.write('''#!/bin/sh | 709 | f.write( |
2997 | 710 | """#!/bin/sh | ||
2998 | 620 | 711 | ||
2999 | 621 | set -e | 712 | set -e |
3000 | 622 | 713 | ||
3001 | @@ -632,7 +723,8 @@ case "$1" in | |||
3002 | 632 | esac | 723 | esac |
3003 | 633 | 724 | ||
3004 | 634 | #DEBHELPER# | 725 | #DEBHELPER# |
3006 | 635 | ''') | 726 | """ |
3007 | 727 | ) | ||
3008 | 636 | _run_command(["git", "add", "debian/postrm"]) | 728 | _run_command(["git", "add", "debian/postrm"]) |
3009 | 637 | output, _, _ = _run_command(["git", "status", "--porcelain", "debian/postrm"]) | 729 | output, _, _ = _run_command(["git", "status", "--porcelain", "debian/postrm"]) |
3010 | 638 | if output: | 730 | if output: |
3011 | @@ -644,39 +736,41 @@ esac | |||
3012 | 644 | def deal_with_grub_flavour(pkg_name, branch, git_dir) -> bool: | 736 | def deal_with_grub_flavour(pkg_name, branch, git_dir) -> bool: |
3013 | 645 | os.chdir(git_dir) | 737 | os.chdir(git_dir) |
3014 | 646 | grub_flavour = None | 738 | grub_flavour = None |
3016 | 647 | file_path = os.path.join(git_dir, 'oem-flavour.cfg') | 739 | file_path = os.path.join(git_dir, "oem-flavour.cfg") |
3017 | 648 | if os.path.exists(file_path): | 740 | if os.path.exists(file_path): |
3019 | 649 | with open(file_path, 'r') as oem_flavour: | 741 | with open(file_path, "r") as oem_flavour: |
3020 | 650 | for line in oem_flavour: | 742 | for line in oem_flavour: |
3023 | 651 | if line.startswith('GRUB_FLAVOUR_ORDER='): | 743 | if line.startswith("GRUB_FLAVOUR_ORDER="): |
3024 | 652 | grub_flavour = line[len('GRUB_FLAVOUR_ORDER='):].strip() | 744 | grub_flavour = line[len("GRUB_FLAVOUR_ORDER=") :].strip() |
3025 | 653 | break | 745 | break |
3026 | 654 | 746 | ||
3029 | 655 | if args.kernel == 'linux-generic-hwe-20.04': | 747 | if args.kernel == "linux-generic-hwe-20.04": |
3030 | 656 | if grub_flavour == 'generic': | 748 | if grub_flavour == "generic": |
3031 | 657 | return False | 749 | return False |
3035 | 658 | grub_flavour = 'generic' | 750 | grub_flavour = "generic" |
3036 | 659 | elif args.kernel == 'linux-oem-20.04': | 751 | elif args.kernel == "linux-oem-20.04": |
3037 | 660 | if grub_flavour == 'oem': | 752 | if grub_flavour == "oem": |
3038 | 661 | return False | 753 | return False |
3042 | 662 | grub_flavour = 'oem' | 754 | grub_flavour = "oem" |
3043 | 663 | elif args.kernel == 'linux-oem-20.04b': | 755 | elif args.kernel == "linux-oem-20.04b": |
3044 | 664 | if grub_flavour == 'oem': | 756 | if grub_flavour == "oem": |
3045 | 665 | return False | 757 | return False |
3047 | 666 | grub_flavour = 'oem' | 758 | grub_flavour = "oem" |
3048 | 667 | else: | 759 | else: |
3049 | 668 | print(f"{args.kernel} is not supported.") | 760 | print(f"{args.kernel} is not supported.") |
3050 | 669 | exit(1) | 761 | exit(1) |
3051 | 670 | 762 | ||
3052 | 671 | if not os.path.exists(file_path): | 763 | if not os.path.exists(file_path): |
3054 | 672 | with open(os.path.join(git_dir, 'debian', 'install'), 'a') as f: | 764 | with open(os.path.join(git_dir, "debian", "install"), "a") as f: |
3055 | 673 | f.write(f"oem-flavour.cfg /usr/share/{pkg_name}/\n") | 765 | f.write(f"oem-flavour.cfg /usr/share/{pkg_name}/\n") |
3056 | 674 | _run_command(["git", "add", "debian/install"]) | 766 | _run_command(["git", "add", "debian/install"]) |
3057 | 675 | 767 | ||
3060 | 676 | with open(file_path, 'w') as f: | 768 | with open(file_path, "w") as f: |
3061 | 677 | f.write(f"""# This file is automatically generated by {pkg_name}, and changes will be overriden | 769 | f.write( |
3062 | 770 | f"""# This file is automatically generated by {pkg_name}, and changes will be overriden | ||
3063 | 678 | GRUB_FLAVOUR_ORDER={grub_flavour} | 771 | GRUB_FLAVOUR_ORDER={grub_flavour} |
3065 | 679 | """) | 772 | """ |
3066 | 773 | ) | ||
3067 | 680 | _run_command(["git", "add", "oem-flavour.cfg"]) | 774 | _run_command(["git", "add", "oem-flavour.cfg"]) |
3068 | 681 | 775 | ||
3069 | 682 | return True | 776 | return True |
3070 | @@ -684,29 +778,31 @@ GRUB_FLAVOUR_ORDER={grub_flavour} | |||
3071 | 684 | 778 | ||
3072 | 685 | # Python 3.9 supports this. | 779 | # Python 3.9 supports this. |
3073 | 686 | def remove_prefix(s, prefix): | 780 | def remove_prefix(s, prefix): |
3075 | 687 | return s[len(prefix):] if s.startswith(prefix) else s | 781 | return s[len(prefix) :] if s.startswith(prefix) else s |
3076 | 688 | 782 | ||
3077 | 689 | 783 | ||
3078 | 690 | # Python 3.9 supports this. | 784 | # Python 3.9 supports this. |
3079 | 691 | def remove_suffix(s, suffix): | 785 | def remove_suffix(s, suffix): |
3081 | 692 | return s[:-len(suffix)] if s.endswith(suffix) else s | 786 | return s[: -len(suffix)] if s.endswith(suffix) else s |
3082 | 693 | 787 | ||
3083 | 694 | 788 | ||
3084 | 695 | def remove_prefix_suffix(s, prefix, suffix): | 789 | def remove_prefix_suffix(s, prefix, suffix): |
3085 | 696 | return remove_suffix(remove_prefix(s, prefix), suffix) | 790 | return remove_suffix(remove_prefix(s, prefix), suffix) |
3086 | 697 | 791 | ||
3087 | 698 | 792 | ||
3089 | 699 | def search_ppa_and_version(project: str, group: str, platform: str, pkg_name: str, archive_name=None): | 793 | def search_ppa_and_version( |
3090 | 794 | project: str, group: str, platform: str, pkg_name: str, archive_name=None | ||
3091 | 795 | ): | ||
3092 | 700 | if archive_name: | 796 | if archive_name: |
3093 | 701 | archive = oem_archive.getPPAByName(name=archive_name) | 797 | archive = oem_archive.getPPAByName(name=archive_name) |
3095 | 702 | elif project == 'somerville': | 798 | elif project == "somerville": |
3096 | 703 | try: | 799 | try: |
3097 | 704 | archive = oem_archive.getPPAByName(name=f"{project}-fossa-{platform}") | 800 | archive = oem_archive.getPPAByName(name=f"{project}-fossa-{platform}") |
3098 | 705 | except lazr.restfulclient.errors.NotFound: | 801 | except lazr.restfulclient.errors.NotFound: |
3099 | 706 | archive = oem_archive.getPPAByName(name=f"{project}-{platform}") | 802 | archive = oem_archive.getPPAByName(name=f"{project}-{platform}") |
3101 | 707 | elif project == 'stella': | 803 | elif project == "stella": |
3102 | 708 | archive = oem_archive.getPPAByName(name=f"{project}-{group}-ouagadougou") | 804 | archive = oem_archive.getPPAByName(name=f"{project}-{group}-ouagadougou") |
3104 | 709 | elif project == 'sutton': | 805 | elif project == "sutton": |
3105 | 710 | try: | 806 | try: |
3106 | 711 | archive = oem_archive.getPPAByName(name=f"{project}-{group}-ouagadougou") | 807 | archive = oem_archive.getPPAByName(name=f"{project}-{group}-ouagadougou") |
3107 | 712 | except lazr.restfulclient.errors.NotFound: | 808 | except lazr.restfulclient.errors.NotFound: |
3108 | @@ -718,7 +814,7 @@ def search_ppa_and_version(project: str, group: str, platform: str, pkg_name: st | |||
3109 | 718 | archive.newSubscription(subscriber=lp.me) | 814 | archive.newSubscription(subscriber=lp.me) |
3110 | 719 | archive.lp_save() | 815 | archive.lp_save() |
3111 | 720 | except lazr.restfulclient.errors.BadRequest as e: | 816 | except lazr.restfulclient.errors.BadRequest as e: |
3113 | 721 | if 'already has a current subscription for' not in str(e): | 817 | if "already has a current subscription for" not in str(e): |
3114 | 722 | raise e | 818 | raise e |
3115 | 723 | _run_command(["get-private-ppa", f"ppa:oem-archive/{archive.name}"]) | 819 | _run_command(["get-private-ppa", f"ppa:oem-archive/{archive.name}"]) |
3116 | 724 | source_lists = "\n".join(lp.me.getArchiveSubscriptionURLs()) | 820 | source_lists = "\n".join(lp.me.getArchiveSubscriptionURLs()) |
3117 | @@ -726,8 +822,10 @@ def search_ppa_and_version(project: str, group: str, platform: str, pkg_name: st | |||
3118 | 726 | fingerprint = archive.signing_key_fingerprint | 822 | fingerprint = archive.signing_key_fingerprint |
3119 | 727 | version = "" | 823 | version = "" |
3120 | 728 | for source in sources: | 824 | for source in sources: |
3123 | 729 | if source.source_package_name == pkg_name and \ | 825 | if ( |
3124 | 730 | apt_pkg.version_compare(source.source_package_version, version) > 0: | 826 | source.source_package_name == pkg_name |
3125 | 827 | and apt_pkg.version_compare(source.source_package_version, version) > 0 | ||
3126 | 828 | ): | ||
3127 | 731 | version = source.source_package_version | 829 | version = source.source_package_version |
3128 | 732 | if version: | 830 | if version: |
3129 | 733 | return archive.name, version, fingerprint | 831 | return archive.name, version, fingerprint |
3130 | @@ -741,8 +839,8 @@ def get_debian_version_from_git(pkg_name: str) -> str: | |||
3131 | 741 | if not result: | 839 | if not result: |
3132 | 742 | return None | 840 | return None |
3133 | 743 | 841 | ||
3136 | 744 | if '.' in result.group(1): | 842 | if "." in result.group(1): |
3137 | 745 | project, group = result.group(1).split('.') | 843 | project, group = result.group(1).split(".") |
3138 | 746 | else: | 844 | else: |
3139 | 747 | project = result.group(1) | 845 | project = result.group(1) |
3140 | 748 | group = None | 846 | group = None |
3141 | @@ -756,137 +854,215 @@ def get_debian_version_from_git(pkg_name: str) -> str: | |||
3142 | 756 | ubuntu_branch = f"{platform}-focal-ubuntu" | 854 | ubuntu_branch = f"{platform}-focal-ubuntu" |
3143 | 757 | oem_branch = f"{platform}-focal-oem" | 855 | oem_branch = f"{platform}-focal-oem" |
3144 | 758 | 856 | ||
3147 | 759 | wget_changelog_command = ("wget", '-q', "-O", "changelog", | 857 | wget_changelog_command = ( |
3148 | 760 | f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={ubuntu_branch}") | 858 | "wget", |
3149 | 859 | "-q", | ||
3150 | 860 | "-O", | ||
3151 | 861 | "changelog", | ||
3152 | 862 | f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={ubuntu_branch}", | ||
3153 | 863 | ) | ||
3154 | 761 | 864 | ||
3155 | 762 | bootstrap_version = "" | 865 | bootstrap_version = "" |
3156 | 763 | with TemporaryDirectory() as tmpdir: | 866 | with TemporaryDirectory() as tmpdir: |
3157 | 764 | os.chdir(tmpdir) | 867 | os.chdir(tmpdir) |
3158 | 765 | _run_command(wget_changelog_command) | 868 | _run_command(wget_changelog_command) |
3163 | 766 | bootstrap_version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"]) | 869 | bootstrap_version, _, _ = _run_command( |
3164 | 767 | 870 | ["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"] | |
3165 | 768 | wget_changelog_command = ("wget", '-q', "-O", "changelog", | 871 | ) |
3166 | 769 | f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={oem_branch}") | 872 | |
3167 | 873 | wget_changelog_command = ( | ||
3168 | 874 | "wget", | ||
3169 | 875 | "-q", | ||
3170 | 876 | "-O", | ||
3171 | 877 | "changelog", | ||
3172 | 878 | f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta/plain/debian/changelog?h={oem_branch}", | ||
3173 | 879 | ) | ||
3174 | 770 | 880 | ||
3175 | 771 | oem_version = "" | 881 | oem_version = "" |
3176 | 772 | with TemporaryDirectory() as tmpdir: | 882 | with TemporaryDirectory() as tmpdir: |
3177 | 773 | os.chdir(tmpdir) | 883 | os.chdir(tmpdir) |
3178 | 774 | _run_command(wget_changelog_command) | 884 | _run_command(wget_changelog_command) |
3180 | 775 | oem_version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"]) | 885 | oem_version, _, _ = _run_command( |
3181 | 886 | ["dpkg-parsechangelog", "--show-field", "Version", "-l", f"changelog"] | ||
3182 | 887 | ) | ||
3183 | 776 | 888 | ||
3184 | 777 | return bootstrap_version, oem_version | 889 | return bootstrap_version, oem_version |
3185 | 778 | 890 | ||
3186 | 779 | 891 | ||
3187 | 780 | def search_public_archive(pkg_name: str, project: str, codename: str) -> tuple: | 892 | def search_public_archive(pkg_name: str, project: str, codename: str) -> tuple: |
3190 | 781 | if project == 'somerville': | 893 | if project == "somerville": |
3191 | 782 | source_line = 'http://dell.archive.canonical.com/' | 894 | source_line = "http://dell.archive.canonical.com/" |
3192 | 783 | archive = f"somerville-{codename}" | 895 | archive = f"somerville-{codename}" |
3195 | 784 | elif project == 'stella': | 896 | elif project == "stella": |
3196 | 785 | source_line = 'http://hp.archive.canonical.com/' | 897 | source_line = "http://hp.archive.canonical.com/" |
3197 | 786 | archive = f"stella.{codename}" | 898 | archive = f"stella.{codename}" |
3200 | 787 | elif project == 'sutton': | 899 | elif project == "sutton": |
3201 | 788 | source_line = 'http://lenovo.archive.canonical.com/' | 900 | source_line = "http://lenovo.archive.canonical.com/" |
3202 | 789 | archive = f"sutton.{codename}" | 901 | archive = f"sutton.{codename}" |
3203 | 790 | oem_version = "" | 902 | oem_version = "" |
3204 | 791 | with TemporaryDirectory() as tmpdir: | 903 | with TemporaryDirectory() as tmpdir: |
3205 | 792 | os.chdir(tmpdir) | 904 | os.chdir(tmpdir) |
3217 | 793 | _run_command(['setup-apt-dir.sh', | 905 | _run_command( |
3218 | 794 | '-c', 'focal', | 906 | [ |
3219 | 795 | '--disable-base', | 907 | "setup-apt-dir.sh", |
3220 | 796 | '--disable-updates', | 908 | "-c", |
3221 | 797 | '--disable-backports', | 909 | "focal", |
3222 | 798 | '--apt-dir', tmpdir, | 910 | "--disable-base", |
3223 | 799 | '--extra-key', '59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69', | 911 | "--disable-updates", |
3224 | 800 | '--extra-repo', f"deb [signed-by={tmpdir}/59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69.pub arch=amd64] {source_line} focal {archive}"], | 912 | "--disable-backports", |
3225 | 801 | silent=True) | 913 | "--apt-dir", |
3226 | 802 | output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True) | 914 | tmpdir, |
3227 | 803 | for line in output.split('\n'): | 915 | "--extra-key", |
3228 | 916 | "59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69", | ||
3229 | 917 | "--extra-repo", | ||
3230 | 918 | f"deb [signed-by={tmpdir}/59AC787C2A8C78BA5ECA0B2ED4D1EAED36962F69.pub arch=amd64] {source_line} focal {archive}", | ||
3231 | 919 | ], | ||
3232 | 920 | silent=True, | ||
3233 | 921 | ) | ||
3234 | 922 | output, _, _ = _run_command( | ||
3235 | 923 | ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name], | ||
3236 | 924 | returncode=(0, 1), | ||
3237 | 925 | silent=True, | ||
3238 | 926 | ) | ||
3239 | 927 | for line in output.split("\n"): | ||
3240 | 804 | if pkg_name in line and source_line in line: | 928 | if pkg_name in line and source_line in line: |
3243 | 805 | oem_version = line.split(' ')[1] | 929 | oem_version = line.split(" ")[1] |
3244 | 806 | info(f"{pkg_name} {oem_version} exists in 'deb {source_line} focal {archive}'.") | 930 | info( |
3245 | 931 | f"{pkg_name} {oem_version} exists in 'deb {source_line} focal {archive}'." | ||
3246 | 932 | ) | ||
3247 | 807 | break | 933 | break |
3248 | 808 | ubuntu_version = "" | 934 | ubuntu_version = "" |
3249 | 809 | with TemporaryDirectory() as tmpdir: | 935 | with TemporaryDirectory() as tmpdir: |
3250 | 810 | os.chdir(tmpdir) | 936 | os.chdir(tmpdir) |
3260 | 811 | _run_command(['setup-apt-dir.sh', | 937 | _run_command( |
3261 | 812 | '-c', 'focal', | 938 | [ |
3262 | 813 | '--disable-backports', | 939 | "setup-apt-dir.sh", |
3263 | 814 | '--apt-dir', tmpdir], | 940 | "-c", |
3264 | 815 | silent=True) | 941 | "focal", |
3265 | 816 | output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True) | 942 | "--disable-backports", |
3266 | 817 | for line in output.split('\n'): | 943 | "--apt-dir", |
3267 | 818 | if pkg_name in line and 'http://archive.ubuntu.com/ubuntu' in line: | 944 | tmpdir, |
3268 | 819 | ubuntu_version = line.split(' ')[1] | 945 | ], |
3269 | 946 | silent=True, | ||
3270 | 947 | ) | ||
3271 | 948 | output, _, _ = _run_command( | ||
3272 | 949 | ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name], | ||
3273 | 950 | returncode=(0, 1), | ||
3274 | 951 | silent=True, | ||
3275 | 952 | ) | ||
3276 | 953 | for line in output.split("\n"): | ||
3277 | 954 | if pkg_name in line and "http://archive.ubuntu.com/ubuntu" in line: | ||
3278 | 955 | ubuntu_version = line.split(" ")[1] | ||
3279 | 820 | info(f"{pkg_name} {ubuntu_version} exists in Ubuntu archive.") | 956 | info(f"{pkg_name} {ubuntu_version} exists in Ubuntu archive.") |
3280 | 821 | break | 957 | break |
3281 | 822 | proposed_version = "" | 958 | proposed_version = "" |
3282 | 823 | with TemporaryDirectory() as tmpdir: | 959 | with TemporaryDirectory() as tmpdir: |
3283 | 824 | os.chdir(tmpdir) | 960 | os.chdir(tmpdir) |
3296 | 825 | _run_command(['setup-apt-dir.sh', | 961 | _run_command( |
3297 | 826 | '-c', 'focal', | 962 | [ |
3298 | 827 | '--proposed', | 963 | "setup-apt-dir.sh", |
3299 | 828 | '--disable-base', | 964 | "-c", |
3300 | 829 | '--disable-updates', | 965 | "focal", |
3301 | 830 | '--disable-backports', | 966 | "--proposed", |
3302 | 831 | '--apt-dir', tmpdir], | 967 | "--disable-base", |
3303 | 832 | silent=True) | 968 | "--disable-updates", |
3304 | 833 | output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True) | 969 | "--disable-backports", |
3305 | 834 | for line in output.split('\n'): | 970 | "--apt-dir", |
3306 | 835 | if pkg_name in line and 'http://archive.ubuntu.com/ubuntu' in line: | 971 | tmpdir, |
3307 | 836 | proposed_version = line.split(' ')[1] | 972 | ], |
3308 | 973 | silent=True, | ||
3309 | 974 | ) | ||
3310 | 975 | output, _, _ = _run_command( | ||
3311 | 976 | ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name], | ||
3312 | 977 | returncode=(0, 1), | ||
3313 | 978 | silent=True, | ||
3314 | 979 | ) | ||
3315 | 980 | for line in output.split("\n"): | ||
3316 | 981 | if pkg_name in line and "http://archive.ubuntu.com/ubuntu" in line: | ||
3317 | 982 | proposed_version = line.split(" ")[1] | ||
3318 | 837 | info(f"{pkg_name} {proposed_version} exists in focal-proposed.") | 983 | info(f"{pkg_name} {proposed_version} exists in focal-proposed.") |
3319 | 838 | break | 984 | break |
3320 | 839 | return ubuntu_version, proposed_version, oem_version, archive | 985 | return ubuntu_version, proposed_version, oem_version, archive |
3321 | 840 | 986 | ||
3322 | 841 | 987 | ||
3325 | 842 | def search_private_archive(pkg_name: str, project: str, platform: str, index: str, config: str, branch: str) -> tuple: | 988 | def search_private_archive( |
3326 | 843 | domain = config['archive'].split("://")[1].split("/")[0] | 989 | pkg_name: str, project: str, platform: str, index: str, config: str, branch: str |
3327 | 990 | ) -> tuple: | ||
3328 | 991 | domain = config["archive"].split("://")[1].split("/")[0] | ||
3329 | 844 | archive = None | 992 | archive = None |
3330 | 845 | version = None | 993 | version = None |
3333 | 846 | for line in index.split('\n'): | 994 | for line in index.split("\n"): |
3334 | 847 | if project in line and platform in line and f'focal-{branch}' in line: | 995 | if project in line and platform in line and f"focal-{branch}" in line: |
3335 | 848 | result = staging_pattern.match(line) | 996 | result = staging_pattern.match(line) |
3336 | 849 | if result: | 997 | if result: |
3337 | 850 | archive = result.group(1) | 998 | archive = result.group(1) |
3338 | 851 | with TemporaryDirectory() as tmpdir: | 999 | with TemporaryDirectory() as tmpdir: |
3339 | 852 | os.chdir(tmpdir) | 1000 | os.chdir(tmpdir) |
3351 | 853 | source_line = config['archive'].replace("https://", f"https://{config['username']}:{config['password']}@") | 1001 | source_line = config["archive"].replace( |
3352 | 854 | _run_command(['setup-apt-dir.sh', | 1002 | "https://", f"https://{config['username']}:{config['password']}@" |
3353 | 855 | '-c', 'focal', | 1003 | ) |
3354 | 856 | '--disable-updates', | 1004 | _run_command( |
3355 | 857 | '--disable-backports', | 1005 | [ |
3356 | 858 | '--apt-dir', tmpdir, | 1006 | "setup-apt-dir.sh", |
3357 | 859 | '--extra-key', config['fingerprint'], | 1007 | "-c", |
3358 | 860 | '--extra-repo', f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public"], | 1008 | "focal", |
3359 | 861 | silent=True) | 1009 | "--disable-updates", |
3360 | 862 | output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], returncode=(0, 1), silent=True) | 1010 | "--disable-backports", |
3361 | 863 | for line in output.split('\n'): | 1011 | "--apt-dir", |
3362 | 1012 | tmpdir, | ||
3363 | 1013 | "--extra-key", | ||
3364 | 1014 | config["fingerprint"], | ||
3365 | 1015 | "--extra-repo", | ||
3366 | 1016 | f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public", | ||
3367 | 1017 | ], | ||
3368 | 1018 | silent=True, | ||
3369 | 1019 | ) | ||
3370 | 1020 | output, _, _ = _run_command( | ||
3371 | 1021 | ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name], | ||
3372 | 1022 | returncode=(0, 1), | ||
3373 | 1023 | silent=True, | ||
3374 | 1024 | ) | ||
3375 | 1025 | for line in output.split("\n"): | ||
3376 | 864 | if pkg_name in line and domain in line: | 1026 | if pkg_name in line and domain in line: |
3378 | 865 | version = line.split(' ')[1] | 1027 | version = line.split(" ")[1] |
3379 | 866 | break | 1028 | break |
3380 | 867 | if version is None and project == "somerville": | 1029 | if version is None and project == "somerville": |
3381 | 868 | archive = f"somerville-focal-{branch}" | 1030 | archive = f"somerville-focal-{branch}" |
3382 | 869 | with TemporaryDirectory() as tmpdir: | 1031 | with TemporaryDirectory() as tmpdir: |
3383 | 870 | os.chdir(tmpdir) | 1032 | os.chdir(tmpdir) |
3395 | 871 | source_line = config['archive'].replace("https://", f"https://{config['username']}:{config['password']}@") | 1033 | source_line = config["archive"].replace( |
3396 | 872 | _run_command(['setup-apt-dir.sh', | 1034 | "https://", f"https://{config['username']}:{config['password']}@" |
3397 | 873 | '-c', 'focal', | 1035 | ) |
3398 | 874 | '--disable-updates', | 1036 | _run_command( |
3399 | 875 | '--disable-backports', | 1037 | [ |
3400 | 876 | '--apt-dir', tmpdir, | 1038 | "setup-apt-dir.sh", |
3401 | 877 | '--extra-key', config['fingerprint'], | 1039 | "-c", |
3402 | 878 | '--extra-repo', f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public"], | 1040 | "focal", |
3403 | 879 | silent=True) | 1041 | "--disable-updates", |
3404 | 880 | output, _, _ = _run_command(['pkg-list', '--long', '--apt-dir', tmpdir, pkg_name], silent=True) | 1042 | "--disable-backports", |
3405 | 881 | for line in output.split('\n'): | 1043 | "--apt-dir", |
3406 | 1044 | tmpdir, | ||
3407 | 1045 | "--extra-key", | ||
3408 | 1046 | config["fingerprint"], | ||
3409 | 1047 | "--extra-repo", | ||
3410 | 1048 | f"deb [signed-by={tmpdir}/{config['fingerprint']}.pub arch=amd64] {source_line} {archive} public", | ||
3411 | 1049 | ], | ||
3412 | 1050 | silent=True, | ||
3413 | 1051 | ) | ||
3414 | 1052 | output, _, _ = _run_command( | ||
3415 | 1053 | ["pkg-list", "--long", "--apt-dir", tmpdir, pkg_name], silent=True | ||
3416 | 1054 | ) | ||
3417 | 1055 | for line in output.split("\n"): | ||
3418 | 882 | if pkg_name in line and domain in line: | 1056 | if pkg_name in line and domain in line: |
3420 | 883 | version = line.split(' ')[1] | 1057 | version = line.split(" ")[1] |
3421 | 884 | break | 1058 | break |
3422 | 885 | 1059 | ||
3423 | 886 | return (archive, version) | 1060 | return (archive, version) |
3424 | 887 | 1061 | ||
3425 | 888 | 1062 | ||
3427 | 889 | def collect_pkg_info(data, check_private: bool = False, index=None, config=None) -> dict: | 1063 | def collect_pkg_info( |
3428 | 1064 | data, check_private: bool = False, index=None, config=None | ||
3429 | 1065 | ) -> dict: | ||
3430 | 890 | if type(data) is str: | 1066 | if type(data) is str: |
3431 | 891 | result = pattern.match(data) | 1067 | result = pattern.match(data) |
3432 | 892 | 1068 | ||
3433 | @@ -894,8 +1070,8 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3434 | 894 | print(f"{data} is not supported.") | 1070 | print(f"{data} is not supported.") |
3435 | 895 | exit(1) | 1071 | exit(1) |
3436 | 896 | 1072 | ||
3439 | 897 | if '.' in result.group(1): | 1073 | if "." in result.group(1): |
3440 | 898 | project, group = result.group(1).split('.') | 1074 | project, group = result.group(1).split(".") |
3441 | 899 | else: | 1075 | else: |
3442 | 900 | project = result.group(1) | 1076 | project = result.group(1) |
3443 | 901 | group = "N/A" | 1077 | group = "N/A" |
3444 | @@ -917,14 +1093,16 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3445 | 917 | print(f"{data} is not supported.") | 1093 | print(f"{data} is not supported.") |
3446 | 918 | exit(1) | 1094 | exit(1) |
3447 | 919 | 1095 | ||
3449 | 920 | json_data = json.loads(f"""[{{ | 1096 | json_data = json.loads( |
3450 | 1097 | f"""[{{ | ||
3451 | 921 | "Customer": "{customer}", | 1098 | "Customer": "{customer}", |
3452 | 922 | "Group": "{group}", | 1099 | "Group": "{group}", |
3453 | 923 | "Codename": "{codename}", | 1100 | "Codename": "{codename}", |
3454 | 924 | "Platform": "", | 1101 | "Platform": "", |
3455 | 925 | "MarketName": "", | 1102 | "MarketName": "", |
3456 | 926 | "PlatformLPTag": "{tag}" | 1103 | "PlatformLPTag": "{tag}" |
3458 | 927 | }}]""") | 1104 | }}]""" |
3459 | 1105 | ) | ||
3460 | 928 | else: | 1106 | else: |
3461 | 929 | json_data = json.load(data) | 1107 | json_data = json.load(data) |
3462 | 930 | 1108 | ||
3463 | @@ -935,27 +1113,27 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3464 | 935 | sutton = dict() | 1113 | sutton = dict() |
3465 | 936 | 1114 | ||
3466 | 937 | for item in json_data: | 1115 | for item in json_data: |
3475 | 938 | customer = item['Customer'].lower() | 1116 | customer = item["Customer"].lower() |
3476 | 939 | platform = item['Platform'].lower() | 1117 | platform = item["Platform"].lower() |
3477 | 940 | codename = item['Codename'].lower() | 1118 | codename = item["Codename"].lower() |
3478 | 941 | group = item['Group'].lower() | 1119 | group = item["Group"].lower() |
3479 | 942 | market_name = item['MarketName'] | 1120 | market_name = item["MarketName"] |
3480 | 943 | lp_tag = item['PlatformLPTag'].lower() | 1121 | lp_tag = item["PlatformLPTag"].lower() |
3481 | 944 | if 'dell' in customer: | 1122 | if "dell" in customer: |
3482 | 945 | if 'somerville' in args.skip: | 1123 | if "somerville" in args.skip: |
3483 | 946 | continue | 1124 | continue |
3485 | 947 | platform = remove_prefix(lp_tag, 'fossa-') | 1125 | platform = remove_prefix(lp_tag, "fossa-") |
3486 | 948 | lst = somerville.get(platform, []) | 1126 | lst = somerville.get(platform, []) |
3487 | 949 | lst.append(market_name) | 1127 | lst.append(market_name) |
3488 | 950 | somerville[platform] = lst | 1128 | somerville[platform] = lst |
3491 | 951 | elif 'hp' in customer: | 1129 | elif "hp" in customer: |
3492 | 952 | if 'stella' in args.skip: | 1130 | if "stella" in args.skip: |
3493 | 953 | continue | 1131 | continue |
3494 | 954 | lst = stella.get(f"{group}-{codename}", []) | 1132 | lst = stella.get(f"{group}-{codename}", []) |
3495 | 955 | lst.append(market_name) | 1133 | lst.append(market_name) |
3496 | 956 | stella[f"{group}-{codename}"] = lst | 1134 | stella[f"{group}-{codename}"] = lst |
3499 | 957 | elif 'lenovo' in customer: | 1135 | elif "lenovo" in customer: |
3500 | 958 | if 'sutton' in args.skip: | 1136 | if "sutton" in args.skip: |
3501 | 959 | continue | 1137 | continue |
3502 | 960 | lst = sutton.get(f"{group}-{codename}", []) | 1138 | lst = sutton.get(f"{group}-{codename}", []) |
3503 | 961 | lst.append(market_name) | 1139 | lst.append(market_name) |
3504 | @@ -975,37 +1153,58 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3505 | 975 | info("Finding the corresponding PPAs...") | 1153 | info("Finding the corresponding PPAs...") |
3506 | 976 | 1154 | ||
3507 | 977 | for codename, v in somerville.items(): | 1155 | for codename, v in somerville.items(): |
3509 | 978 | pkg_name = 'oem-somerville-' + codename + '-meta' | 1156 | pkg_name = "oem-somerville-" + codename + "-meta" |
3510 | 979 | if args.only and pkg_name != args.only: | 1157 | if args.only and pkg_name != args.only: |
3511 | 980 | warning(f"Skip {pkg_name}") | 1158 | warning(f"Skip {pkg_name}") |
3512 | 981 | continue | 1159 | continue |
3513 | 982 | if pkg_name in args.skip: | 1160 | if pkg_name in args.skip: |
3514 | 983 | warning(f"Skip {pkg_name}") | 1161 | warning(f"Skip {pkg_name}") |
3515 | 984 | continue | 1162 | continue |
3517 | 985 | ppa_archive, ppa_version, fingerprint = search_ppa_and_version("somerville", None, codename, pkg_name) | 1163 | ppa_archive, ppa_version, fingerprint = search_ppa_and_version( |
3518 | 1164 | "somerville", None, codename, pkg_name | ||
3519 | 1165 | ) | ||
3520 | 986 | if ppa_archive is None: | 1166 | if ppa_archive is None: |
3522 | 987 | ppa_archive, ppa_version, fingerprint = search_ppa_and_version("somerville", None, codename, pkg_name, "somerville") | 1167 | ppa_archive, ppa_version, fingerprint = search_ppa_and_version( |
3523 | 1168 | "somerville", None, codename, pkg_name, "somerville" | ||
3524 | 1169 | ) | ||
3525 | 988 | if ppa_archive is None: | 1170 | if ppa_archive is None: |
3526 | 989 | critical(f"It can not find any private PPA that contains {pkg_name}.") | 1171 | critical(f"It can not find any private PPA that contains {pkg_name}.") |
3527 | 990 | exit(1) | 1172 | exit(1) |
3528 | 991 | 1173 | ||
3529 | 992 | bootstrap_version, real_version = get_debian_version_from_git(pkg_name) | 1174 | bootstrap_version, real_version = get_debian_version_from_git(pkg_name) |
3530 | 993 | if ppa_version != real_version: | 1175 | if ppa_version != real_version: |
3532 | 994 | warning(f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}.") | 1176 | warning( |
3533 | 1177 | f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}." | ||
3534 | 1178 | ) | ||
3535 | 995 | info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}.") | 1179 | info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}.") |
3536 | 996 | 1180 | ||
3546 | 997 | ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(pkg_name, "somerville", codename) | 1181 | ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive( |
3547 | 998 | 1182 | pkg_name, "somerville", codename | |
3548 | 999 | pkgInfo[pkg_name] = PkgInfo(ppa_archive=ppa_archive, ppa_version=ppa_version, | 1183 | ) |
3549 | 1000 | bootstrap_version=bootstrap_version, real_version=real_version, git_version=real_version, | 1184 | |
3550 | 1001 | old_desc="", new_desc="", fingerprint=fingerprint, | 1185 | pkgInfo[pkg_name] = PkgInfo( |
3551 | 1002 | staging_archive="", staging_version="", | 1186 | ppa_archive=ppa_archive, |
3552 | 1003 | devel_archive="", devel_version="", | 1187 | ppa_version=ppa_version, |
3553 | 1004 | oem_archive=oem_archive, oem_version=oem_version, | 1188 | bootstrap_version=bootstrap_version, |
3554 | 1005 | ubuntu_version=ubuntu_version, proposed_version=proposed_version) | 1189 | real_version=real_version, |
3555 | 1190 | git_version=real_version, | ||
3556 | 1191 | old_desc="", | ||
3557 | 1192 | new_desc="", | ||
3558 | 1193 | fingerprint=fingerprint, | ||
3559 | 1194 | staging_archive="", | ||
3560 | 1195 | staging_version="", | ||
3561 | 1196 | devel_archive="", | ||
3562 | 1197 | devel_version="", | ||
3563 | 1198 | oem_archive=oem_archive, | ||
3564 | 1199 | oem_version=oem_version, | ||
3565 | 1200 | ubuntu_version=ubuntu_version, | ||
3566 | 1201 | proposed_version=proposed_version, | ||
3567 | 1202 | ) | ||
3568 | 1006 | 1203 | ||
3569 | 1007 | if check_private: | 1204 | if check_private: |
3571 | 1008 | staging_archive, staging_version = search_private_archive(pkg_name, "somerville", codename, index, config, branch="staging") | 1205 | staging_archive, staging_version = search_private_archive( |
3572 | 1206 | pkg_name, "somerville", codename, index, config, branch="staging" | ||
3573 | 1207 | ) | ||
3574 | 1009 | pkgInfo[pkg_name].staging_archive = staging_archive | 1208 | pkgInfo[pkg_name].staging_archive = staging_archive |
3575 | 1010 | if staging_version: | 1209 | if staging_version: |
3576 | 1011 | info(f"{pkg_name} {staging_version} exists in {staging_archive}.") | 1210 | info(f"{pkg_name} {staging_version} exists in {staging_archive}.") |
3577 | @@ -1013,7 +1212,9 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3578 | 1013 | else: | 1212 | else: |
3579 | 1014 | debug(f"{pkg_name} doesn't exist in {staging_archive} yet.") | 1213 | debug(f"{pkg_name} doesn't exist in {staging_archive} yet.") |
3580 | 1015 | 1214 | ||
3582 | 1016 | devel_archive, devel_version = search_private_archive(pkg_name, "somerville", codename, index, config, branch="devel") | 1215 | devel_archive, devel_version = search_private_archive( |
3583 | 1216 | pkg_name, "somerville", codename, index, config, branch="devel" | ||
3584 | 1217 | ) | ||
3585 | 1017 | pkgInfo[pkg_name].devel_archive = devel_archive | 1218 | pkgInfo[pkg_name].devel_archive = devel_archive |
3586 | 1018 | if devel_version: | 1219 | if devel_version: |
3587 | 1019 | info(f"{pkg_name} {devel_version} exists in {devel_archive}.") | 1220 | info(f"{pkg_name} {devel_version} exists in {devel_archive}.") |
3588 | @@ -1021,7 +1222,7 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3589 | 1021 | else: | 1222 | else: |
3590 | 1022 | debug(f"{pkg_name} doesn't exist in {devel_archive} yet.") | 1223 | debug(f"{pkg_name} doesn't exist in {devel_archive} yet.") |
3591 | 1023 | 1224 | ||
3593 | 1024 | if pkg_name in pkgNamesInArchive and ''.join(v): | 1225 | if pkg_name in pkgNamesInArchive and "".join(v): |
3594 | 1025 | new_desc = _grouping_market_names(v) | 1226 | new_desc = _grouping_market_names(v) |
3595 | 1026 | if "Dell" not in new_desc: | 1227 | if "Dell" not in new_desc: |
3596 | 1027 | new_desc = "Dell " + new_desc | 1228 | new_desc = "Dell " + new_desc |
3597 | @@ -1029,37 +1230,56 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3598 | 1029 | pkgInfo[pkg_name].new_desc = new_desc | 1230 | pkgInfo[pkg_name].new_desc = new_desc |
3599 | 1030 | 1231 | ||
3600 | 1031 | for k, v in stella.items(): | 1232 | for k, v in stella.items(): |
3602 | 1032 | pkg_name = 'oem-stella.' + k + '-meta' | 1233 | pkg_name = "oem-stella." + k + "-meta" |
3603 | 1033 | if args.only and pkg_name != args.only: | 1234 | if args.only and pkg_name != args.only: |
3604 | 1034 | warning(f"Skip {pkg_name}") | 1235 | warning(f"Skip {pkg_name}") |
3605 | 1035 | continue | 1236 | continue |
3606 | 1036 | if pkg_name in args.skip: | 1237 | if pkg_name in args.skip: |
3607 | 1037 | warning(f"Skip {pkg_name}") | 1238 | warning(f"Skip {pkg_name}") |
3608 | 1038 | continue | 1239 | continue |
3610 | 1039 | group, codename = k.split('-', 1) | 1240 | group, codename = k.split("-", 1) |
3611 | 1040 | 1241 | ||
3613 | 1041 | ppa_archive, ppa_version, fingerprint = search_ppa_and_version("stella", group, codename, pkg_name) | 1242 | ppa_archive, ppa_version, fingerprint = search_ppa_and_version( |
3614 | 1243 | "stella", group, codename, pkg_name | ||
3615 | 1244 | ) | ||
3616 | 1042 | if ppa_archive is None: | 1245 | if ppa_archive is None: |
3617 | 1043 | critical(f"It can not find any private PPA that contains {pkg_name}.") | 1246 | critical(f"It can not find any private PPA that contains {pkg_name}.") |
3618 | 1044 | exit(1) | 1247 | exit(1) |
3619 | 1045 | 1248 | ||
3620 | 1046 | bootstrap_version, real_version = get_debian_version_from_git(pkg_name) | 1249 | bootstrap_version, real_version = get_debian_version_from_git(pkg_name) |
3621 | 1047 | if ppa_version != real_version: | 1250 | if ppa_version != real_version: |
3623 | 1048 | warning(f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}.") | 1251 | warning( |
3624 | 1252 | f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}." | ||
3625 | 1253 | ) | ||
3626 | 1049 | info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}") | 1254 | info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}") |
3627 | 1050 | 1255 | ||
3637 | 1051 | ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(pkg_name, "stella", group) | 1256 | ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive( |
3638 | 1052 | 1257 | pkg_name, "stella", group | |
3639 | 1053 | pkgInfo[pkg_name] = PkgInfo(ppa_archive=ppa_archive, ppa_version=ppa_version, | 1258 | ) |
3640 | 1054 | bootstrap_version=bootstrap_version, real_version=real_version, git_version=real_version, | 1259 | |
3641 | 1055 | old_desc="", new_desc="", fingerprint=fingerprint, | 1260 | pkgInfo[pkg_name] = PkgInfo( |
3642 | 1056 | staging_archive="", staging_version="", | 1261 | ppa_archive=ppa_archive, |
3643 | 1057 | devel_archive="", devel_version="", | 1262 | ppa_version=ppa_version, |
3644 | 1058 | oem_archive=oem_archive, oem_version=oem_version, | 1263 | bootstrap_version=bootstrap_version, |
3645 | 1059 | ubuntu_version=ubuntu_version, proposed_version=proposed_version) | 1264 | real_version=real_version, |
3646 | 1265 | git_version=real_version, | ||
3647 | 1266 | old_desc="", | ||
3648 | 1267 | new_desc="", | ||
3649 | 1268 | fingerprint=fingerprint, | ||
3650 | 1269 | staging_archive="", | ||
3651 | 1270 | staging_version="", | ||
3652 | 1271 | devel_archive="", | ||
3653 | 1272 | devel_version="", | ||
3654 | 1273 | oem_archive=oem_archive, | ||
3655 | 1274 | oem_version=oem_version, | ||
3656 | 1275 | ubuntu_version=ubuntu_version, | ||
3657 | 1276 | proposed_version=proposed_version, | ||
3658 | 1277 | ) | ||
3659 | 1060 | 1278 | ||
3660 | 1061 | if check_private: | 1279 | if check_private: |
3662 | 1062 | staging_archive, staging_version = search_private_archive(pkg_name, "stella", group, index, config, branch="staging") | 1280 | staging_archive, staging_version = search_private_archive( |
3663 | 1281 | pkg_name, "stella", group, index, config, branch="staging" | ||
3664 | 1282 | ) | ||
3665 | 1063 | pkgInfo[pkg_name].staging_archive = staging_archive | 1283 | pkgInfo[pkg_name].staging_archive = staging_archive |
3666 | 1064 | if staging_version: | 1284 | if staging_version: |
3667 | 1065 | info(f"{pkg_name} {staging_version} exists in {staging_archive}.") | 1285 | info(f"{pkg_name} {staging_version} exists in {staging_archive}.") |
3668 | @@ -1067,7 +1287,9 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3669 | 1067 | else: | 1287 | else: |
3670 | 1068 | debug(f"{pkg_name} doesn't exist in {staging_archive} yet.") | 1288 | debug(f"{pkg_name} doesn't exist in {staging_archive} yet.") |
3671 | 1069 | 1289 | ||
3673 | 1070 | devel_archive, devel_version = search_private_archive(pkg_name, "stella", group, index, config, branch="devel") | 1290 | devel_archive, devel_version = search_private_archive( |
3674 | 1291 | pkg_name, "stella", group, index, config, branch="devel" | ||
3675 | 1292 | ) | ||
3676 | 1071 | pkgInfo[pkg_name].devel_archive = devel_archive | 1293 | pkgInfo[pkg_name].devel_archive = devel_archive |
3677 | 1072 | if devel_version: | 1294 | if devel_version: |
3678 | 1073 | info(f"{pkg_name} {devel_version} exists in {devel_archive}.") | 1295 | info(f"{pkg_name} {devel_version} exists in {devel_archive}.") |
3679 | @@ -1075,45 +1297,66 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3680 | 1075 | else: | 1297 | else: |
3681 | 1076 | debug(f"{pkg_name} doesn't exist in {devel_archive} yet.") | 1298 | debug(f"{pkg_name} doesn't exist in {devel_archive} yet.") |
3682 | 1077 | 1299 | ||
3684 | 1078 | if pkg_name in pkgNamesInArchive and ''.join(v): | 1300 | if pkg_name in pkgNamesInArchive and "".join(v): |
3685 | 1079 | new_desc = _grouping_market_names(v, maxsplit=2) | 1301 | new_desc = _grouping_market_names(v, maxsplit=2) |
3686 | 1080 | if "HP" not in new_desc: | 1302 | if "HP" not in new_desc: |
3687 | 1081 | new_desc = "HP " + new_desc | 1303 | new_desc = "HP " + new_desc |
3689 | 1082 | pkgInfo[pkg_name].old_desc = f"Stella {group.title()} {codename.title()} platform" | 1304 | pkgInfo[ |
3690 | 1305 | pkg_name | ||
3691 | 1306 | ].old_desc = f"Stella {group.title()} {codename.title()} platform" | ||
3692 | 1083 | pkgInfo[pkg_name].new_desc = new_desc | 1307 | pkgInfo[pkg_name].new_desc = new_desc |
3693 | 1084 | 1308 | ||
3694 | 1085 | for k, v in sutton.items(): | 1309 | for k, v in sutton.items(): |
3696 | 1086 | pkg_name = 'oem-sutton.' + k + '-meta' | 1310 | pkg_name = "oem-sutton." + k + "-meta" |
3697 | 1087 | if args.only and pkg_name != args.only: | 1311 | if args.only and pkg_name != args.only: |
3698 | 1088 | warning(f"Skip {pkg_name}") | 1312 | warning(f"Skip {pkg_name}") |
3699 | 1089 | continue | 1313 | continue |
3700 | 1090 | if pkg_name in args.skip: | 1314 | if pkg_name in args.skip: |
3701 | 1091 | warning(f"Skip {pkg_name}") | 1315 | warning(f"Skip {pkg_name}") |
3702 | 1092 | continue | 1316 | continue |
3704 | 1093 | group, codename = k.split('-', 1) | 1317 | group, codename = k.split("-", 1) |
3705 | 1094 | 1318 | ||
3707 | 1095 | ppa_archive, ppa_version, fingerprint = search_ppa_and_version("sutton", group, codename, pkg_name) | 1319 | ppa_archive, ppa_version, fingerprint = search_ppa_and_version( |
3708 | 1320 | "sutton", group, codename, pkg_name | ||
3709 | 1321 | ) | ||
3710 | 1096 | if ppa_archive is None: | 1322 | if ppa_archive is None: |
3711 | 1097 | critical(f"It can not find any private PPA that contains {pkg_name}.") | 1323 | critical(f"It can not find any private PPA that contains {pkg_name}.") |
3712 | 1098 | exit(1) | 1324 | exit(1) |
3713 | 1099 | 1325 | ||
3714 | 1100 | bootstrap_version, real_version = get_debian_version_from_git(pkg_name) | 1326 | bootstrap_version, real_version = get_debian_version_from_git(pkg_name) |
3715 | 1101 | if ppa_version != real_version: | 1327 | if ppa_version != real_version: |
3717 | 1102 | warning(f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}.") | 1328 | warning( |
3718 | 1329 | f"{pkg_name}'s version in Git is {real_version}, but the version in ppa:oem-archive/{ppa_archive} is {ppa_version}." | ||
3719 | 1330 | ) | ||
3720 | 1103 | info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}") | 1331 | info(f"{pkg_name} {ppa_version} exists in ppa:oem-archive/{ppa_archive}") |
3721 | 1104 | 1332 | ||
3731 | 1105 | ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive(pkg_name, "sutton", group) | 1333 | ubuntu_version, proposed_version, oem_version, oem_archive = search_public_archive( |
3732 | 1106 | 1334 | pkg_name, "sutton", group | |
3733 | 1107 | pkgInfo[pkg_name] = PkgInfo(ppa_archive=ppa_archive, ppa_version=ppa_version, | 1335 | ) |
3734 | 1108 | bootstrap_version=bootstrap_version, real_version=real_version, git_version=real_version, | 1336 | |
3735 | 1109 | old_desc="", new_desc="", fingerprint=fingerprint, | 1337 | pkgInfo[pkg_name] = PkgInfo( |
3736 | 1110 | staging_archive="", staging_version="", | 1338 | ppa_archive=ppa_archive, |
3737 | 1111 | devel_archive="", devel_version="", | 1339 | ppa_version=ppa_version, |
3738 | 1112 | oem_archive=oem_archive, oem_version=oem_version, | 1340 | bootstrap_version=bootstrap_version, |
3739 | 1113 | ubuntu_version=ubuntu_version, proposed_version=proposed_version) | 1341 | real_version=real_version, |
3740 | 1342 | git_version=real_version, | ||
3741 | 1343 | old_desc="", | ||
3742 | 1344 | new_desc="", | ||
3743 | 1345 | fingerprint=fingerprint, | ||
3744 | 1346 | staging_archive="", | ||
3745 | 1347 | staging_version="", | ||
3746 | 1348 | devel_archive="", | ||
3747 | 1349 | devel_version="", | ||
3748 | 1350 | oem_archive=oem_archive, | ||
3749 | 1351 | oem_version=oem_version, | ||
3750 | 1352 | ubuntu_version=ubuntu_version, | ||
3751 | 1353 | proposed_version=proposed_version, | ||
3752 | 1354 | ) | ||
3753 | 1114 | 1355 | ||
3754 | 1115 | if check_private: | 1356 | if check_private: |
3756 | 1116 | staging_archive, staging_version = search_private_archive(pkg_name, "sutton", group, index, config, branch="staging") | 1357 | staging_archive, staging_version = search_private_archive( |
3757 | 1358 | pkg_name, "sutton", group, index, config, branch="staging" | ||
3758 | 1359 | ) | ||
3759 | 1117 | pkgInfo[pkg_name].staging_archive = staging_archive | 1360 | pkgInfo[pkg_name].staging_archive = staging_archive |
3760 | 1118 | if staging_version: | 1361 | if staging_version: |
3761 | 1119 | info(f"{pkg_name} {staging_version} exists in {staging_archive}.") | 1362 | info(f"{pkg_name} {staging_version} exists in {staging_archive}.") |
3762 | @@ -1121,7 +1364,9 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3763 | 1121 | else: | 1364 | else: |
3764 | 1122 | debug(f"{pkg_name} doesn't exist in {staging_archive} yet.") | 1365 | debug(f"{pkg_name} doesn't exist in {staging_archive} yet.") |
3765 | 1123 | 1366 | ||
3767 | 1124 | devel_archive, devel_version = search_private_archive(pkg_name, "sutton", group, index, config, branch="devel") | 1367 | devel_archive, devel_version = search_private_archive( |
3768 | 1368 | pkg_name, "sutton", group, index, config, branch="devel" | ||
3769 | 1369 | ) | ||
3770 | 1125 | pkgInfo[pkg_name].devel_archive = devel_archive | 1370 | pkgInfo[pkg_name].devel_archive = devel_archive |
3771 | 1126 | if devel_version: | 1371 | if devel_version: |
3772 | 1127 | info(f"{pkg_name} {devel_version} exists in {devel_archive}.") | 1372 | info(f"{pkg_name} {devel_version} exists in {devel_archive}.") |
3773 | @@ -1129,11 +1374,13 @@ def collect_pkg_info(data, check_private: bool = False, index=None, config=None) | |||
3774 | 1129 | else: | 1374 | else: |
3775 | 1130 | debug(f"{pkg_name} doesn't exist in {devel_archive} yet.") | 1375 | debug(f"{pkg_name} doesn't exist in {devel_archive} yet.") |
3776 | 1131 | 1376 | ||
3778 | 1132 | if pkg_name in pkgNamesInArchive and ''.join(v): | 1377 | if pkg_name in pkgNamesInArchive and "".join(v): |
3779 | 1133 | new_desc = _grouping_market_names(v) | 1378 | new_desc = _grouping_market_names(v) |
3780 | 1134 | if "Lenovo" not in new_desc: | 1379 | if "Lenovo" not in new_desc: |
3781 | 1135 | new_desc = "Lenovo " + new_desc | 1380 | new_desc = "Lenovo " + new_desc |
3783 | 1136 | pkgInfo[pkg_name].old_desc = f"Sutton {group.title()} {codename.title()} platform" | 1381 | pkgInfo[ |
3784 | 1382 | pkg_name | ||
3785 | 1383 | ].old_desc = f"Sutton {group.title()} {codename.title()} platform" | ||
3786 | 1137 | pkgInfo[pkg_name].new_desc = new_desc | 1384 | pkgInfo[pkg_name].new_desc = new_desc |
3787 | 1138 | 1385 | ||
3788 | 1139 | debug(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder)) | 1386 | debug(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder)) |
3789 | @@ -1145,22 +1392,24 @@ def load_pkg_info(data) -> dict: | |||
3790 | 1145 | pkgInfo = dict() | 1392 | pkgInfo = dict() |
3791 | 1146 | data = json.load(data) | 1393 | data = json.load(data) |
3792 | 1147 | for meta in data.keys(): | 1394 | for meta in data.keys(): |
3809 | 1148 | pkgInfo[meta] = PkgInfo(ppa_archive=data[meta]['ppa_archive'], | 1395 | pkgInfo[meta] = PkgInfo( |
3810 | 1149 | ppa_version=data[meta]['ppa_version'], | 1396 | ppa_archive=data[meta]["ppa_archive"], |
3811 | 1150 | git_version=data[meta]['git_version'], | 1397 | ppa_version=data[meta]["ppa_version"], |
3812 | 1151 | bootstrap_version=data[meta]['bootstrap_version'], | 1398 | git_version=data[meta]["git_version"], |
3813 | 1152 | real_version=data[meta]['real_version'], | 1399 | bootstrap_version=data[meta]["bootstrap_version"], |
3814 | 1153 | old_desc=data[meta]['old_desc'], | 1400 | real_version=data[meta]["real_version"], |
3815 | 1154 | new_desc=data[meta]['new_desc'], | 1401 | old_desc=data[meta]["old_desc"], |
3816 | 1155 | fingerprint=data[meta]['fingerprint'], | 1402 | new_desc=data[meta]["new_desc"], |
3817 | 1156 | staging_archive=data[meta]['staging_archive'], | 1403 | fingerprint=data[meta]["fingerprint"], |
3818 | 1157 | staging_version=data[meta]['staging_version'], | 1404 | staging_archive=data[meta]["staging_archive"], |
3819 | 1158 | devel_archive=data[meta]['devel_archive'], | 1405 | staging_version=data[meta]["staging_version"], |
3820 | 1159 | devel_version=data[meta]['devel_version'], | 1406 | devel_archive=data[meta]["devel_archive"], |
3821 | 1160 | oem_archive=data[meta]['oem_archive'], | 1407 | devel_version=data[meta]["devel_version"], |
3822 | 1161 | oem_version=data[meta]['oem_version'], | 1408 | oem_archive=data[meta]["oem_archive"], |
3823 | 1162 | ubuntu_version=data[meta]['ubuntu_version'], | 1409 | oem_version=data[meta]["oem_version"], |
3824 | 1163 | proposed_version=data[meta]['proposed_version']) | 1410 | ubuntu_version=data[meta]["ubuntu_version"], |
3825 | 1411 | proposed_version=data[meta]["proposed_version"], | ||
3826 | 1412 | ) | ||
3827 | 1164 | 1413 | ||
3828 | 1165 | debug(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder)) | 1414 | debug(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder)) |
3829 | 1166 | return pkgInfo | 1415 | return pkgInfo |
3830 | @@ -1184,8 +1433,8 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo | |||
3831 | 1184 | if not result: | 1433 | if not result: |
3832 | 1185 | return | 1434 | return |
3833 | 1186 | 1435 | ||
3836 | 1187 | if '.' in result.group(1): | 1436 | if "." in result.group(1): |
3837 | 1188 | project, group = result.group(1).split('.') | 1437 | project, group = result.group(1).split(".") |
3838 | 1189 | else: | 1438 | else: |
3839 | 1190 | project = result.group(1) | 1439 | project = result.group(1) |
3840 | 1191 | group = None | 1440 | group = None |
3841 | @@ -1203,15 +1452,34 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo | |||
3842 | 1203 | else: | 1452 | else: |
3843 | 1204 | branch = f"{platform}-focal-oem" | 1453 | branch = f"{platform}-focal-oem" |
3844 | 1205 | 1454 | ||
3846 | 1206 | git_command = ("git", "clone", "--depth", "1", "-b", branch, f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", pkg_name) | 1455 | git_command = ( |
3847 | 1456 | "git", | ||
3848 | 1457 | "clone", | ||
3849 | 1458 | "--depth", | ||
3850 | 1459 | "1", | ||
3851 | 1460 | "-b", | ||
3852 | 1461 | branch, | ||
3853 | 1462 | f"https://git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", | ||
3854 | 1463 | pkg_name, | ||
3855 | 1464 | ) | ||
3856 | 1207 | 1465 | ||
3857 | 1208 | with TemporaryDirectory() as tmpdir: | 1466 | with TemporaryDirectory() as tmpdir: |
3858 | 1209 | messages = list() | 1467 | messages = list() |
3859 | 1210 | os.chdir(tmpdir) | 1468 | os.chdir(tmpdir) |
3860 | 1211 | _run_command(git_command) | 1469 | _run_command(git_command) |
3862 | 1212 | git_version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"{pkg_name}/debian/changelog"]) | 1470 | git_version, _, _ = _run_command( |
3863 | 1471 | [ | ||
3864 | 1472 | "dpkg-parsechangelog", | ||
3865 | 1473 | "--show-field", | ||
3866 | 1474 | "Version", | ||
3867 | 1475 | "-l", | ||
3868 | 1476 | f"{pkg_name}/debian/changelog", | ||
3869 | 1477 | ] | ||
3870 | 1478 | ) | ||
3871 | 1213 | if git_version != pkg_info.ppa_version: | 1479 | if git_version != pkg_info.ppa_version: |
3873 | 1214 | critical(f"{pkg_name}'s version is {pkg_info.ppa_version} in ppa:oem-archive/{pkg_info.ppa_archive} but the version in Git repository is {git_version}.") | 1480 | critical( |
3874 | 1481 | f"{pkg_name}'s version is {pkg_info.ppa_version} in ppa:oem-archive/{pkg_info.ppa_archive} but the version in Git repository is {git_version}." | ||
3875 | 1482 | ) | ||
3876 | 1215 | exit(1) | 1483 | exit(1) |
3877 | 1216 | git_dir = os.path.join(tmpdir, pkg_name) | 1484 | git_dir = os.path.join(tmpdir, pkg_name) |
3878 | 1217 | 1485 | ||
3879 | @@ -1240,29 +1508,41 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo | |||
3880 | 1240 | return False | 1508 | return False |
3881 | 1241 | 1509 | ||
3882 | 1242 | # Prepare the changelog and commit the changes. | 1510 | # Prepare the changelog and commit the changes. |
3885 | 1243 | commit_message = 'Update the ' + ' and'.join(', '.join(messages).rsplit(',', 1)) + f' for {args.kernel}.' | 1511 | commit_message = ( |
3886 | 1244 | _run_command(['dch', '--increment', commit_message]) | 1512 | "Update the " |
3887 | 1513 | + " and".join(", ".join(messages).rsplit(",", 1)) | ||
3888 | 1514 | + f" for {args.kernel}." | ||
3889 | 1515 | ) | ||
3890 | 1516 | _run_command(["dch", "--increment", commit_message]) | ||
3891 | 1245 | _run_command(["git", "add", "debian/changelog"]) | 1517 | _run_command(["git", "add", "debian/changelog"]) |
3896 | 1246 | _run_command(['git', 'commit', '-a', '-m', f"{commit_message}\n\nUpdated by oem-scripts {oem_scripts.__version__:.2f}."]) | 1518 | _run_command( |
3897 | 1247 | 1519 | [ | |
3898 | 1248 | out, _, _ = _run_command(['git', 'show', '--color=always']) | 1520 | "git", |
3899 | 1249 | if out != b'': | 1521 | "commit", |
3900 | 1522 | "-a", | ||
3901 | 1523 | "-m", | ||
3902 | 1524 | f"{commit_message}\n\nUpdated by oem-scripts {oem_scripts.__version__:.2f}.", | ||
3903 | 1525 | ] | ||
3904 | 1526 | ) | ||
3905 | 1527 | |||
3906 | 1528 | out, _, _ = _run_command(["git", "show", "--color=always"]) | ||
3907 | 1529 | if out != b"": | ||
3908 | 1250 | debug(f"({pkg_name}:{branch}) $ git show") | 1530 | debug(f"({pkg_name}:{branch}) $ git show") |
3909 | 1251 | debug(out) | 1531 | debug(out) |
3910 | 1252 | 1532 | ||
3911 | 1253 | # Run autopkgtest | 1533 | # Run autopkgtest |
3912 | 1254 | if args.autopkgtest: | 1534 | if args.autopkgtest: |
3914 | 1255 | with open(f'{pkg_name}.list', 'r') as f: | 1535 | with open(f"{pkg_name}.list", "r") as f: |
3915 | 1256 | source_list = f.read().strip() | 1536 | source_list = f.read().strip() |
3916 | 1257 | 1537 | ||
3917 | 1258 | archives = set() | 1538 | archives = set() |
3918 | 1259 | archives.add(pkg_info.ppa_archive) | 1539 | archives.add(pkg_info.ppa_archive) |
3919 | 1260 | 1540 | ||
3921 | 1261 | if project == 'somerville': | 1541 | if project == "somerville": |
3922 | 1262 | common_archive = oem_archive.getPPAByName(name=project) | 1542 | common_archive = oem_archive.getPPAByName(name=project) |
3923 | 1263 | fingerprint = common_archive.signing_key_fingerprint | 1543 | fingerprint = common_archive.signing_key_fingerprint |
3924 | 1264 | archives.add(f"{project}") | 1544 | archives.add(f"{project}") |
3926 | 1265 | elif project == 'stella' or project == 'sutton': | 1545 | elif project == "stella" or project == "sutton": |
3927 | 1266 | common_archive = oem_archive.getPPAByName(name=f"{project}-ouagadougou") | 1546 | common_archive = oem_archive.getPPAByName(name=f"{project}-ouagadougou") |
3928 | 1267 | fingerprint = common_archive.signing_key_fingerprint | 1547 | fingerprint = common_archive.signing_key_fingerprint |
3929 | 1268 | archives.add(f"{project}-ouagadougou") | 1548 | archives.add(f"{project}-ouagadougou") |
3930 | @@ -1286,8 +1566,9 @@ def deal_with_meta_git(pkg_name: str, pkg_info: PkgInfo, bootstrap: bool) -> boo | |||
3931 | 1286 | if f"oem-archive/{ppa}/ubuntu" in url: | 1566 | if f"oem-archive/{ppa}/ubuntu" in url: |
3932 | 1287 | source_list += "\ndeb " + url + " focal main" | 1567 | source_list += "\ndeb " + url + " focal main" |
3933 | 1288 | 1568 | ||
3936 | 1289 | with open(f'autopkgtest-{pkg_name}-auto', 'w') as f: | 1569 | with open(f"autopkgtest-{pkg_name}-auto", "w") as f: |
3937 | 1290 | f.write(f'''#!/bin/bash | 1570 | f.write( |
3938 | 1571 | f"""#!/bin/bash | ||
3939 | 1291 | 1572 | ||
3940 | 1292 | set -euo pipefail | 1573 | set -euo pipefail |
3941 | 1293 | IFS=$'\n\t' | 1574 | IFS=$'\n\t' |
3942 | @@ -1319,54 +1600,83 @@ true | |||
3943 | 1319 | ENDLINE | 1600 | ENDLINE |
3944 | 1320 | chmod 755 "\\$root/usr/sbin/update-grub" | 1601 | chmod 755 "\\$root/usr/sbin/update-grub" |
3945 | 1321 | END | 1602 | END |
3947 | 1322 | ''') | 1603 | """ |
3948 | 1604 | ) | ||
3949 | 1323 | if args.debug: | 1605 | if args.debug: |
3952 | 1324 | _run_command(['cat', f'autopkgtest-{pkg_name}-auto']) | 1606 | _run_command(["cat", f"autopkgtest-{pkg_name}-auto"]) |
3953 | 1325 | os.chmod(f'autopkgtest-{pkg_name}-auto', 0o755) | 1607 | os.chmod(f"autopkgtest-{pkg_name}-auto", 0o755) |
3954 | 1326 | info(f"({pkg_name}:{branch}) $ run-autopkgtest lxc focal -C") | 1608 | info(f"({pkg_name}:{branch}) $ run-autopkgtest lxc focal -C") |
3958 | 1327 | _run_command(['run-autopkgtest', 'lxc', 'focal', '-C']) | 1609 | _run_command(["run-autopkgtest", "lxc", "focal", "-C"]) |
3959 | 1328 | _run_command(['git', 'reset', '--hard', 'HEAD']) | 1610 | _run_command(["git", "reset", "--hard", "HEAD"]) |
3960 | 1329 | _run_command(['git', 'clean', '-x', '-d', '-f']) | 1611 | _run_command(["git", "clean", "-x", "-d", "-f"]) |
3961 | 1330 | 1612 | ||
3962 | 1331 | # Don't use UNRELEASED in the real meta. | 1613 | # Don't use UNRELEASED in the real meta. |
3963 | 1332 | if not bootstrap: | 1614 | if not bootstrap: |
3966 | 1333 | _run_command(['sed', '-i', 's/UNRELEASED/focal/', 'debian/changelog']) | 1615 | _run_command(["sed", "-i", "s/UNRELEASED/focal/", "debian/changelog"]) |
3967 | 1334 | _run_command(['git', 'commit', '-a', '--amend', '--no-edit']) | 1616 | _run_command(["git", "commit", "-a", "--amend", "--no-edit"]) |
3968 | 1335 | 1617 | ||
3969 | 1336 | # Tag and find it out. | 1618 | # Tag and find it out. |
3972 | 1337 | out, _, _ = _run_command(['gbp', 'tag']) | 1619 | out, _, _ = _run_command(["gbp", "tag"]) |
3973 | 1338 | if out != b'': | 1620 | if out != b"": |
3974 | 1339 | info(out) | 1621 | info(out) |
3977 | 1340 | out, _, _ = _run_command(['git', 'describe']) | 1622 | out, _, _ = _run_command(["git", "describe"]) |
3978 | 1341 | if out != b'': | 1623 | if out != b"": |
3979 | 1342 | tag = out.strip() | 1624 | tag = out.strip() |
3980 | 1343 | info(tag) | 1625 | info(tag) |
3981 | 1344 | 1626 | ||
3982 | 1345 | # Build Debian binary packages | 1627 | # Build Debian binary packages |
3986 | 1346 | _run_command(['gbp', 'buildpackage', '-us', '-uc']) | 1628 | _run_command(["gbp", "buildpackage", "-us", "-uc"]) |
3987 | 1347 | _run_command(['git', 'reset', '--hard', 'HEAD']) | 1629 | _run_command(["git", "reset", "--hard", "HEAD"]) |
3988 | 1348 | _run_command(['git', 'clean', '-x', '-d', '-f']) | 1630 | _run_command(["git", "clean", "-x", "-d", "-f"]) |
3989 | 1349 | 1631 | ||
3990 | 1350 | # Build Debian source packages | 1632 | # Build Debian source packages |
3994 | 1351 | _run_command(['gbp', 'buildpackage', '-S', '-us', '-uc']) | 1633 | _run_command(["gbp", "buildpackage", "-S", "-us", "-uc"]) |
3995 | 1352 | _run_command(['git', 'reset', '--hard', 'HEAD']) | 1634 | _run_command(["git", "reset", "--hard", "HEAD"]) |
3996 | 1353 | _run_command(['git', 'clean', '-x', '-d', '-f']) | 1635 | _run_command(["git", "clean", "-x", "-d", "-f"]) |
3997 | 1354 | 1636 | ||
3998 | 1355 | # Show the commit | 1637 | # Show the commit |
4001 | 1356 | out, _, _ = _run_command(['git', 'show', '--color=always']) | 1638 | out, _, _ = _run_command(["git", "show", "--color=always"]) |
4002 | 1357 | if out != b'': | 1639 | if out != b"": |
4003 | 1358 | warning(f"({pkg_name}:{branch}) $ git show") | 1640 | warning(f"({pkg_name}:{branch}) $ git show") |
4004 | 1359 | print(out) | 1641 | print(out) |
4007 | 1360 | version, _, _ = _run_command(["dpkg-parsechangelog", "--show-field", "Version", "-l", f"debian/changelog"]) | 1642 | version, _, _ = _run_command( |
4008 | 1361 | if not args.dry_run and yes_or_ask(args.yes, f"Would you like to commit and push the changes of {version} into {pkg_name}'s git {branch} branch?"): | 1643 | [ |
4009 | 1644 | "dpkg-parsechangelog", | ||
4010 | 1645 | "--show-field", | ||
4011 | 1646 | "Version", | ||
4012 | 1647 | "-l", | ||
4013 | 1648 | f"debian/changelog", | ||
4014 | 1649 | ] | ||
4015 | 1650 | ) | ||
4016 | 1651 | if not args.dry_run and yes_or_ask( | ||
4017 | 1652 | args.yes, | ||
4018 | 1653 | f"Would you like to commit and push the changes of {version} into {pkg_name}'s git {branch} branch?", | ||
4019 | 1654 | ): | ||
4020 | 1362 | os.chdir(git_dir) | 1655 | os.chdir(git_dir) |
4028 | 1363 | _run_command(['git', 'remote', 'add', 'oem-solutions-engineers', f"git+ssh://{lp.me.name}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta"]) | 1656 | _run_command( |
4029 | 1364 | _run_command(['git', 'push', 'oem-solutions-engineers']) | 1657 | [ |
4030 | 1365 | _run_command(['git', 'push', 'oem-solutions-engineers', tag]) | 1658 | "git", |
4031 | 1366 | if not args.dry_run and yes_or_ask(args.yes, f"Would you like to dput Debian source package into ppa:oem-archive/{pkg_info.ppa_archive}?"): | 1659 | "remote", |
4032 | 1367 | os.chdir(os.path.join(git_dir, '..')) | 1660 | "add", |
4033 | 1368 | _run_command(['debsign', f'{pkg_name}_{version}_source.changes']) | 1661 | "oem-solutions-engineers", |
4034 | 1369 | _run_command(['dput', f'ppa:oem-archive/{pkg_info.ppa_archive}', f'{pkg_name}_{version}_source.changes']) | 1662 | f"git+ssh://{lp.me.name}@git.launchpad.net/~oem-solutions-engineers/pc-enablement/+git/oem-{project}-projects-meta", |
4035 | 1663 | ] | ||
4036 | 1664 | ) | ||
4037 | 1665 | _run_command(["git", "push", "oem-solutions-engineers"]) | ||
4038 | 1666 | _run_command(["git", "push", "oem-solutions-engineers", tag]) | ||
4039 | 1667 | if not args.dry_run and yes_or_ask( | ||
4040 | 1668 | args.yes, | ||
4041 | 1669 | f"Would you like to dput Debian source package into ppa:oem-archive/{pkg_info.ppa_archive}?", | ||
4042 | 1670 | ): | ||
4043 | 1671 | os.chdir(os.path.join(git_dir, "..")) | ||
4044 | 1672 | _run_command(["debsign", f"{pkg_name}_{version}_source.changes"]) | ||
4045 | 1673 | _run_command( | ||
4046 | 1674 | [ | ||
4047 | 1675 | "dput", | ||
4048 | 1676 | f"ppa:oem-archive/{pkg_info.ppa_archive}", | ||
4049 | 1677 | f"{pkg_name}_{version}_source.changes", | ||
4050 | 1678 | ] | ||
4051 | 1679 | ) | ||
4052 | 1370 | 1680 | ||
4053 | 1371 | 1681 | ||
4054 | 1372 | def check_meta_git(pkg_name: str, pkg_info: PkgInfo, skip_bootstrap: False) -> None: | 1682 | def check_meta_git(pkg_name: str, pkg_info: PkgInfo, skip_bootstrap: False) -> None: |
4055 | @@ -1383,84 +1693,119 @@ def check_meta_git(pkg_name: str, pkg_info: PkgInfo, skip_bootstrap: False) -> N | |||
4056 | 1383 | 1693 | ||
4057 | 1384 | cache = apt_pkg.Cache(progress=None) | 1694 | cache = apt_pkg.Cache(progress=None) |
4058 | 1385 | 1695 | ||
4060 | 1386 | if args.subcommand == 'list': | 1696 | if args.subcommand == "list": |
4061 | 1387 | for name in get_oem_meta_packages(cache): | 1697 | for name in get_oem_meta_packages(cache): |
4062 | 1388 | print(name) | 1698 | print(name) |
4064 | 1389 | elif args.subcommand == 'subscribe': | 1699 | elif args.subcommand == "subscribe": |
4065 | 1390 | for name in get_oem_meta_packages(cache): | 1700 | for name in get_oem_meta_packages(cache): |
4066 | 1391 | info(f"Checking the subscriptions for {name}...") | 1701 | info(f"Checking the subscriptions for {name}...") |
4069 | 1392 | source = lp.distributions['ubuntu'].getSourcePackage(name=name) | 1702 | source = lp.distributions["ubuntu"].getSourcePackage(name=name) |
4070 | 1393 | if 'oem-solutions-engineers' in map(lambda x: x.subscriber.name, source.getSubscriptions()): | 1703 | if "oem-solutions-engineers" in map( |
4071 | 1704 | lambda x: x.subscriber.name, source.getSubscriptions() | ||
4072 | 1705 | ): | ||
4073 | 1394 | info(f"ubuntu/{name} has subscribed oem-solutions-engineers.") | 1706 | info(f"ubuntu/{name} has subscribed oem-solutions-engineers.") |
4074 | 1395 | continue | 1707 | continue |
4075 | 1396 | warning(f"ubuntu/{name} didn't subscribe oem-solutions-engineers yet.") | 1708 | warning(f"ubuntu/{name} didn't subscribe oem-solutions-engineers yet.") |
4077 | 1397 | if yes_or_ask(args.yes, f"Would you like to subscribe 'oem-solutions-engineers' for ubuntu/{name}?"): | 1709 | if yes_or_ask( |
4078 | 1710 | args.yes, | ||
4079 | 1711 | f"Would you like to subscribe 'oem-solutions-engineers' for ubuntu/{name}?", | ||
4080 | 1712 | ): | ||
4081 | 1398 | try: | 1713 | try: |
4082 | 1399 | # When a person is subscribed to a source package, one actually subscribe all bugs for it. | 1714 | # When a person is subscribed to a source package, one actually subscribe all bugs for it. |
4084 | 1400 | source.addBugSubscription(subscriber=lp.people['oem-solutions-engineers']) | 1715 | source.addBugSubscription( |
4085 | 1716 | subscriber=lp.people["oem-solutions-engineers"] | ||
4086 | 1717 | ) | ||
4087 | 1401 | except lazr.restfulclient.errors.Unauthorized as e: | 1718 | except lazr.restfulclient.errors.Unauthorized as e: |
4089 | 1402 | error(f"{lp.me.name} does not have permission to subscribe oem-solutions-engineers.") | 1719 | error( |
4090 | 1720 | f"{lp.me.name} does not have permission to subscribe oem-solutions-engineers." | ||
4091 | 1721 | ) | ||
4092 | 1403 | if args.verbose: | 1722 | if args.verbose: |
4093 | 1404 | print(e) | 1723 | print(e) |
4094 | 1405 | exit(1) | 1724 | exit(1) |
4097 | 1406 | elif args.subcommand == 'unsubscribe': | 1725 | elif args.subcommand == "unsubscribe": |
4098 | 1407 | source = lp.distributions['ubuntu'].getSourcePackage(name=args.pkgName) | 1726 | source = lp.distributions["ubuntu"].getSourcePackage(name=args.pkgName) |
4099 | 1408 | subscriptions = source.getSubscriptions() | 1727 | subscriptions = source.getSubscriptions() |
4100 | 1409 | for subscription in subscriptions: | 1728 | for subscription in subscriptions: |
4102 | 1410 | if subscription.subscriber.name == 'oem-solutions-engineers': | 1729 | if subscription.subscriber.name == "oem-solutions-engineers": |
4103 | 1411 | info(f"ubuntu/{args.pkgName} has subscribed oem-solutions-engineers.") | 1730 | info(f"ubuntu/{args.pkgName} has subscribed oem-solutions-engineers.") |
4105 | 1412 | if yes_or_ask(args.yes, f"Would you like to unsubscribe 'oem-solutions-engineers' for ubuntu/{args.pkgName}?"): | 1731 | if yes_or_ask( |
4106 | 1732 | args.yes, | ||
4107 | 1733 | f"Would you like to unsubscribe 'oem-solutions-engineers' for ubuntu/{args.pkgName}?", | ||
4108 | 1734 | ): | ||
4109 | 1413 | try: | 1735 | try: |
4111 | 1414 | source.removeBugSubscription(subscriber=lp.people['oem-solutions-engineers']) | 1736 | source.removeBugSubscription( |
4112 | 1737 | subscriber=lp.people["oem-solutions-engineers"] | ||
4113 | 1738 | ) | ||
4114 | 1415 | except lazr.restfulclient.errors.Unauthorized as e: | 1739 | except lazr.restfulclient.errors.Unauthorized as e: |
4116 | 1416 | error(f"{lp.me.name} does not have permission to unsubscribe oem-solutions-engineers.") | 1740 | error( |
4117 | 1741 | f"{lp.me.name} does not have permission to unsubscribe oem-solutions-engineers." | ||
4118 | 1742 | ) | ||
4119 | 1417 | if args.verbose: | 1743 | if args.verbose: |
4120 | 1418 | print(e) | 1744 | print(e) |
4121 | 1419 | exit(1) | 1745 | exit(1) |
4122 | 1420 | exit(0) | 1746 | exit(0) |
4126 | 1421 | elif args.subcommand == 'update': | 1747 | elif args.subcommand == "update": |
4127 | 1422 | oem_scripts_config_ini = os.path.join(os.environ["HOME"], | 1748 | oem_scripts_config_ini = os.path.join( |
4128 | 1423 | ".config/oem-scripts/config.ini") | 1749 | os.environ["HOME"], ".config/oem-scripts/config.ini" |
4129 | 1750 | ) | ||
4130 | 1424 | oem_scripts_config = ConfigParser() | 1751 | oem_scripts_config = ConfigParser() |
4131 | 1425 | oem_scripts_config.read(oem_scripts_config_ini) | 1752 | oem_scripts_config.read(oem_scripts_config_ini) |
4133 | 1426 | config = oem_scripts_config['private'] | 1753 | config = oem_scripts_config["private"] |
4134 | 1427 | if args.json: | 1754 | if args.json: |
4135 | 1428 | pkgInfo = load_pkg_info(args.json) | 1755 | pkgInfo = load_pkg_info(args.json) |
4136 | 1429 | elif args.meta: | 1756 | elif args.meta: |
4139 | 1430 | r = requests.get(config['archive'] + "/dists/", auth=(config['username'], config['password'])) | 1757 | r = requests.get( |
4140 | 1431 | pkgInfo = collect_pkg_info(args.meta, check_private=True, index=r.text, config=config) | 1758 | config["archive"] + "/dists/", auth=(config["username"], config["password"]) |
4141 | 1759 | ) | ||
4142 | 1760 | pkgInfo = collect_pkg_info( | ||
4143 | 1761 | args.meta, check_private=True, index=r.text, config=config | ||
4144 | 1762 | ) | ||
4145 | 1432 | else: | 1763 | else: |
4146 | 1433 | print("You needto use --json or --meta.") | 1764 | print("You needto use --json or --meta.") |
4147 | 1434 | exit(1) | 1765 | exit(1) |
4148 | 1435 | process_update_task(pkgInfo) | 1766 | process_update_task(pkgInfo) |
4152 | 1436 | elif args.subcommand == 'collect': | 1767 | elif args.subcommand == "collect": |
4153 | 1437 | oem_scripts_config_ini = os.path.join(os.environ["HOME"], | 1768 | oem_scripts_config_ini = os.path.join( |
4154 | 1438 | ".config/oem-scripts/config.ini") | 1769 | os.environ["HOME"], ".config/oem-scripts/config.ini" |
4155 | 1770 | ) | ||
4156 | 1439 | oem_scripts_config = ConfigParser() | 1771 | oem_scripts_config = ConfigParser() |
4157 | 1440 | oem_scripts_config.read(oem_scripts_config_ini) | 1772 | oem_scripts_config.read(oem_scripts_config_ini) |
4160 | 1441 | config = oem_scripts_config['private'] | 1773 | config = oem_scripts_config["private"] |
4161 | 1442 | r = requests.get(config['archive'] + "/dists/", auth=(config['username'], config['password'])) | 1774 | r = requests.get( |
4162 | 1775 | config["archive"] + "/dists/", auth=(config["username"], config["password"]) | ||
4163 | 1776 | ) | ||
4164 | 1443 | if args.json: | 1777 | if args.json: |
4166 | 1444 | pkgInfo = collect_pkg_info(args.json, check_private=True, index=r.text, config=config) | 1778 | pkgInfo = collect_pkg_info( |
4167 | 1779 | args.json, check_private=True, index=r.text, config=config | ||
4168 | 1780 | ) | ||
4169 | 1445 | elif args.meta: | 1781 | elif args.meta: |
4171 | 1446 | pkgInfo = collect_pkg_info(args.meta, check_private=True, index=r.text, config=config) | 1782 | pkgInfo = collect_pkg_info( |
4172 | 1783 | args.meta, check_private=True, index=r.text, config=config | ||
4173 | 1784 | ) | ||
4174 | 1447 | else: | 1785 | else: |
4175 | 1448 | print("You need to use --json or --meta.") | 1786 | print("You need to use --json or --meta.") |
4176 | 1449 | exit(1) | 1787 | exit(1) |
4178 | 1450 | args.output.write(json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder)) | 1788 | args.output.write( |
4179 | 1789 | json.dumps(pkgInfo, indent=4, sort_keys=True, cls=DataJSONEncoder) | ||
4180 | 1790 | ) | ||
4181 | 1451 | args.output.write("\n") | 1791 | args.output.write("\n") |
4185 | 1452 | elif args.subcommand == 'staging-copy': | 1792 | elif args.subcommand == "staging-copy": |
4186 | 1453 | oem_scripts_config_ini = os.path.join(os.environ["HOME"], | 1793 | oem_scripts_config_ini = os.path.join( |
4187 | 1454 | ".config/oem-scripts/config.ini") | 1794 | os.environ["HOME"], ".config/oem-scripts/config.ini" |
4188 | 1795 | ) | ||
4189 | 1455 | oem_scripts_config = ConfigParser() | 1796 | oem_scripts_config = ConfigParser() |
4190 | 1456 | oem_scripts_config.read(oem_scripts_config_ini) | 1797 | oem_scripts_config.read(oem_scripts_config_ini) |
4192 | 1457 | config = oem_scripts_config['private'] | 1798 | config = oem_scripts_config["private"] |
4193 | 1458 | 1799 | ||
4194 | 1459 | if args.json: | 1800 | if args.json: |
4195 | 1460 | pkgInfo = load_pkg_info(args.json) | 1801 | pkgInfo = load_pkg_info(args.json) |
4196 | 1461 | elif args.meta: | 1802 | elif args.meta: |
4199 | 1462 | r = requests.get(config['archive'] + "/dists/", auth=(config['username'], config['password'])) | 1803 | r = requests.get( |
4200 | 1463 | pkgInfo = collect_pkg_info(args.meta, check_private=True, index=r.text, config=config) | 1804 | config["archive"] + "/dists/", auth=(config["username"], config["password"]) |
4201 | 1805 | ) | ||
4202 | 1806 | pkgInfo = collect_pkg_info( | ||
4203 | 1807 | args.meta, check_private=True, index=r.text, config=config | ||
4204 | 1808 | ) | ||
4205 | 1464 | else: | 1809 | else: |
4206 | 1465 | print("You need to use --json or --meta.") | 1810 | print("You need to use --json or --meta.") |
4207 | 1466 | exit(1) | 1811 | exit(1) |
4208 | @@ -1470,12 +1815,21 @@ elif args.subcommand == 'staging-copy': | |||
4209 | 1470 | staging_locked = set() | 1815 | staging_locked = set() |
4210 | 1471 | for pkg_name in sorted(pkgInfo.keys()): | 1816 | for pkg_name in sorted(pkgInfo.keys()): |
4211 | 1472 | pkg_info = pkgInfo[pkg_name] | 1817 | pkg_info = pkgInfo[pkg_name] |
4213 | 1473 | debug(f"{pkg_name} ppa: {pkg_info.ppa_version}, devel: {pkg_info.devel_version}, staging: {pkg_info.staging_version}.") | 1818 | debug( |
4214 | 1819 | f"{pkg_name} ppa: {pkg_info.ppa_version}, devel: {pkg_info.devel_version}, staging: {pkg_info.staging_version}." | ||
4215 | 1820 | ) | ||
4216 | 1474 | if pkg_info.ppa_version != pkg_info.devel_version: | 1821 | if pkg_info.ppa_version != pkg_info.devel_version: |
4218 | 1475 | warning(f"{pkg_name} versions are not synced between ppa:oem-archive/{pkg_info.ppa_archive} and {pkg_info.devel_archive}.") | 1822 | warning( |
4219 | 1823 | f"{pkg_name} versions are not synced between ppa:oem-archive/{pkg_info.ppa_archive} and {pkg_info.devel_archive}." | ||
4220 | 1824 | ) | ||
4221 | 1476 | elif pkg_info.staging_version == pkg_info.devel_version: | 1825 | elif pkg_info.staging_version == pkg_info.devel_version: |
4224 | 1477 | info(f"{pkg_name} {pkg_info.devel_version} (devel) == {pkg_info.staging_version} (staging) so it doesn't need to copy.") | 1826 | info( |
4225 | 1478 | elif apt_pkg.version_compare(pkg_info.staging_version, pkg_info.devel_version) > 0: | 1827 | f"{pkg_name} {pkg_info.devel_version} (devel) == {pkg_info.staging_version} (staging) so it doesn't need to copy." |
4226 | 1828 | ) | ||
4227 | 1829 | elif ( | ||
4228 | 1830 | apt_pkg.version_compare(pkg_info.staging_version, pkg_info.devel_version) | ||
4229 | 1831 | > 0 | ||
4230 | 1832 | ): | ||
4231 | 1479 | critical(f"This should never happen.") | 1833 | critical(f"This should never happen.") |
4232 | 1480 | exit(1) | 1834 | exit(1) |
4233 | 1481 | else: | 1835 | else: |
4234 | @@ -1485,65 +1839,90 @@ elif args.subcommand == 'staging-copy': | |||
4235 | 1485 | jobs[identity] = list() | 1839 | jobs[identity] = list() |
4236 | 1486 | jobs[identity].append(pkg_name) | 1840 | jobs[identity].append(pkg_name) |
4237 | 1487 | debug(json.dumps(jobs, indent=4, sort_keys=True)) | 1841 | debug(json.dumps(jobs, indent=4, sort_keys=True)) |
4240 | 1488 | cloudberry = lp.projects['cloudberry'] | 1842 | cloudberry = lp.projects["cloudberry"] |
4241 | 1489 | assignee = lp.people['oem-archive'] | 1843 | assignee = lp.people["oem-archive"] |
4242 | 1490 | tasks = cloudberry.searchTasks( | 1844 | tasks = cloudberry.searchTasks( |
4245 | 1491 | status=['New', 'Triaged', 'Confirmed', 'In Progress', 'Fix Committed'], | 1845 | status=["New", "Triaged", "Confirmed", "In Progress", "Fix Committed"], |
4246 | 1492 | search_text='request of') | 1846 | search_text="request of", |
4247 | 1847 | ) | ||
4248 | 1493 | for task in tasks: | 1848 | for task in tasks: |
4249 | 1494 | bug = task.bug | 1849 | bug = task.bug |
4250 | 1495 | for staging in sorted(dest): | 1850 | for staging in sorted(dest): |
4252 | 1496 | if staging in bug.description and 'staging-lock' in bug.tags and 'cqa-verified-staging' not in bug.tags: | 1851 | if ( |
4253 | 1852 | staging in bug.description | ||
4254 | 1853 | and "staging-lock" in bug.tags | ||
4255 | 1854 | and "cqa-verified-staging" not in bug.tags | ||
4256 | 1855 | ): | ||
4257 | 1497 | debug(bug.description) | 1856 | debug(bug.description) |
4258 | 1498 | tags = ",".join(bug.tags) | 1857 | tags = ",".join(bug.tags) |
4261 | 1499 | for line in bug.description.split('\n'): | 1858 | for line in bug.description.split("\n"): |
4262 | 1500 | if line.startswith('Package: '): | 1859 | if line.startswith("Package: "): |
4263 | 1501 | package = line | 1860 | package = line |
4265 | 1502 | warning(f"https://bugs.launchpad.net/bugs/{bug.id}\n\t({staging})\n\t[{tags}]\n\t{bug.title}\n\t{package}") | 1861 | warning( |
4266 | 1862 | f"https://bugs.launchpad.net/bugs/{bug.id}\n\t({staging})\n\t[{tags}]\n\t{bug.title}\n\t{package}" | ||
4267 | 1863 | ) | ||
4268 | 1503 | staging_locked.add(staging) | 1864 | staging_locked.add(staging) |
4269 | 1504 | for job in jobs: | 1865 | for job in jobs: |
4271 | 1505 | source, dest = job.split(':') | 1866 | source, dest = job.split(":") |
4272 | 1506 | if dest and dest in staging_locked and not args.ignore_staging_lock: | 1867 | if dest and dest in staging_locked and not args.ignore_staging_lock: |
4274 | 1507 | warning(f"The following OEM metapackages will be skipped due to the staging-lock of {dest}.\n" + '\n'.join(jobs[job])) | 1868 | warning( |
4275 | 1869 | f"The following OEM metapackages will be skipped due to the staging-lock of {dest}.\n" | ||
4276 | 1870 | + "\n".join(jobs[job]) | ||
4277 | 1871 | ) | ||
4278 | 1508 | else: | 1872 | else: |
4279 | 1509 | title = f"request of copy_package [{source}]" | 1873 | title = f"request of copy_package [{source}]" |
4281 | 1510 | tags = ["archive-request", "via-request-script", f"oem-scripts-{oem_scripts.__version__:.2f}", "oem-metapackages"] | 1874 | tags = [ |
4282 | 1875 | "archive-request", | ||
4283 | 1876 | "via-request-script", | ||
4284 | 1877 | f"oem-scripts-{oem_scripts.__version__:.2f}", | ||
4285 | 1878 | "oem-metapackages", | ||
4286 | 1879 | ] | ||
4287 | 1511 | if args.dry_run: | 1880 | if args.dry_run: |
4288 | 1512 | info(f"TITLE: {title}") | 1881 | info(f"TITLE: {title}") |
4290 | 1513 | info("TAGS: " + ','.join(tags)) | 1882 | info("TAGS: " + ",".join(tags)) |
4291 | 1514 | else: | 1883 | else: |
4292 | 1515 | debug(f"TITLE: {title}") | 1884 | debug(f"TITLE: {title}") |
4294 | 1516 | debug("TAGS: " + ','.join(tags)) | 1885 | debug("TAGS: " + ",".join(tags)) |
4295 | 1517 | packages = list() | 1886 | packages = list() |
4296 | 1518 | for pkg_name in jobs[job]: | 1887 | for pkg_name in jobs[job]: |
4297 | 1519 | pkg_info = pkgInfo[pkg_name] | 1888 | pkg_info = pkgInfo[pkg_name] |
4298 | 1520 | packages.append(f"{pkg_name} (=={pkg_info.devel_version})") | 1889 | packages.append(f"{pkg_name} (=={pkg_info.devel_version})") |
4300 | 1521 | packages = ', '.join(packages) | 1890 | packages = ", ".join(packages) |
4301 | 1522 | distribution = "focal" | 1891 | distribution = "focal" |
4304 | 1523 | if dest.startswith('somerville'): | 1892 | if dest.startswith("somerville"): |
4305 | 1524 | component = remove_suffix(dest, f"-{distribution}-staging").replace('-fossa', '') | 1893 | component = remove_suffix(dest, f"-{distribution}-staging").replace( |
4306 | 1894 | "-fossa", "" | ||
4307 | 1895 | ) | ||
4308 | 1525 | else: | 1896 | else: |
4309 | 1526 | debug(dest) | 1897 | debug(dest) |
4311 | 1527 | project, group, _ = dest.split('-', 2) | 1898 | project, group, _ = dest.split("-", 2) |
4312 | 1528 | component = f"{project}.{group}" | 1899 | component = f"{project}.{group}" |
4314 | 1529 | production = distribution + '-' + component | 1900 | production = distribution + "-" + component |
4315 | 1530 | description = staging_copy_template.substitute( | 1901 | description = staging_copy_template.substitute( |
4316 | 1531 | source=source, | 1902 | source=source, |
4317 | 1532 | destination=dest, | 1903 | destination=dest, |
4318 | 1533 | packages=packages, | 1904 | packages=packages, |
4319 | 1534 | production=production, | 1905 | production=production, |
4322 | 1535 | username=config['username'], | 1906 | username=config["username"], |
4323 | 1536 | url=config['url'], | 1907 | url=config["url"], |
4324 | 1537 | distribution=distribution, | 1908 | distribution=distribution, |
4326 | 1538 | component=component) | 1909 | component=component, |
4327 | 1910 | ) | ||
4328 | 1539 | print(description) | 1911 | print(description) |
4331 | 1540 | if not args.dry_run and yes_or_ask(args.yes, f"Would you like to create a cloudberry bug to copy the {packages} from {source} to {dest}?"): | 1912 | if not args.dry_run and yes_or_ask( |
4332 | 1541 | bug = lp.bugs.createBug(description=description, target=cloudberry, title=title, tags=tags) | 1913 | args.yes, |
4333 | 1914 | f"Would you like to create a cloudberry bug to copy the {packages} from {source} to {dest}?", | ||
4334 | 1915 | ): | ||
4335 | 1916 | bug = lp.bugs.createBug( | ||
4336 | 1917 | description=description, target=cloudberry, title=title, tags=tags | ||
4337 | 1918 | ) | ||
4338 | 1542 | for task in bug.bug_tasks: | 1919 | for task in bug.bug_tasks: |
4340 | 1543 | task.importance = 'High' | 1920 | task.importance = "High" |
4341 | 1544 | task.assignee = assignee | 1921 | task.assignee = assignee |
4342 | 1545 | task.lp_save() | 1922 | task.lp_save() |
4343 | 1546 | bug.lp_save() | 1923 | bug.lp_save() |
4345 | 1547 | print(f"The cloudberry staging copy bug has been created on {bug.web_link}.\n") | 1924 | print( |
4346 | 1925 | f"The cloudberry staging copy bug has been created on {bug.web_link}.\n" | ||
4347 | 1926 | ) | ||
4348 | 1548 | else: | 1927 | else: |
4349 | 1549 | parser.print_help() | 1928 | parser.print_help() |
4350 | diff --git a/oem_scripts/LaunchpadLogin.py b/oem_scripts/LaunchpadLogin.py | |||
4351 | index 0583ad2..01bae8b 100644 | |||
4352 | --- a/oem_scripts/LaunchpadLogin.py | |||
4353 | +++ b/oem_scripts/LaunchpadLogin.py | |||
4354 | @@ -8,20 +8,28 @@ import logging | |||
4355 | 8 | import os | 8 | import os |
4356 | 9 | 9 | ||
4357 | 10 | 10 | ||
4359 | 11 | class ShutUpAndTakeMyTokenAuthorizationEngine(credentials.RequestTokenAuthorizationEngine): | 11 | class ShutUpAndTakeMyTokenAuthorizationEngine( |
4360 | 12 | credentials.RequestTokenAuthorizationEngine | ||
4361 | 13 | ): | ||
4362 | 12 | """This stub class prevents launchpadlib from nulling out consumer_name | 14 | """This stub class prevents launchpadlib from nulling out consumer_name |
4363 | 13 | in its demented campaign to force the use of desktop integration. """ | 15 | in its demented campaign to force the use of desktop integration. """ |
4364 | 14 | 16 | ||
4367 | 15 | def __init__(self, service_root, application_name=None, consumer_name=None, | 17 | def __init__( |
4368 | 16 | credential_save_failed=None, allow_access_levels=None): | 18 | self, |
4369 | 19 | service_root, | ||
4370 | 20 | application_name=None, | ||
4371 | 21 | consumer_name=None, | ||
4372 | 22 | credential_save_failed=None, | ||
4373 | 23 | allow_access_levels=None, | ||
4374 | 24 | ): | ||
4375 | 17 | super(ShutUpAndTakeMyTokenAuthorizationEngine, self).__init__( | 25 | super(ShutUpAndTakeMyTokenAuthorizationEngine, self).__init__( |
4378 | 18 | service_root, application_name, consumer_name, | 26 | service_root, application_name, consumer_name, credential_save_failed |
4379 | 19 | credential_save_failed) | 27 | ) |
4380 | 20 | 28 | ||
4381 | 21 | 29 | ||
4383 | 22 | def launchpad_login(pkg, service_root='production', version='devel'): | 30 | def launchpad_login(pkg, service_root="production", version="devel"): |
4384 | 23 | """Log into Launchpad API with stored credentials.""" | 31 | """Log into Launchpad API with stored credentials.""" |
4386 | 24 | creds_dir = os.path.expanduser(os.path.join('~', '.' + pkg)) | 32 | creds_dir = os.path.expanduser(os.path.join("~", "." + pkg)) |
4387 | 25 | if not os.path.exists(creds_dir): | 33 | if not os.path.exists(creds_dir): |
4388 | 26 | os.makedirs(creds_dir, 0o700) | 34 | os.makedirs(creds_dir, 0o700) |
4389 | 27 | os.chmod(creds_dir, 0o700) | 35 | os.chmod(creds_dir, 0o700) |
4390 | @@ -29,69 +37,83 @@ def launchpad_login(pkg, service_root='production', version='devel'): | |||
4391 | 29 | consumer_name = pkg | 37 | consumer_name = pkg |
4392 | 30 | return Launchpad.login_with( | 38 | return Launchpad.login_with( |
4393 | 31 | consumer_name=consumer_name, | 39 | consumer_name=consumer_name, |
4395 | 32 | credentials_file=os.path.join(creds_dir, 'launchpad.credentials'), | 40 | credentials_file=os.path.join(creds_dir, "launchpad.credentials"), |
4396 | 33 | service_root=api_endpoint, | 41 | service_root=api_endpoint, |
4397 | 34 | version=version, | 42 | version=version, |
4398 | 35 | authorization_engine=ShutUpAndTakeMyTokenAuthorizationEngine( | 43 | authorization_engine=ShutUpAndTakeMyTokenAuthorizationEngine( |
4401 | 36 | service_root=api_endpoint, | 44 | service_root=api_endpoint, consumer_name=consumer_name |
4400 | 37 | consumer_name=consumer_name, | ||
4402 | 38 | ), | 45 | ), |
4403 | 39 | ) | 46 | ) |
4404 | 40 | 47 | ||
4405 | 41 | 48 | ||
4407 | 42 | class LaunchpadLogin(): | 49 | class LaunchpadLogin: |
4408 | 43 | """Try to unify all Launchpad login""" | 50 | """Try to unify all Launchpad login""" |
4412 | 44 | def __init__(self, application_name='oem-scripts', | 51 | |
4413 | 45 | service_root=None, launchpadlib_dir=None, | 52 | def __init__( |
4414 | 46 | version="devel", bot=False): | 53 | self, |
4415 | 54 | application_name="oem-scripts", | ||
4416 | 55 | service_root=None, | ||
4417 | 56 | launchpadlib_dir=None, | ||
4418 | 57 | version="devel", | ||
4419 | 58 | bot=False, | ||
4420 | 59 | ): | ||
4421 | 47 | 60 | ||
4422 | 48 | if launchpadlib_dir is None: | 61 | if launchpadlib_dir is None: |
4423 | 49 | launchpadlib_dir = os.path.join(os.environ["HOME"], ".launchpadlib/cache") | 62 | launchpadlib_dir = os.path.join(os.environ["HOME"], ".launchpadlib/cache") |
4424 | 50 | 63 | ||
4425 | 51 | if service_root is None: | 64 | if service_root is None: |
4428 | 52 | if os.environ.get('LAUNCHPAD_API') == lookup_service_root('staging'): | 65 | if os.environ.get("LAUNCHPAD_API") == lookup_service_root("staging"): |
4429 | 53 | service_root = 'staging' | 66 | service_root = "staging" |
4430 | 54 | else: | 67 | else: |
4432 | 55 | service_root = 'production' | 68 | service_root = "production" |
4433 | 56 | 69 | ||
4436 | 57 | oem_scripts_config_ini = os.path.join(os.environ["HOME"], | 70 | oem_scripts_config_ini = os.path.join( |
4437 | 58 | ".config/oem-scripts/config.ini") | 71 | os.environ["HOME"], ".config/oem-scripts/config.ini" |
4438 | 72 | ) | ||
4439 | 59 | launchpad_token = os.environ.get("LAUNCHPAD_TOKEN") | 73 | launchpad_token = os.environ.get("LAUNCHPAD_TOKEN") |
4440 | 60 | 74 | ||
4441 | 61 | if bot: | 75 | if bot: |
4442 | 62 | logging.info("Using oem-taipei-bot credentials") | 76 | logging.info("Using oem-taipei-bot credentials") |
4444 | 63 | self.lp = launchpad_login('/', service_root) | 77 | self.lp = launchpad_login("/", service_root) |
4445 | 64 | 78 | ||
4446 | 65 | elif launchpad_token: | 79 | elif launchpad_token: |
4447 | 66 | if launchpad_token == "::": | 80 | if launchpad_token == "::": |
4448 | 67 | logging.info("Using anonymously login") | 81 | logging.info("Using anonymously login") |
4449 | 68 | self.lp = Launchpad.login_anonymously(application_name, service_root) | 82 | self.lp = Launchpad.login_anonymously(application_name, service_root) |
4450 | 69 | elif ":" in launchpad_token: | 83 | elif ":" in launchpad_token: |
4458 | 70 | oauth_token, oauth_token_secret, oauth_consumer_key = launchpad_token.split(":", maxsplit=2) | 84 | oauth_token, oauth_token_secret, oauth_consumer_key = launchpad_token.split( |
4459 | 71 | self.lp = Launchpad.login(oauth_consumer_key, | 85 | ":", maxsplit=2 |
4460 | 72 | oauth_token, | 86 | ) |
4461 | 73 | oauth_token_secret, | 87 | self.lp = Launchpad.login( |
4462 | 74 | service_root=service_root, | 88 | oauth_consumer_key, |
4463 | 75 | cache=launchpadlib_dir, | 89 | oauth_token, |
4464 | 76 | version=version) | 90 | oauth_token_secret, |
4465 | 91 | service_root=service_root, | ||
4466 | 92 | cache=launchpadlib_dir, | ||
4467 | 93 | version=version, | ||
4468 | 94 | ) | ||
4469 | 77 | else: | 95 | else: |
4470 | 78 | logging.error(f"invalid LAUNCHPAD_TOKEN '{launchpad_token}'") | 96 | logging.error(f"invalid LAUNCHPAD_TOKEN '{launchpad_token}'") |
4471 | 79 | exit(1) | 97 | exit(1) |
4472 | 80 | 98 | ||
4474 | 81 | elif os.environ.get('LAUNCHPAD_API') and os.path.exists(oem_scripts_config_ini): | 99 | elif os.environ.get("LAUNCHPAD_API") and os.path.exists(oem_scripts_config_ini): |
4475 | 82 | logging.info("Using oem-scripts oauth token") | 100 | logging.info("Using oem-scripts oauth token") |
4476 | 83 | oem_scripts_config = ConfigParser() | 101 | oem_scripts_config = ConfigParser() |
4477 | 84 | oem_scripts_config.read(oem_scripts_config_ini) | 102 | oem_scripts_config.read(oem_scripts_config_ini) |
4485 | 85 | config = oem_scripts_config['oem-scripts'] | 103 | config = oem_scripts_config["oem-scripts"] |
4486 | 86 | self.lp = Launchpad.login(config['oauth_consumer_key'], | 104 | self.lp = Launchpad.login( |
4487 | 87 | config['oauth_token'], | 105 | config["oauth_consumer_key"], |
4488 | 88 | config['oauth_token_secret'], | 106 | config["oauth_token"], |
4489 | 89 | service_root=service_root, | 107 | config["oauth_token_secret"], |
4490 | 90 | cache=launchpadlib_dir, | 108 | service_root=service_root, |
4491 | 91 | version=version) | 109 | cache=launchpadlib_dir, |
4492 | 110 | version=version, | ||
4493 | 111 | ) | ||
4494 | 92 | else: | 112 | else: |
4495 | 93 | logging.info("Using oem-scripts login") | 113 | logging.info("Using oem-scripts login") |
4500 | 94 | self.lp = Launchpad.login_with(application_name=application_name, | 114 | self.lp = Launchpad.login_with( |
4501 | 95 | service_root=service_root, | 115 | application_name=application_name, |
4502 | 96 | launchpadlib_dir=launchpadlib_dir, | 116 | service_root=service_root, |
4503 | 97 | version=version) | 117 | launchpadlib_dir=launchpadlib_dir, |
4504 | 118 | version=version, | ||
4505 | 119 | ) | ||
4506 | diff --git a/oem_scripts/logging.py b/oem_scripts/logging.py | |||
4507 | index 5351f86..0571d3c 100644 | |||
4508 | --- a/oem_scripts/logging.py | |||
4509 | +++ b/oem_scripts/logging.py | |||
4510 | @@ -22,29 +22,36 @@ import sys | |||
4511 | 22 | 22 | ||
4512 | 23 | 23 | ||
4513 | 24 | def setup_logging(debug=False, quiet=False): | 24 | def setup_logging(debug=False, quiet=False): |
4529 | 25 | logging.addLevelName(logging.DEBUG, | 25 | logging.addLevelName( |
4530 | 26 | "\033[1;96m%s\033[1;0m" % | 26 | logging.DEBUG, "\033[1;96m%s\033[1;0m" % logging.getLevelName(logging.DEBUG) |
4531 | 27 | logging.getLevelName(logging.DEBUG)) | 27 | ) |
4532 | 28 | logging.addLevelName(logging.INFO, | 28 | logging.addLevelName( |
4533 | 29 | "\033[1;32m%s\033[1;0m" % | 29 | logging.INFO, "\033[1;32m%s\033[1;0m" % logging.getLevelName(logging.INFO) |
4534 | 30 | logging.getLevelName(logging.INFO)) | 30 | ) |
4535 | 31 | logging.addLevelName(logging.WARNING, | 31 | logging.addLevelName( |
4536 | 32 | "\033[1;33m%s\033[1;0m" % | 32 | logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING) |
4537 | 33 | logging.getLevelName(logging.WARNING)) | 33 | ) |
4538 | 34 | logging.addLevelName(logging.ERROR, | 34 | logging.addLevelName( |
4539 | 35 | "\033[1;31m%s\033[1;0m" % | 35 | logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR) |
4540 | 36 | logging.getLevelName(logging.ERROR)) | 36 | ) |
4541 | 37 | logging.addLevelName(logging.CRITICAL, | 37 | logging.addLevelName( |
4542 | 38 | "\033[1;41m%s\033[1;0m" % | 38 | logging.CRITICAL, |
4543 | 39 | logging.getLevelName(logging.CRITICAL)) | 39 | "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.CRITICAL), |
4544 | 40 | ) | ||
4545 | 40 | if debug: | 41 | if debug: |
4549 | 41 | logging.basicConfig(format='<%(levelname)s> %(message)s', | 42 | logging.basicConfig( |
4550 | 42 | level=logging.DEBUG, | 43 | format="<%(levelname)s> %(message)s", |
4551 | 43 | handlers=[logging.StreamHandler(sys.stdout)]) | 44 | level=logging.DEBUG, |
4552 | 45 | handlers=[logging.StreamHandler(sys.stdout)], | ||
4553 | 46 | ) | ||
4554 | 44 | elif not quiet: | 47 | elif not quiet: |
4558 | 45 | logging.basicConfig(format='<%(levelname)s> %(message)s', | 48 | logging.basicConfig( |
4559 | 46 | level=logging.INFO, | 49 | format="<%(levelname)s> %(message)s", |
4560 | 47 | handlers=[logging.StreamHandler(sys.stdout)]) | 50 | level=logging.INFO, |
4561 | 51 | handlers=[logging.StreamHandler(sys.stdout)], | ||
4562 | 52 | ) | ||
4563 | 48 | else: | 53 | else: |
4566 | 49 | logging.basicConfig(format='<%(levelname)s> %(message)s', | 54 | logging.basicConfig( |
4567 | 50 | handlers=[logging.StreamHandler(sys.stdout)]) | 55 | format="<%(levelname)s> %(message)s", |
4568 | 56 | handlers=[logging.StreamHandler(sys.stdout)], | ||
4569 | 57 | ) | ||
4570 | diff --git a/pkg-list b/pkg-list | |||
4571 | index e5700ac..c5eeca0 100755 | |||
4572 | --- a/pkg-list | |||
4573 | +++ b/pkg-list | |||
4574 | @@ -29,8 +29,9 @@ from logging import debug, error, critical, info, warning | |||
4575 | 29 | from urllib.parse import urljoin | 29 | from urllib.parse import urljoin |
4576 | 30 | 30 | ||
4577 | 31 | 31 | ||
4580 | 32 | parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, | 32 | parser = argparse.ArgumentParser( |
4581 | 33 | epilog=""" | 33 | formatter_class=argparse.RawDescriptionHelpFormatter, |
4582 | 34 | epilog=""" | ||
4583 | 34 | examples: | 35 | examples: |
4584 | 35 | pkg-list ubuntu-desktop --recommends > ubuntu-desktop.list | 36 | pkg-list ubuntu-desktop --recommends > ubuntu-desktop.list |
4585 | 36 | pkg-list dkms --exclude ubuntu-desktop.list > dkms.list | 37 | pkg-list dkms --exclude ubuntu-desktop.list > dkms.list |
4586 | @@ -39,63 +40,74 @@ examples: | |||
4587 | 39 | pkg-list linux-generic --exclude all.list | 40 | pkg-list linux-generic --exclude all.list |
4588 | 40 | pkg-list linux-generic-hwe-20.04 --exclude all.list | 41 | pkg-list linux-generic-hwe-20.04 --exclude all.list |
4589 | 41 | pkg-list linux-oem-20.04 --exclude all.list | 42 | pkg-list linux-oem-20.04 --exclude all.list |
4621 | 42 | pkg-list linux-oem-20.04-edge --exclude all.list""") | 43 | pkg-list linux-oem-20.04-edge --exclude all.list""", |
4622 | 43 | 44 | ) | |
4623 | 44 | parser.add_argument("-d", "--debug", | 45 | |
4624 | 45 | action="store_true", | 46 | parser.add_argument("-d", "--debug", action="store_true", help="print debug messages") |
4625 | 46 | help="print debug messages") | 47 | parser.add_argument( |
4626 | 47 | parser.add_argument("-l", "--long", | 48 | "-l", |
4627 | 48 | action="store_true", | 49 | "--long", |
4628 | 49 | help="print long list including the URL, MD5, SHA1 and SHA256.") | 50 | action="store_true", |
4629 | 50 | parser.add_argument("--apt-dir", | 51 | help="print long list including the URL, MD5, SHA1 and SHA256.", |
4630 | 51 | type=str, | 52 | ) |
4631 | 52 | help="specify the dir for apt") | 53 | parser.add_argument("--apt-dir", type=str, help="specify the dir for apt") |
4632 | 53 | parser.add_argument("--recommends", | 54 | parser.add_argument( |
4633 | 54 | action="store_true", | 55 | "--recommends", action="store_true", help="include recommends packages" |
4634 | 55 | help="include recommends packages") | 56 | ) |
4635 | 56 | parser.add_argument("--suggests", | 57 | parser.add_argument("--suggests", action="store_true", help="include suggests packages") |
4636 | 57 | action="store_true", | 58 | parser.add_argument( |
4637 | 58 | help="include suggests packages") | 59 | "--non-installed", |
4638 | 59 | parser.add_argument("--non-installed", | 60 | action="store_true", |
4639 | 60 | action="store_true", | 61 | help="only get non-installed packages per check current running environments", |
4640 | 61 | help="only get non-installed packages per check current running environments") | 62 | ) |
4641 | 62 | parser.add_argument("--fail-unavailable", | 63 | parser.add_argument( |
4642 | 63 | action="store_true", | 64 | "--fail-unavailable", |
4643 | 64 | help="Return error when any package is unavailable.") | 65 | action="store_true", |
4644 | 65 | parser.add_argument("--exclude", | 66 | help="Return error when any package is unavailable.", |
4645 | 66 | metavar='pkg.list', | 67 | ) |
4646 | 67 | type=argparse.FileType('r', encoding='UTF-8'), | 68 | parser.add_argument( |
4647 | 68 | help="package names and versions to exclude.") | 69 | "--exclude", |
4648 | 69 | parser.add_argument('pkgs', | 70 | metavar="pkg.list", |
4649 | 70 | metavar='PKG_NAME', | 71 | type=argparse.FileType("r", encoding="UTF-8"), |
4650 | 71 | type=str, nargs='+', | 72 | help="package names and versions to exclude.", |
4651 | 72 | help='the names of Debian binary packages') | 73 | ) |
4652 | 74 | parser.add_argument( | ||
4653 | 75 | "pkgs", | ||
4654 | 76 | metavar="PKG_NAME", | ||
4655 | 77 | type=str, | ||
4656 | 78 | nargs="+", | ||
4657 | 79 | help="the names of Debian binary packages", | ||
4658 | 80 | ) | ||
4659 | 73 | 81 | ||
4660 | 74 | args = parser.parse_args() | 82 | args = parser.parse_args() |
4661 | 75 | 83 | ||
4677 | 76 | logging.addLevelName(logging.DEBUG, | 84 | logging.addLevelName( |
4678 | 77 | "\033[1;96m%s\033[1;0m" % | 85 | logging.DEBUG, "\033[1;96m%s\033[1;0m" % logging.getLevelName(logging.DEBUG) |
4679 | 78 | logging.getLevelName(logging.DEBUG)) | 86 | ) |
4680 | 79 | logging.addLevelName(logging.INFO, | 87 | logging.addLevelName( |
4681 | 80 | "\033[1;32m%s\033[1;0m" % | 88 | logging.INFO, "\033[1;32m%s\033[1;0m" % logging.getLevelName(logging.INFO) |
4682 | 81 | logging.getLevelName(logging.INFO)) | 89 | ) |
4683 | 82 | logging.addLevelName(logging.WARNING, | 90 | logging.addLevelName( |
4684 | 83 | "\033[1;33m%s\033[1;0m" % | 91 | logging.WARNING, "\033[1;33m%s\033[1;0m" % logging.getLevelName(logging.WARNING) |
4685 | 84 | logging.getLevelName(logging.WARNING)) | 92 | ) |
4686 | 85 | logging.addLevelName(logging.ERROR, | 93 | logging.addLevelName( |
4687 | 86 | "\033[1;31m%s\033[1;0m" % | 94 | logging.ERROR, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.ERROR) |
4688 | 87 | logging.getLevelName(logging.ERROR)) | 95 | ) |
4689 | 88 | logging.addLevelName(logging.CRITICAL, | 96 | logging.addLevelName( |
4690 | 89 | "\033[1;41m%s\033[1;0m" % | 97 | logging.CRITICAL, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.CRITICAL) |
4691 | 90 | logging.getLevelName(logging.CRITICAL)) | 98 | ) |
4692 | 91 | 99 | ||
4693 | 92 | if args.debug: | 100 | if args.debug: |
4697 | 93 | logging.basicConfig(format='<%(levelname)s> %(message)s', | 101 | logging.basicConfig( |
4698 | 94 | level=logging.DEBUG, | 102 | format="<%(levelname)s> %(message)s", |
4699 | 95 | handlers=[logging.StreamHandler(sys.stdout)]) | 103 | level=logging.DEBUG, |
4700 | 104 | handlers=[logging.StreamHandler(sys.stdout)], | ||
4701 | 105 | ) | ||
4702 | 96 | else: | 106 | else: |
4705 | 97 | logging.basicConfig(format='<%(levelname)s> %(message)s', | 107 | logging.basicConfig( |
4706 | 98 | handlers=[logging.StreamHandler(sys.stdout)]) | 108 | format="<%(levelname)s> %(message)s", |
4707 | 109 | handlers=[logging.StreamHandler(sys.stdout)], | ||
4708 | 110 | ) | ||
4709 | 99 | 111 | ||
4710 | 100 | 112 | ||
4711 | 101 | def _debug_pkg(pkg: str) -> None: | 113 | def _debug_pkg(pkg: str) -> None: |
4712 | @@ -106,12 +118,18 @@ def _debug_pkg(pkg: str) -> None: | |||
4713 | 106 | debug(dir(pkg)) | 118 | debug(dir(pkg)) |
4714 | 107 | 119 | ||
4715 | 108 | for attr in dir(pkg): | 120 | for attr in dir(pkg): |
4717 | 109 | if not attr.startswith('__'): | 121 | if not attr.startswith("__"): |
4718 | 110 | if not isinstance(pkg.__getattribute__(attr), types.BuiltinFunctionType): | 122 | if not isinstance(pkg.__getattribute__(attr), types.BuiltinFunctionType): |
4719 | 111 | debug(f"{attr}: {pkg.__getattribute__(attr)}") | 123 | debug(f"{attr}: {pkg.__getattribute__(attr)}") |
4720 | 112 | 124 | ||
4721 | 113 | 125 | ||
4723 | 114 | def get_depends(pkg_name: str, depends_list: list, recommends: bool, suggests: bool, non_installed: bool) -> bool: | 126 | def get_depends( |
4724 | 127 | pkg_name: str, | ||
4725 | 128 | depends_list: list, | ||
4726 | 129 | recommends: bool, | ||
4727 | 130 | suggests: bool, | ||
4728 | 131 | non_installed: bool, | ||
4729 | 132 | ) -> bool: | ||
4730 | 115 | """Recursively get all dependencies. | 133 | """Recursively get all dependencies. |
4731 | 116 | 134 | ||
4732 | 117 | Args: | 135 | Args: |
4733 | @@ -158,25 +176,34 @@ def get_depends(pkg_name: str, depends_list: list, recommends: bool, suggests: b | |||
4734 | 158 | if pkg_name in map(lambda x: x[0], depends_list): | 176 | if pkg_name in map(lambda x: x[0], depends_list): |
4735 | 159 | continue | 177 | continue |
4736 | 160 | 178 | ||
4738 | 161 | if any(pkg_name == name and version.ver_str == ver for name, ver in exclude_list): | 179 | if any( |
4739 | 180 | pkg_name == name and version.ver_str == ver for name, ver in exclude_list | ||
4740 | 181 | ): | ||
4741 | 162 | break | 182 | break |
4742 | 163 | 183 | ||
4743 | 164 | if non_installed and pkg.current_ver == version: | 184 | if non_installed and pkg.current_ver == version: |
4744 | 165 | break | 185 | break |
4745 | 166 | 186 | ||
4746 | 167 | for pfile in version.file_list: | 187 | for pfile in version.file_list: |
4749 | 168 | if pfile[0].filename != '/var/lib/dpkg/status' and record.lookup(pfile): | 188 | if pfile[0].filename != "/var/lib/dpkg/status" and record.lookup(pfile): |
4750 | 169 | url = urljoin("http://" + pfile[0].site, 'ubuntu/' + record.filename) | 189 | url = urljoin("http://" + pfile[0].site, "ubuntu/" + record.filename) |
4751 | 170 | break | 190 | break |
4752 | 171 | 191 | ||
4753 | 172 | debug(f"{pkg_name} {version.ver_str} {pkg.architecture} {url}") | 192 | debug(f"{pkg_name} {version.ver_str} {pkg.architecture} {url}") |
4755 | 173 | item = (pkg_name, version.ver_str, url, record.hashes.find("MD5Sum"), record.hashes.find("SHA1"), record.hashes.find("SHA256")) | 193 | item = ( |
4756 | 194 | pkg_name, | ||
4757 | 195 | version.ver_str, | ||
4758 | 196 | url, | ||
4759 | 197 | record.hashes.find("MD5Sum"), | ||
4760 | 198 | record.hashes.find("SHA1"), | ||
4761 | 199 | record.hashes.find("SHA256"), | ||
4762 | 200 | ) | ||
4763 | 174 | depends_list.append(item) | 201 | depends_list.append(item) |
4764 | 175 | 202 | ||
4767 | 176 | for target in ('PreDepends', 'Depends', 'Recommends', 'Suggests'): | 203 | for target in ("PreDepends", "Depends", "Recommends", "Suggests"): |
4768 | 177 | if target == 'Recommends' and not recommends: | 204 | if target == "Recommends" and not recommends: |
4769 | 178 | continue | 205 | continue |
4771 | 179 | if target == 'Suggests' and not suggests: | 206 | if target == "Suggests" and not suggests: |
4772 | 180 | continue | 207 | continue |
4773 | 181 | if target not in version.depends_list_str: | 208 | if target not in version.depends_list_str: |
4774 | 182 | continue | 209 | continue |
4775 | @@ -188,10 +215,16 @@ def get_depends(pkg_name: str, depends_list: list, recommends: bool, suggests: b | |||
4776 | 188 | if name in map(lambda x: x[0], depends_list): | 215 | if name in map(lambda x: x[0], depends_list): |
4777 | 189 | found = True | 216 | found = True |
4778 | 190 | break | 217 | break |
4780 | 191 | if arch == 'i386' and pkg.architecture == 'amd64': | 218 | if arch == "i386" and pkg.architecture == "amd64": |
4781 | 192 | name = name + ":i386" | 219 | name = name + ":i386" |
4782 | 193 | pkg = cache[name] | 220 | pkg = cache[name] |
4784 | 194 | found = get_depends(name, depends_list, recommends=recommends, suggests=suggests, non_installed=non_installed) | 221 | found = get_depends( |
4785 | 222 | name, | ||
4786 | 223 | depends_list, | ||
4787 | 224 | recommends=recommends, | ||
4788 | 225 | suggests=suggests, | ||
4789 | 226 | non_installed=non_installed, | ||
4790 | 227 | ) | ||
4791 | 195 | if found: | 228 | if found: |
4792 | 196 | break | 229 | break |
4793 | 197 | if not found and args.fail_unavailable: | 230 | if not found and args.fail_unavailable: |
4794 | @@ -205,11 +238,13 @@ if args.apt_dir: | |||
4795 | 205 | if args.debug: | 238 | if args.debug: |
4796 | 206 | old = apt_pkg.config.dump() | 239 | old = apt_pkg.config.dump() |
4797 | 207 | apt_pkg.config.set("Dir", args.apt_dir) | 240 | apt_pkg.config.set("Dir", args.apt_dir) |
4799 | 208 | apt_pkg.config.set("Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status")) | 241 | apt_pkg.config.set( |
4800 | 242 | "Dir::State::status", os.path.join(args.apt_dir, "var/lib/dpkg/status") | ||
4801 | 243 | ) | ||
4802 | 209 | if args.debug: | 244 | if args.debug: |
4803 | 210 | new = apt_pkg.config.dump() | 245 | new = apt_pkg.config.dump() |
4804 | 211 | d = difflib.Differ() | 246 | d = difflib.Differ() |
4806 | 212 | diff = d.compare(old.split('\n'), new.split('\n')) | 247 | diff = d.compare(old.split("\n"), new.split("\n")) |
4807 | 213 | for line in diff: | 248 | for line in diff: |
4808 | 214 | debug(line.strip()) | 249 | debug(line.strip()) |
4809 | 215 | apt_pkg.init_system() | 250 | apt_pkg.init_system() |
4810 | @@ -225,12 +260,18 @@ exclude_list = [] | |||
4811 | 225 | 260 | ||
4812 | 226 | if args.exclude: | 261 | if args.exclude: |
4813 | 227 | for line in args.exclude.readlines(): | 262 | for line in args.exclude.readlines(): |
4815 | 228 | (name, ver) = line.strip().split(' ') | 263 | (name, ver) = line.strip().split(" ") |
4816 | 229 | if not any(name == _name and ver == _ver for _name, _ver in exclude_list): | 264 | if not any(name == _name and ver == _ver for _name, _ver in exclude_list): |
4817 | 230 | exclude_list.append((name, ver)) | 265 | exclude_list.append((name, ver)) |
4818 | 231 | 266 | ||
4819 | 232 | for pkg in args.pkgs: | 267 | for pkg in args.pkgs: |
4821 | 233 | get_depends(pkg, pkg_list, recommends=args.recommends, suggests=args.suggests, non_installed=args.non_installed) | 268 | get_depends( |
4822 | 269 | pkg, | ||
4823 | 270 | pkg_list, | ||
4824 | 271 | recommends=args.recommends, | ||
4825 | 272 | suggests=args.suggests, | ||
4826 | 273 | non_installed=args.non_installed, | ||
4827 | 274 | ) | ||
4828 | 234 | 275 | ||
4829 | 235 | for pkg, ver, url, md5, sha1, sha256 in sorted(pkg_list): | 276 | for pkg, ver, url, md5, sha1, sha256 in sorted(pkg_list): |
4830 | 236 | if args.long: | 277 | if args.long: |
4831 | diff --git a/pkg-oem-meta b/pkg-oem-meta | |||
4832 | index e5e3cf5..c924bfa 100755 | |||
4833 | --- a/pkg-oem-meta | |||
4834 | +++ b/pkg-oem-meta | |||
4835 | @@ -16,155 +16,164 @@ program_name = os.path.basename(sys.argv[0]) | |||
4836 | 16 | 16 | ||
4837 | 17 | setup_logging() | 17 | setup_logging() |
4838 | 18 | 18 | ||
4841 | 19 | if program_name == 'pkg-somerville-meta': | 19 | if program_name == "pkg-somerville-meta": |
4842 | 20 | codename = 'somerville' | 20 | codename = "somerville" |
4843 | 21 | Codename = codename.title() | 21 | Codename = codename.title() |
4845 | 22 | brand = 'dell' | 22 | brand = "dell" |
4846 | 23 | parser = argparse.ArgumentParser( | 23 | parser = argparse.ArgumentParser( |
4847 | 24 | formatter_class=argparse.RawTextHelpFormatter, | 24 | formatter_class=argparse.RawTextHelpFormatter, |
4848 | 25 | description=f"{Codename} platform meta package generator.", | 25 | description=f"{Codename} platform meta package generator.", |
4849 | 26 | epilog=f"Ex. {program_name} --public-bug 1868254 -s focal -k" | 26 | epilog=f"Ex. {program_name} --public-bug 1868254 -s focal -k" |
4853 | 27 | " oem -p three-eyed-raven 0962") | 27 | " oem -p three-eyed-raven 0962", |
4854 | 28 | elif program_name == 'pkg-stella-meta': | 28 | ) |
4855 | 29 | codename = 'stella' | 29 | elif program_name == "pkg-stella-meta": |
4856 | 30 | codename = "stella" | ||
4857 | 30 | Codename = codename.title() | 31 | Codename = codename.title() |
4859 | 31 | brand = 'hp' | 32 | brand = "hp" |
4860 | 32 | parser = argparse.ArgumentParser( | 33 | parser = argparse.ArgumentParser( |
4861 | 33 | formatter_class=argparse.RawTextHelpFormatter, | 34 | formatter_class=argparse.RawTextHelpFormatter, |
4862 | 34 | description=f"{Codename} platform meta package generator.", | 35 | description=f"{Codename} platform meta package generator.", |
4867 | 35 | epilog=f"Ex. {program_name} -s focal -k oem" | 36 | epilog=f"Ex. {program_name} -s focal -k oem" " -g cmit -p beedrill 8594", |
4868 | 36 | " -g cmit -p beedrill 8594") | 37 | ) |
4869 | 37 | elif program_name == 'pkg-sutton-meta': | 38 | elif program_name == "pkg-sutton-meta": |
4870 | 38 | codename = 'sutton' | 39 | codename = "sutton" |
4871 | 39 | Codename = codename.title() | 40 | Codename = codename.title() |
4873 | 40 | brand = 'lenovo' | 41 | brand = "lenovo" |
4874 | 41 | parser = argparse.ArgumentParser( | 42 | parser = argparse.ArgumentParser( |
4875 | 42 | formatter_class=argparse.RawTextHelpFormatter, | 43 | formatter_class=argparse.RawTextHelpFormatter, |
4876 | 43 | description=f"{Codename} platform meta package generator.", | 44 | description=f"{Codename} platform meta package generator.", |
4879 | 44 | epilog=f"Ex. {program_name} -s focal -k oem" | 45 | epilog=f"Ex. {program_name} -s focal -k oem" " -g bachman -p banaing S08", |
4880 | 45 | " -g bachman -p banaing S08") | 46 | ) |
4881 | 46 | else: | 47 | else: |
4883 | 47 | error('This program can not be executed.') | 48 | error("This program can not be executed.") |
4884 | 48 | exit(1) | 49 | exit(1) |
4885 | 49 | 50 | ||
4890 | 50 | parser.add_argument('-k', '--kernel', | 51 | parser.add_argument( |
4891 | 51 | choices=['linux-oem-20.04', 'linux-oem-20.04b', 'linux-oem-20.04c', 'linux-generic-hwe-20.04'], | 52 | "-k", |
4892 | 52 | default='linux-oem-20.04', | 53 | "--kernel", |
4893 | 53 | help="Specify the kernel meta. [linux-oem-20.04|linux-oem-20.04b|linux-oem-20.04c|linux-generic-hwe-20.04]") | 54 | choices=[ |
4894 | 55 | "linux-oem-20.04", | ||
4895 | 56 | "linux-oem-20.04b", | ||
4896 | 57 | "linux-oem-20.04c", | ||
4897 | 58 | "linux-generic-hwe-20.04", | ||
4898 | 59 | ], | ||
4899 | 60 | default="linux-oem-20.04", | ||
4900 | 61 | help="Specify the kernel meta. [linux-oem-20.04|linux-oem-20.04b|linux-oem-20.04c|linux-generic-hwe-20.04]", | ||
4901 | 62 | ) | ||
4902 | 54 | info = UbuntuDistroInfo() | 63 | info = UbuntuDistroInfo() |
4903 | 55 | try: | 64 | try: |
4904 | 56 | # for bionic | 65 | # for bionic |
4906 | 57 | series = lsb_release.get_lsb_information()['CODENAME'] | 66 | series = lsb_release.get_lsb_information()["CODENAME"] |
4907 | 58 | except AttributeError: | 67 | except AttributeError: |
4908 | 59 | # for focal | 68 | # for focal |
4936 | 60 | series = lsb_release.get_os_release()['CODENAME'] | 69 | series = lsb_release.get_os_release()["CODENAME"] |
4937 | 61 | 70 | ||
4938 | 62 | parser.add_argument('-s', '--series', choices=info.supported(), default=series, | 71 | parser.add_argument( |
4939 | 63 | help=f"Ubuntu series, such as {series} by default.") | 72 | "-s", |
4940 | 64 | parser.add_argument('--public-bug', | 73 | "--series", |
4941 | 65 | help="Launchpad public bug number.", | 74 | choices=info.supported(), |
4942 | 66 | type=int) | 75 | default=series, |
4943 | 67 | parser.add_argument('--private-bug', | 76 | help=f"Ubuntu series, such as {series} by default.", |
4944 | 68 | help="Launchpad private bug number.", | 77 | ) |
4945 | 69 | type=int) | 78 | parser.add_argument("--public-bug", help="Launchpad public bug number.", type=int) |
4946 | 70 | 79 | parser.add_argument("--private-bug", help="Launchpad private bug number.", type=int) | |
4947 | 71 | if codename == 'somerville': | 80 | |
4948 | 72 | parser.add_argument('bios_id', nargs='+', help="BIOS ID") | 81 | if codename == "somerville": |
4949 | 73 | parser.add_argument('-p', '--platform', help="platform tag", required=True) | 82 | parser.add_argument("bios_id", nargs="+", help="BIOS ID") |
4950 | 74 | elif codename == 'stella': | 83 | parser.add_argument("-p", "--platform", help="platform tag", required=True) |
4951 | 75 | parser.add_argument('-g', '--group', help="OEM-group", required=True) | 84 | elif codename == "stella": |
4952 | 76 | parser.add_argument('-p', '--platform', help="platform-codename", | 85 | parser.add_argument("-g", "--group", help="OEM-group", required=True) |
4953 | 77 | required=True) | 86 | parser.add_argument("-p", "--platform", help="platform-codename", required=True) |
4954 | 78 | parser.add_argument('sd_id', nargs='+', | 87 | parser.add_argument("sd_id", nargs="+", help="subsystem device ID, such as 0962") |
4955 | 79 | help="subsystem device ID, such as 0962") | 88 | elif codename == "sutton": |
4956 | 80 | elif codename == 'sutton': | 89 | parser.add_argument("-g", "--group", help="OEM-group", required=True) |
4957 | 81 | parser.add_argument('-g', '--group', help="OEM-group", required=True) | 90 | parser.add_argument("-p", "--platform", help="platform-codename", required=True) |
4958 | 82 | parser.add_argument('-p', '--platform', help="platform-codename", | 91 | parser.add_argument( |
4959 | 83 | required=True) | 92 | "bios_ver", |
4960 | 84 | parser.add_argument('bios_ver', nargs='+', | 93 | nargs="+", |
4961 | 85 | help="First three chars in bios version, " | 94 | help="First three chars in bios version, " + "such as S08 or bvnLENOVO:bvrS08", |
4962 | 86 | + "such as S08 or bvnLENOVO:bvrS08") | 95 | ) |
4963 | 87 | args = parser.parse_args() | 96 | args = parser.parse_args() |
4964 | 88 | 97 | ||
4965 | 89 | platform = args.platform.lower() | 98 | platform = args.platform.lower() |
4966 | 90 | Platform = platform.title() | 99 | Platform = platform.title() |
4967 | 91 | series = args.series | 100 | series = args.series |
4971 | 92 | versions = dict(zip(info.get_all(result='codename'), | 101 | versions = dict(zip(info.get_all(result="codename"), info.get_all(result="release"))) |
4972 | 93 | info.get_all(result='release'))) | 102 | version = versions[series].split(" ")[0] |
4970 | 94 | version = versions[series].split(' ')[0] | ||
4973 | 95 | 103 | ||
4974 | 96 | # Sanity check | 104 | # Sanity check |
4977 | 97 | if codename == 'somerville': | 105 | if codename == "somerville": |
4978 | 98 | group = '' | 106 | group = "" |
4979 | 99 | if args.platform.startswith("fossa-"): | 107 | if args.platform.startswith("fossa-"): |
4981 | 100 | error('Please remove fossa- prefix from the platform name.') | 108 | error("Please remove fossa- prefix from the platform name.") |
4982 | 101 | exit(1) | 109 | exit(1) |
4983 | 102 | for bios_id in args.bios_id: | 110 | for bios_id in args.bios_id: |
4986 | 103 | if not re.match('[0-9a-fA-F]{4}$', bios_id): | 111 | if not re.match("[0-9a-fA-F]{4}$", bios_id): |
4987 | 104 | error('Invalid BIOS ID: {%s}' % bios_id) | 112 | error("Invalid BIOS ID: {%s}" % bios_id) |
4988 | 105 | exit(1) | 113 | exit(1) |
4991 | 106 | meta = 'oem-' + codename + '-' + platform + '-meta' | 114 | meta = "oem-" + codename + "-" + platform + "-meta" |
4992 | 107 | elif codename == 'stella': | 115 | elif codename == "stella": |
4993 | 108 | group = args.group.lower() | 116 | group = args.group.lower() |
4994 | 109 | for sd_id in args.sd_id: | 117 | for sd_id in args.sd_id: |
4997 | 110 | if not re.match('[0-9a-fA-F]{4}$', sd_id): | 118 | if not re.match("[0-9a-fA-F]{4}$", sd_id): |
4998 | 111 | error('Invalid subsystem device ID: {%s}' % sd_id) | 119 | error("Invalid subsystem device ID: {%s}" % sd_id) |
4999 | 112 | exit(1) | 120 | exit(1) |
5000 | 113 | meta = 'oem-' + codename + '.' + group + '-' + platform + '-meta' |
LGTM.