Merge lp:~sil2100/ubuntu-archive-tools/migration-assistant-minor-clean into lp:ubuntu-archive-tools
- migration-assistant-minor-clean
- Merge into trunk
Proposed by
Łukasz Zemczak
Status: Merged
Merged at revision: | 1163 |
Proposed branch: | lp:~sil2100/ubuntu-archive-tools/migration-assistant-minor-clean |
Merge into: | lp:ubuntu-archive-tools |
Diff against target: 371 lines (+104/-100), 1 file modified: migration-assistant.py (+104/-100)
To merge this branch: | bzr merge lp:~sil2100/ubuntu-archive-tools/migration-assistant-minor-clean |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Łukasz Zemczak | Approve | ||
Review via email: mp+341119@code.launchpad.net |
Commit message
Refactor the migration-assistant script slightly, with indent changes and removing some of the global variables that should have been local.
Description of the change
Refactor the migration-assistant script slightly, with indent changes and removing some of the global variables that should have been local.
To post a comment you must log in.
- 1172. By Łukasz Zemczak: Fix some brokenness caused by the cleanup.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'migration-assistant.py' | |||
2 | --- migration-assistant.py 2018-03-08 10:28:01 +0000 | |||
3 | +++ migration-assistant.py 2018-03-08 11:12:37 +0000 | |||
4 | @@ -38,23 +38,13 @@ | |||
5 | 38 | import logging | 38 | import logging |
6 | 39 | 39 | ||
7 | 40 | from enum import Enum | 40 | from enum import Enum |
8 | 41 | |||
9 | 42 | from urllib.request import FancyURLopener | 41 | from urllib.request import FancyURLopener |
10 | 43 | |||
11 | 44 | from launchpadlib.launchpad import Launchpad | 42 | from launchpadlib.launchpad import Launchpad |
12 | 45 | 43 | ||
13 | 46 | DEBIAN_CURRENT_SERIES = 'sid' | 44 | DEBIAN_CURRENT_SERIES = 'sid' |
14 | 47 | ARCHIVE_PAGES = 'https://people.canonical.com/~ubuntu-archive/' | 45 | ARCHIVE_PAGES = 'https://people.canonical.com/~ubuntu-archive/' |
15 | 48 | MAX_CACHE_AGE = 14400 # excuses cache should not be older than 4 hours | 46 | MAX_CACHE_AGE = 14400 # excuses cache should not be older than 4 hours |
16 | 49 | 47 | ||
17 | 50 | lp_cachedir = os.path.expanduser(os.path.join("~", ".launchpadlib/cache")) | ||
18 | 51 | lp = Launchpad.login_anonymously( | ||
19 | 52 | 'what-next', 'production', lp_cachedir, version='devel') | ||
20 | 53 | |||
21 | 54 | ubuntu = lp.distributions["ubuntu"] | ||
22 | 55 | archive = ubuntu.main_archive | ||
23 | 56 | series = ubuntu.current_series | ||
24 | 57 | |||
25 | 58 | excuses = {} | 48 | excuses = {} |
26 | 59 | 49 | ||
27 | 60 | level = 0 | 50 | level = 0 |
28 | @@ -137,7 +127,7 @@ | |||
29 | 137 | return None | 127 | return None |
30 | 138 | 128 | ||
31 | 139 | 129 | ||
33 | 140 | def find_excuses(src, level, seen): | 130 | def find_excuses(lp, src, level, seen): |
34 | 141 | if src in seen: | 131 | if src in seen: |
35 | 142 | return | 132 | return |
36 | 143 | 133 | ||
37 | @@ -145,7 +135,7 @@ | |||
38 | 145 | item_name = item.get('item-name') | 135 | item_name = item.get('item-name') |
39 | 146 | 136 | ||
40 | 147 | if item_name == src: | 137 | if item_name == src: |
42 | 148 | process(item, level, seen) | 138 | process(lp, item, level, seen) |
43 | 149 | 139 | ||
44 | 150 | 140 | ||
45 | 151 | def get_pkg_archive_path(package): | 141 | def get_pkg_archive_path(package): |
46 | @@ -181,12 +171,12 @@ | |||
47 | 181 | return None | 171 | return None |
48 | 182 | 172 | ||
49 | 183 | 173 | ||
51 | 184 | def package_in_distro(package, distro='ubuntu', proposed=False): | 174 | def package_in_distro(package, distro='ubuntu', distroseries='bionic', |
52 | 175 | proposed=False): | ||
53 | 185 | assistant = StatusAdapter(assistant_logger, {'depth': 0}) | 176 | assistant = StatusAdapter(assistant_logger, {'depth': 0}) |
54 | 186 | 177 | ||
55 | 187 | # TODO: This operation is pretty costly, do caching? | 178 | # TODO: This operation is pretty costly, do caching? |
56 | 188 | 179 | ||
57 | 189 | distroseries = series.name | ||
58 | 190 | if distro == 'debian': | 180 | if distro == 'debian': |
59 | 191 | distroseries = DEBIAN_CURRENT_SERIES | 181 | distroseries = DEBIAN_CURRENT_SERIES |
60 | 192 | if proposed: | 182 | if proposed: |
61 | @@ -204,7 +194,7 @@ | |||
62 | 204 | if " {} ".format(package) not in line: | 194 | if " {} ".format(package) not in line: |
63 | 205 | continue | 195 | continue |
64 | 206 | package_line = line.split(' | ') | 196 | package_line = line.split(' | ') |
66 | 207 | 197 | ||
67 | 208 | series_component = package_line[2].split('/') | 198 | series_component = package_line[2].split('/') |
68 | 209 | component = 'main' | 199 | component = 'main' |
69 | 210 | if len(series_component) > 1: | 200 | if len(series_component) > 1: |
70 | @@ -226,7 +216,7 @@ | |||
71 | 226 | return {} | 216 | return {} |
72 | 227 | 217 | ||
73 | 228 | 218 | ||
75 | 229 | def process_lp_build_results(source, level, uploads, failed): | 219 | def process_lp_build_results(source, archive, series, level, uploads, failed): |
76 | 230 | assistant = StatusAdapter(assistant_logger, {'depth': level + 1}) | 220 | assistant = StatusAdapter(assistant_logger, {'depth': level + 1}) |
77 | 231 | 221 | ||
78 | 232 | source_name = source.get('source') | 222 | source_name = source.get('source') |
79 | @@ -270,11 +260,12 @@ | |||
80 | 270 | } | 260 | } |
81 | 271 | 261 | ||
82 | 272 | 262 | ||
84 | 273 | def process_unsatisfiable_depends(source, level, seen): | 263 | def process_unsatisfiable_depends(source, series, level, seen): |
85 | 274 | assistant = StatusAdapter(assistant_logger, {'depth': level + 1}) | 264 | assistant = StatusAdapter(assistant_logger, {'depth': level + 1}) |
86 | 275 | 265 | ||
87 | 276 | affected_sources = {} | 266 | affected_sources = {} |
88 | 277 | unsatisfiable = {} | 267 | unsatisfiable = {} |
89 | 268 | distroseries = series.name | ||
90 | 278 | 269 | ||
91 | 279 | depends = source.get('dependencies').get('unsatisfiable-dependencies') | 270 | depends = source.get('dependencies').get('unsatisfiable-dependencies') |
92 | 280 | for arch, signatures in depends.items(): | 271 | for arch, signatures in depends.items(): |
93 | @@ -287,12 +278,15 @@ | |||
94 | 287 | pkg = get_source_package(binary_name) | 278 | pkg = get_source_package(binary_name) |
95 | 288 | affected_sources[arch].add(pkg) | 279 | affected_sources[arch].add(pkg) |
96 | 289 | except Exception: | 280 | except Exception: |
100 | 290 | # FIXME: we might be dealing with a new package in proposed here, | 281 | # FIXME: we might be dealing with a new package in proposed |
101 | 291 | # but using the binary name instead of the source name. | 282 | # here, but using the binary name instead of the source |
102 | 292 | if any(package_in_distro(binary_name, distro='ubuntu')): | 283 | # name. |
103 | 284 | if any(package_in_distro(binary_name, distro='ubuntu', | ||
104 | 285 | distroseries=distroseries)): | ||
105 | 293 | affected_sources[arch].add(binary_name) | 286 | affected_sources[arch].add(binary_name) |
106 | 294 | elif any(package_in_distro(binary_name, | 287 | elif any(package_in_distro(binary_name, |
107 | 295 | distro='ubuntu', | 288 | distro='ubuntu', |
108 | 289 | distroseries=distroseries, | ||
109 | 296 | proposed=True)): | 290 | proposed=True)): |
110 | 297 | affected_sources[arch].append(binary_name) | 291 | affected_sources[arch].append(binary_name) |
111 | 298 | 292 | ||
112 | @@ -328,8 +322,10 @@ | |||
113 | 328 | depends = signature.split(' ')[0] | 322 | depends = signature.split(' ')[0] |
114 | 329 | assistant.error("{} can not be satisfied".format(signature), | 323 | assistant.error("{} can not be satisfied".format(signature), |
115 | 330 | status=ExcuseValue.FAIL) | 324 | status=ExcuseValue.FAIL) |
117 | 331 | in_archive = package_in_distro(depends, distro='ubuntu') | 325 | in_archive = package_in_distro(depends, distro='ubuntu', |
118 | 326 | distroseries=distroseries) | ||
119 | 332 | in_proposed = package_in_distro(depends, distro='ubuntu', | 327 | in_proposed = package_in_distro(depends, distro='ubuntu', |
120 | 328 | distroseries=distroseries, | ||
121 | 333 | proposed=True) | 329 | proposed=True) |
122 | 334 | 330 | ||
123 | 335 | if any(in_archive) and not any(in_proposed): | 331 | if any(in_archive) and not any(in_proposed): |
124 | @@ -350,7 +346,8 @@ | |||
125 | 350 | "but not in Debian?", | 346 | "but not in Debian?", |
126 | 351 | status=ExcuseValue.INFO) | 347 | status=ExcuseValue.INFO) |
127 | 352 | elif not any(in_archive) and not any(in_proposed): | 348 | elif not any(in_archive) and not any(in_proposed): |
129 | 353 | in_debian = package_in_distro(depends, distro='debian') | 349 | in_debian = package_in_distro(depends, distro='debian', |
130 | 350 | distroseries=distroseries) | ||
131 | 354 | if any(in_debian): | 351 | if any(in_debian): |
132 | 355 | assistant.warning("{} only exists in Debian".format(depends), | 352 | assistant.warning("{} only exists in Debian".format(depends), |
133 | 356 | status=ExcuseValue.FAIL) | 353 | status=ExcuseValue.FAIL) |
134 | @@ -426,7 +423,7 @@ | |||
135 | 426 | status=ExcuseValue.INFO) | 423 | status=ExcuseValue.INFO) |
136 | 427 | 424 | ||
137 | 428 | 425 | ||
139 | 429 | def process_blocking(source, level): | 426 | def process_blocking(source, lp, level): |
140 | 430 | assistant = StatusAdapter(assistant_logger, {'depth': level + 1}) | 427 | assistant = StatusAdapter(assistant_logger, {'depth': level + 1}) |
141 | 431 | 428 | ||
142 | 432 | bugs = source.get('policy_info').get('block-bugs') | 429 | bugs = source.get('policy_info').get('block-bugs') |
143 | @@ -506,7 +503,7 @@ | |||
144 | 506 | find_excuses(blocker, level+2, seen) | 503 | find_excuses(blocker, level+2, seen) |
145 | 507 | 504 | ||
146 | 508 | 505 | ||
148 | 509 | def process_missing_builds(source, level): | 506 | def process_missing_builds(source, archive, series, level): |
149 | 510 | assistant = StatusAdapter(assistant_logger, {'depth': level + 1}) | 507 | assistant = StatusAdapter(assistant_logger, {'depth': level + 1}) |
150 | 511 | 508 | ||
151 | 512 | source_name = source.get('source') | 509 | source_name = source.get('source') |
152 | @@ -569,7 +566,7 @@ | |||
153 | 569 | anais = [] | 566 | anais = [] |
154 | 570 | new_binaries = set() | 567 | new_binaries = set() |
155 | 571 | 568 | ||
157 | 572 | process_lp_build_results(source, level, uploads, failed) | 569 | process_lp_build_results(source, archive, series, level, uploads, failed) |
158 | 573 | 570 | ||
159 | 574 | if new_version in uploads: | 571 | if new_version in uploads: |
160 | 575 | for arch, item in uploads[new_version].items(): | 572 | for arch, item in uploads[new_version].items(): |
161 | @@ -578,7 +575,7 @@ | |||
162 | 578 | new_binaries.add(binary_name) | 575 | new_binaries.add(binary_name) |
163 | 579 | if binary.get('is_new'): | 576 | if binary.get('is_new'): |
164 | 580 | new.append(binary) | 577 | new.append(binary) |
166 | 581 | 578 | ||
167 | 582 | if not any(failed): | 579 | if not any(failed): |
168 | 583 | assistant.error("No failed builds found", status=ExcuseValue.PASS) | 580 | assistant.error("No failed builds found", status=ExcuseValue.PASS) |
169 | 584 | 581 | ||
170 | @@ -598,9 +595,9 @@ | |||
171 | 598 | "Admin to run:", | 595 | "Admin to run:", |
172 | 599 | status=ExcuseValue.INFO) | 596 | status=ExcuseValue.INFO) |
173 | 600 | assistant.info("remove-package %(arches)s -b %(bins)s" | 597 | assistant.info("remove-package %(arches)s -b %(bins)s" |
177 | 601 | % ({ 'arches': " ".join(arch_o), | 598 | % ({'arches': " ".join(arch_o), |
178 | 602 | 'bins': " ".join(old_binaries), | 599 | 'bins': " ".join(old_binaries), |
179 | 603 | }), status=ExcuseValue.NONE) | 600 | }), status=ExcuseValue.NONE) |
180 | 604 | except AttributeError: | 601 | except AttributeError: |
181 | 605 | # Ignore a failure here, it just means we don't have | 602 | # Ignore a failure here, it just means we don't have |
182 | 606 | # missing-builds to process after all. | 603 | # missing-builds to process after all. |
183 | @@ -608,7 +605,7 @@ | |||
184 | 608 | 605 | ||
185 | 609 | if any(new): | 606 | if any(new): |
186 | 610 | assistant.error("This package has NEW binaries to process:", | 607 | assistant.error("This package has NEW binaries to process:", |
188 | 611 | status=ExcuseValue.INFO) | 608 | status=ExcuseValue.INFO) |
189 | 612 | for binary in new: | 609 | for binary in new: |
190 | 613 | assistant.warning("[{}] {}/{}".format( | 610 | assistant.warning("[{}] {}/{}".format( |
191 | 614 | binary.get('architecture'), | 611 | binary.get('architecture'), |
192 | @@ -617,11 +614,13 @@ | |||
193 | 617 | status=ExcuseValue.FAIL) | 614 | status=ExcuseValue.FAIL) |
194 | 618 | 615 | ||
195 | 619 | 616 | ||
199 | 620 | 617 | def process(lp, source, level, seen): | |
197 | 621 | |||
198 | 622 | def process(source, level, seen): | ||
200 | 623 | assistant = StatusAdapter(assistant_logger, {'depth': level}) | 618 | assistant = StatusAdapter(assistant_logger, {'depth': level}) |
201 | 624 | 619 | ||
202 | 620 | ubuntu = lp.distributions["ubuntu"] | ||
203 | 621 | archive = ubuntu.main_archive | ||
204 | 622 | series = ubuntu.current_series | ||
205 | 623 | |||
206 | 625 | source_name = source.get('source') | 624 | source_name = source.get('source') |
207 | 626 | reasons = source.get('reason') | 625 | reasons = source.get('reason') |
208 | 627 | 626 | ||
209 | @@ -644,15 +643,15 @@ | |||
210 | 644 | missing_builds = source.get('missing-builds') | 643 | missing_builds = source.get('missing-builds') |
211 | 645 | if missing_builds is not None or 'no-binaries' in reasons: | 644 | if missing_builds is not None or 'no-binaries' in reasons: |
212 | 646 | work_needed = True | 645 | work_needed = True |
214 | 647 | process_missing_builds(source, level) | 646 | process_missing_builds(source, archive, series, level) |
215 | 648 | 647 | ||
216 | 649 | if 'depends' in reasons: | 648 | if 'depends' in reasons: |
217 | 650 | work_needed = True | 649 | work_needed = True |
219 | 651 | process_unsatisfiable_depends(source, level, seen) | 650 | process_unsatisfiable_depends(source, series, level, seen) |
220 | 652 | 651 | ||
221 | 653 | if 'block' in reasons: | 652 | if 'block' in reasons: |
222 | 654 | work_needed = True | 653 | work_needed = True |
224 | 655 | process_blocking(source, level) | 654 | process_blocking(source, lp, level) |
225 | 656 | 655 | ||
226 | 657 | if 'autopkgtest' in reasons: | 656 | if 'autopkgtest' in reasons: |
227 | 658 | work_needed = True | 657 | work_needed = True |
228 | @@ -691,7 +690,7 @@ | |||
229 | 691 | src_num += 1 | 690 | src_num += 1 |
230 | 692 | 691 | ||
231 | 693 | while True: | 692 | while True: |
233 | 694 | print (options) | 693 | print(options) |
234 | 695 | print("\n".join(wrapper.wrap(msg))) | 694 | print("\n".join(wrapper.wrap(msg))) |
235 | 696 | num = input("\nWhich package do you want to look at?") | 695 | num = input("\nWhich package do you want to look at?") |
236 | 697 | 696 | ||
237 | @@ -705,66 +704,71 @@ | |||
238 | 705 | return num | 704 | return num |
239 | 706 | 705 | ||
240 | 707 | return options[choice] | 706 | return options[choice] |
304 | 708 | 707 | ||
305 | 709 | 708 | ||
306 | 710 | 709 | if __name__ == '__main__': | |
307 | 711 | parser = argparse.ArgumentParser( | 710 | |
308 | 712 | description='Evaluate next steps for proposed migration') | 711 | parser = argparse.ArgumentParser( |
309 | 713 | parser.add_argument('-s', '--source', dest='source', | 712 | description='Evaluate next steps for proposed migration') |
310 | 714 | help='the package to evaluate') | 713 | parser.add_argument('-s', '--source', dest='source', |
311 | 715 | parser.add_argument('--no-cache', dest='do_not_cache', action='store_const', | 714 | help='the package to evaluate') |
312 | 716 | const=True, default=False, | 715 | parser.add_argument('--no-cache', dest='do_not_cache', action='store_const', |
313 | 717 | help='Do not cache excuses') | 716 | const=True, default=False, |
314 | 718 | parser.add_argument('--refresh', action='store_const', | 717 | help='Do not cache excuses') |
315 | 719 | const=True, default=False, | 718 | parser.add_argument('--refresh', action='store_const', |
316 | 720 | help='Force refresh of cached excuses') | 719 | const=True, default=False, |
317 | 721 | parser.add_argument('--debug', action='store_const', | 720 | help='Force refresh of cached excuses') |
318 | 722 | const=True, default=False, | 721 | parser.add_argument('--debug', action='store_const', |
319 | 723 | help='Show debugging information for this tool.') | 722 | const=True, default=False, |
320 | 724 | 723 | help='Show debugging information for this tool.') | |
321 | 725 | args = parser.parse_args() | 724 | |
322 | 726 | 725 | args = parser.parse_args() | |
323 | 727 | if args.debug: | 726 | |
324 | 728 | logging.basicConfig(level=logging.DEBUG, format="%(message)s") | 727 | if args.debug: |
325 | 729 | else: | 728 | logging.basicConfig(level=logging.DEBUG, format="%(message)s") |
326 | 730 | logging.basicConfig(level=logging.INFO, format="%(message)s") | 729 | else: |
327 | 731 | 730 | logging.basicConfig(level=logging.INFO, format="%(message)s") | |
328 | 732 | refresh_due = False | 731 | |
329 | 733 | xdg_cache = os.getenv('XDG_CACHE_HOME', '~/.cache') | 732 | lp_cachedir = os.path.expanduser(os.path.join("~", ".launchpadlib/cache")) |
330 | 734 | excuses_path = os.path.expanduser(os.path.join(xdg_cache, 'excuses.yaml')) | 733 | lp = Launchpad.login_anonymously( |
331 | 735 | if args.do_not_cache: | 734 | 'what-next', 'production', lp_cachedir, version='devel') |
332 | 736 | fp = tempfile.NamedTemporaryFile() | 735 | |
333 | 737 | else: | 736 | refresh_due = False |
334 | 738 | try: | 737 | xdg_cache = os.getenv('XDG_CACHE_HOME', '~/.cache') |
335 | 739 | fp = open(excuses_path, 'r') | 738 | excuses_path = os.path.expanduser(os.path.join(xdg_cache, 'excuses.yaml')) |
336 | 740 | except FileNotFoundError: | 739 | if args.do_not_cache: |
337 | 741 | refresh_due = True | 740 | fp = tempfile.NamedTemporaryFile() |
338 | 742 | pass | 741 | else: |
339 | 743 | finally: | 742 | try: |
340 | 744 | fp = open(excuses_path, 'a+') | 743 | fp = open(excuses_path, 'r') |
341 | 745 | 744 | except FileNotFoundError: | |
342 | 746 | file_state = os.stat(excuses_path) | 745 | refresh_due = True |
343 | 747 | mtime = file_state.st_mtime | 746 | pass |
344 | 748 | now = time.time() | 747 | finally: |
345 | 749 | if (now - mtime) > MAX_CACHE_AGE: | 748 | fp = open(excuses_path, 'a+') |
346 | 750 | refresh_due = True | 749 | |
347 | 751 | 750 | file_state = os.stat(excuses_path) | |
348 | 752 | with fp: | 751 | mtime = file_state.st_mtime |
349 | 753 | if args.refresh or refresh_due: | 752 | now = time.time() |
350 | 754 | url_opener = FancyURLopener() | 753 | if (now - mtime) > MAX_CACHE_AGE: |
351 | 755 | excuses_url = ARCHIVE_PAGES + 'proposed-migration/update_excuses.yaml' | 754 | refresh_due = True |
352 | 756 | excuses_data = url_opener.retrieve(excuses_url, | 755 | |
353 | 757 | fp.name, | 756 | with fp: |
354 | 758 | report_download) | 757 | if args.refresh or refresh_due: |
355 | 759 | fp.seek(0) | 758 | url_opener = FancyURLopener() |
356 | 760 | 759 | excuses_url = ARCHIVE_PAGES + 'proposed-migration/update_excuses.yaml' | |
357 | 761 | # Use the C implementation of the SafeLoader, it's noticeably faster, and | 760 | excuses_data = url_opener.retrieve(excuses_url, |
358 | 762 | # here we're dealing with large input files. | 761 | fp.name, |
359 | 763 | excuses = yaml.load(fp, Loader=yaml.CSafeLoader) | 762 | report_download) |
360 | 764 | 763 | fp.seek(0) | |
361 | 765 | if args.source is None: | 764 | |
362 | 766 | print("No source package name was provided. The following packages are " | 765 | # Use the C implementation of the SafeLoader, it's noticeably faster, and |
363 | 767 | "blocked in proposed:\n") | 766 | # here we're dealing with large input files. |
364 | 768 | args.source = choose_blocked_source(excuses) | 767 | excuses = yaml.load(fp, Loader=yaml.CSafeLoader) |
365 | 769 | 768 | ||
366 | 770 | find_excuses(args.source, 0, seen) | 769 | if args.source is None: |
367 | 770 | print("No source package name was provided. The following packages are " | ||
368 | 771 | "blocked in proposed:\n") | ||
369 | 772 | args.source = choose_blocked_source(excuses) | ||
370 | 773 | |||
371 | 774 | find_excuses(lp, args.source, 0, seen) |
+1 by Matt.