Merge lp:~sil2100/ubuntu-archive-tools/migration-assistant-minor-clean into lp:ubuntu-archive-tools

Proposed by Łukasz Zemczak
Status: Merged
Merged at revision: 1163
Proposed branch: lp:~sil2100/ubuntu-archive-tools/migration-assistant-minor-clean
Merge into: lp:ubuntu-archive-tools
Diff against target: 371 lines (+104/-100)
1 file modified
migration-assistant.py (+104/-100)
To merge this branch: bzr merge lp:~sil2100/ubuntu-archive-tools/migration-assistant-minor-clean
Reviewer: Łukasz Zemczak
Review: Approve
Review via email: mp+341119@code.launchpad.net

Commit message

Refactor the migration-assistant script slightly: clean up indentation and remove global variables that should have been local, passing them as function parameters instead.

Description of the change

Refactor the migration-assistant script slightly: clean up indentation and remove global variables that should have been local, passing them as function parameters instead.
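
The pattern is visible throughout the preview diff below: the Launchpad session and the derived ubuntu/archive/series objects are no longer created at import time as module globals, but inside an if __name__ == '__main__' block, and are handed down to the helpers as parameters. A minimal sketch of that pattern (the function body and the package name here are placeholders, not code from the script):

    from launchpadlib.launchpad import Launchpad


    def find_excuses(lp, src, level, seen):
        # 'lp' arrives as a parameter rather than as a module-level global,
        # so the module can be imported (e.g. by tests) without logging in.
        print("would inspect excuses for", src, "at depth", level)


    if __name__ == '__main__':
        # The network login happens only when the script is run directly.
        lp = Launchpad.login_anonymously('what-next', 'production',
                                         version='devel')
        find_excuses(lp, 'hello', 0, set())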

1172. By Łukasz Zemczak

Fix some brokenness caused by the cleanup.
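
One such spot is visible in the preview diff below (likely the kind of breakage this follow-up revision addresses): find_excuses() gained an lp parameter, but the recursive call in process_blocking(), find_excuses(blocker, level+2, seen), was left unchanged as a context line and is now one positional argument short. A small self-contained illustration of the mismatch (the stand-in values are hypothetical):

    def find_excuses(lp, src, level, seen):
        # Post-refactor signature: 'lp' is now the first parameter.
        print(lp, src, level, seen)


    lp = object()        # stand-in for the Launchpad session
    blocker = 'somepkg'  # stand-in for a blocking source package name
    level, seen = 0, set()

    # The unchanged call from process_blocking() is now short one argument:
    #     find_excuses(blocker, level + 2, seen)
    # TypeError: find_excuses() missing 1 required positional argument: 'seen'

    # Threading 'lp' through, as the other call sites in the diff do, works:
    find_excuses(lp, blocker, level + 2, seen)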

Revision history for this message
Łukasz Zemczak (sil2100) wrote :

+1 by Matt.

review: Approve

Preview Diff

=== modified file 'migration-assistant.py'
--- migration-assistant.py 2018-03-08 10:28:01 +0000
+++ migration-assistant.py 2018-03-08 11:12:37 +0000
@@ -38,23 +38,13 @@
 import logging
 
 from enum import Enum
-
 from urllib.request import FancyURLopener
-
 from launchpadlib.launchpad import Launchpad
 
 DEBIAN_CURRENT_SERIES = 'sid'
 ARCHIVE_PAGES = 'https://people.canonical.com/~ubuntu-archive/'
 MAX_CACHE_AGE = 14400  # excuses cache should not be older than 4 hours
 
-lp_cachedir = os.path.expanduser(os.path.join("~", ".launchpadlib/cache"))
-lp = Launchpad.login_anonymously(
-    'what-next', 'production', lp_cachedir, version='devel')
-
-ubuntu = lp.distributions["ubuntu"]
-archive = ubuntu.main_archive
-series = ubuntu.current_series
-
 excuses = {}
 
 level = 0
@@ -137,7 +127,7 @@
     return None
 
 
-def find_excuses(src, level, seen):
+def find_excuses(lp, src, level, seen):
     if src in seen:
         return
 
@@ -145,7 +135,7 @@
         item_name = item.get('item-name')
 
         if item_name == src:
-            process(item, level, seen)
+            process(lp, item, level, seen)
 
 
 def get_pkg_archive_path(package):
@@ -181,12 +171,12 @@
     return None
 
 
-def package_in_distro(package, distro='ubuntu', proposed=False):
+def package_in_distro(package, distro='ubuntu', distroseries='bionic',
+                      proposed=False):
     assistant = StatusAdapter(assistant_logger, {'depth': 0})
 
     # TODO: This operation is pretty costly, do caching?
 
-    distroseries = series.name
     if distro == 'debian':
         distroseries = DEBIAN_CURRENT_SERIES
     if proposed:
@@ -204,7 +194,7 @@
         if " {} ".format(package) not in line:
             continue
         package_line = line.split(' | ')
-        
+
         series_component = package_line[2].split('/')
         component = 'main'
         if len(series_component) > 1:
@@ -226,7 +216,7 @@
     return {}
 
 
-def process_lp_build_results(source, level, uploads, failed):
+def process_lp_build_results(source, archive, series, level, uploads, failed):
     assistant = StatusAdapter(assistant_logger, {'depth': level + 1})
 
     source_name = source.get('source')
@@ -270,11 +260,12 @@
         }
 
 
-def process_unsatisfiable_depends(source, level, seen):
+def process_unsatisfiable_depends(source, series, level, seen):
     assistant = StatusAdapter(assistant_logger, {'depth': level + 1})
 
     affected_sources = {}
     unsatisfiable = {}
+    distroseries = series.name
 
     depends = source.get('dependencies').get('unsatisfiable-dependencies')
     for arch, signatures in depends.items():
@@ -287,12 +278,15 @@
                 pkg = get_source_package(binary_name)
                 affected_sources[arch].add(pkg)
             except Exception:
-                # FIXME: we might be dealing with a new package in proposed here,
-                # but using the binary name instead of the source name.
-                if any(package_in_distro(binary_name, distro='ubuntu')):
+                # FIXME: we might be dealing with a new package in proposed
+                # here, but using the binary name instead of the source
+                # name.
+                if any(package_in_distro(binary_name, distro='ubuntu',
+                                         distroseries=distroseries)):
                     affected_sources[arch].add(binary_name)
                 elif any(package_in_distro(binary_name,
                                            distro='ubuntu',
+                                           distroseries=distroseries,
                                            proposed=True)):
                     affected_sources[arch].append(binary_name)
 
@@ -328,8 +322,10 @@
             depends = signature.split(' ')[0]
             assistant.error("{} can not be satisfied".format(signature),
                             status=ExcuseValue.FAIL)
-            in_archive = package_in_distro(depends, distro='ubuntu')
+            in_archive = package_in_distro(depends, distro='ubuntu',
+                                           distroseries=distroseries)
             in_proposed = package_in_distro(depends, distro='ubuntu',
+                                            distroseries=distroseries,
                                             proposed=True)
 
             if any(in_archive) and not any(in_proposed):
@@ -350,7 +346,8 @@
                                   "but not in Debian?",
                                   status=ExcuseValue.INFO)
             elif not any(in_archive) and not any(in_proposed):
-                in_debian = package_in_distro(depends, distro='debian')
+                in_debian = package_in_distro(depends, distro='debian',
+                                              distroseries=distroseries)
                 if any(in_debian):
                     assistant.warning("{} only exists in Debian".format(depends),
                                       status=ExcuseValue.FAIL)
@@ -426,7 +423,7 @@
                     status=ExcuseValue.INFO)
 
 
-def process_blocking(source, level):
+def process_blocking(source, lp, level):
     assistant = StatusAdapter(assistant_logger, {'depth': level + 1})
 
     bugs = source.get('policy_info').get('block-bugs')
@@ -506,7 +503,7 @@
         find_excuses(blocker, level+2, seen)
 
 
-def process_missing_builds(source, level):
+def process_missing_builds(source, archive, series, level):
     assistant = StatusAdapter(assistant_logger, {'depth': level + 1})
 
     source_name = source.get('source')
@@ -569,7 +566,7 @@
     anais = []
     new_binaries = set()
 
-    process_lp_build_results(source, level, uploads, failed)
+    process_lp_build_results(source, archive, series, level, uploads, failed)
 
     if new_version in uploads:
         for arch, item in uploads[new_version].items():
@@ -578,7 +575,7 @@
                 new_binaries.add(binary_name)
                 if binary.get('is_new'):
                     new.append(binary)
-                    
+
     if not any(failed):
         assistant.error("No failed builds found", status=ExcuseValue.PASS)
 
@@ -598,9 +595,9 @@
                                 "Admin to run:",
                                 status=ExcuseValue.INFO)
                 assistant.info("remove-package %(arches)s -b %(bins)s"
-                               % ({ 'arches': " ".join(arch_o),
-                                    'bins': " ".join(old_binaries),
-                                  }), status=ExcuseValue.NONE)
+                               % ({'arches': " ".join(arch_o),
+                                   'bins': " ".join(old_binaries),
+                                   }), status=ExcuseValue.NONE)
             except AttributeError:
                 # Ignore a failure here, it just means we don't have
                 # missing-builds to process after all.
@@ -608,7 +605,7 @@
 
     if any(new):
         assistant.error("This package has NEW binaries to process:",
-                         status=ExcuseValue.INFO)
+                        status=ExcuseValue.INFO)
         for binary in new:
             assistant.warning("[{}] {}/{}".format(
                 binary.get('architecture'),
@@ -617,11 +614,13 @@
                 status=ExcuseValue.FAIL)
 
 
-
-
-def process(source, level, seen):
+def process(lp, source, level, seen):
    assistant = StatusAdapter(assistant_logger, {'depth': level})
 
+    ubuntu = lp.distributions["ubuntu"]
+    archive = ubuntu.main_archive
+    series = ubuntu.current_series
+
     source_name = source.get('source')
     reasons = source.get('reason')
 
@@ -644,15 +643,15 @@
     missing_builds = source.get('missing-builds')
     if missing_builds is not None or 'no-binaries' in reasons:
         work_needed = True
-        process_missing_builds(source, level)
+        process_missing_builds(source, archive, series, level)
 
     if 'depends' in reasons:
         work_needed = True
-        process_unsatisfiable_depends(source, level, seen)
+        process_unsatisfiable_depends(source, series, level, seen)
 
     if 'block' in reasons:
         work_needed = True
-        process_blocking(source, level)
+        process_blocking(source, lp, level)
 
     if 'autopkgtest' in reasons:
         work_needed = True
@@ -691,7 +690,7 @@
         src_num += 1
 
     while True:
-        print (options)
+        print(options)
         print("\n".join(wrapper.wrap(msg)))
         num = input("\nWhich package do you want to look at?")
 
@@ -705,66 +704,71 @@
             return num
 
     return options[choice]
-
-
-
-parser = argparse.ArgumentParser(
-    description='Evaluate next steps for proposed migration')
-parser.add_argument('-s', '--source', dest='source',
-                    help='the package to evaluate')
-parser.add_argument('--no-cache', dest='do_not_cache', action='store_const',
-                    const=True, default=False,
-                    help='Do not cache excuses')
-parser.add_argument('--refresh', action='store_const',
-                    const=True, default=False,
-                    help='Force refresh of cached excuses')
-parser.add_argument('--debug', action='store_const',
-                    const=True, default=False,
-                    help='Show debugging information for this tool.')
-
-args = parser.parse_args()
-
-if args.debug:
-    logging.basicConfig(level=logging.DEBUG, format="%(message)s")
-else:
-    logging.basicConfig(level=logging.INFO, format="%(message)s")
-
-refresh_due = False
-xdg_cache = os.getenv('XDG_CACHE_HOME', '~/.cache')
-excuses_path = os.path.expanduser(os.path.join(xdg_cache, 'excuses.yaml'))
-if args.do_not_cache:
-    fp = tempfile.NamedTemporaryFile()
-else:
-    try:
-        fp = open(excuses_path, 'r')
-    except FileNotFoundError:
-        refresh_due = True
-        pass
-    finally:
-        fp = open(excuses_path, 'a+')
-
-    file_state = os.stat(excuses_path)
-    mtime = file_state.st_mtime
-    now = time.time()
-    if (now - mtime) > MAX_CACHE_AGE:
-        refresh_due = True
-
-with fp:
-    if args.refresh or refresh_due:
-        url_opener = FancyURLopener()
-        excuses_url = ARCHIVE_PAGES + 'proposed-migration/update_excuses.yaml'
-        excuses_data = url_opener.retrieve(excuses_url,
-                                           fp.name,
-                                           report_download)
-        fp.seek(0)
-
-    # Use the C implementation of the SafeLoader, it's noticeably faster, and
-    # here we're dealing with large input files.
-    excuses = yaml.load(fp, Loader=yaml.CSafeLoader)
-
-    if args.source is None:
-        print("No source package name was provided. The following packages are "
-              "blocked in proposed:\n")
-        args.source = choose_blocked_source(excuses)
-
-    find_excuses(args.source, 0, seen)
+
+
+if __name__ == '__main__':
+
+    parser = argparse.ArgumentParser(
+        description='Evaluate next steps for proposed migration')
+    parser.add_argument('-s', '--source', dest='source',
+                        help='the package to evaluate')
+    parser.add_argument('--no-cache', dest='do_not_cache', action='store_const',
+                        const=True, default=False,
+                        help='Do not cache excuses')
+    parser.add_argument('--refresh', action='store_const',
+                        const=True, default=False,
+                        help='Force refresh of cached excuses')
+    parser.add_argument('--debug', action='store_const',
+                        const=True, default=False,
+                        help='Show debugging information for this tool.')
+
+    args = parser.parse_args()
+
+    if args.debug:
+        logging.basicConfig(level=logging.DEBUG, format="%(message)s")
+    else:
+        logging.basicConfig(level=logging.INFO, format="%(message)s")
+
+    lp_cachedir = os.path.expanduser(os.path.join("~", ".launchpadlib/cache"))
+    lp = Launchpad.login_anonymously(
+        'what-next', 'production', lp_cachedir, version='devel')
+
+    refresh_due = False
+    xdg_cache = os.getenv('XDG_CACHE_HOME', '~/.cache')
+    excuses_path = os.path.expanduser(os.path.join(xdg_cache, 'excuses.yaml'))
+    if args.do_not_cache:
+        fp = tempfile.NamedTemporaryFile()
+    else:
+        try:
+            fp = open(excuses_path, 'r')
+        except FileNotFoundError:
+            refresh_due = True
+            pass
+        finally:
+            fp = open(excuses_path, 'a+')
+
+        file_state = os.stat(excuses_path)
+        mtime = file_state.st_mtime
+        now = time.time()
+        if (now - mtime) > MAX_CACHE_AGE:
+            refresh_due = True
+
+    with fp:
+        if args.refresh or refresh_due:
+            url_opener = FancyURLopener()
+            excuses_url = ARCHIVE_PAGES + 'proposed-migration/update_excuses.yaml'
+            excuses_data = url_opener.retrieve(excuses_url,
+                                               fp.name,
+                                               report_download)
+            fp.seek(0)
+
+        # Use the C implementation of the SafeLoader, it's noticeably faster, and
+        # here we're dealing with large input files.
+        excuses = yaml.load(fp, Loader=yaml.CSafeLoader)
+
+        if args.source is None:
+            print("No source package name was provided. The following packages are "
+                  "blocked in proposed:\n")
+            args.source = choose_blocked_source(excuses)
+
+        find_excuses(lp, args.source, 0, seen)
