Merge lp:~mac9416/unwrapt/modularization into lp:unwrapt
- modularization
- Merge into stable
Proposed by
mac9416
Status: Merged
Merged at revision: 53
Proposed branch: lp:~mac9416/unwrapt/modularization
Merge into: lp:unwrapt
Diff against target: 447 lines (+116/-84), 3 files modified:
  example.py (+6/-2), unwrapt/DefinitionBase.py (+5/-2), unwrapt/definitions/aptdef/__init__.py (+105/-80)
To merge this branch: bzr merge lp:~mac9416/unwrapt/modularization
Related bugs:
Reviewer | Review Type | Date Requested | Status
---|---|---|---
Chris Oliver | | | Approve
Review via email: mp+32608@code.launchpad.net
This proposal has been superseded by a proposal from 2010-08-13.
Commit message
Description of the change
I tried to modularize a couple of things, and in the process hacked the existing code to pieces. There's still a lot of work to do, so feel free to do the same to mine. :-)
To post a comment you must log in.
Revision history for this message
mac9416 (mac9416) wrote:
Revision history for this message
Chris Oliver (excid3) wrote:
Yeah I did not update it because I wanted to make sure recent changes were tested in my personal branch before pushing. I think it was all good and I forgot to push. :P
Revision history for this message
Chris Oliver (excid3):
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'example.py' |
2 | --- example.py 2010-08-01 19:34:08 +0000 |
3 | +++ example.py 2010-08-13 17:34:44 +0000 |
4 | @@ -29,7 +29,8 @@ |
5 | #apt.set_proxy({"http": "http://192.168.1.100:3128"}, "username", "password") |
6 | |
7 | # Configure the apt client |
8 | -apt.set_architecture("amd64") |
9 | +#apt.set_architecture("amd64") |
10 | +apt.set_architecture("i386") |
11 | |
12 | apt.set_status("/var/lib/dpkg/status") |
13 | |
14 | @@ -51,7 +52,10 @@ |
15 | |
16 | print "%i available packages" % len(apt.get_available_package_names()) |
17 | |
18 | -#apt.update(directory="/var/lib/apt/lists", download=False) |
19 | +#FIXME: /var/lib/apt is not the download directory. We should have the ability |
20 | +# to individually specify the download directories. |
21 | +#apt.set_download_directory("/var/lib/apt") |
22 | +#apt.update(download=False) |
23 | |
24 | apt.update() |
25 | |
26 | |
27 | === modified file 'unwrapt/DefinitionBase.py' |
28 | --- unwrapt/DefinitionBase.py 2010-08-01 19:34:08 +0000 |
29 | +++ unwrapt/DefinitionBase.py 2010-08-13 17:34:44 +0000 |
30 | @@ -90,6 +90,7 @@ |
31 | |
32 | - directory is the location of the |
33 | """ |
34 | + #FIXME: that docstring. |
35 | |
36 | self.download_directory = os.path.abspath(os.path.expanduser(directory)) |
37 | |
38 | @@ -163,6 +164,7 @@ |
39 | |
40 | pass |
41 | |
42 | + |
43 | @callback |
44 | def update(self, reporthook=None, directory=None, download=True): |
45 | """ |
46 | @@ -202,6 +204,7 @@ |
47 | |
48 | pass |
49 | |
50 | + |
51 | @callback |
52 | def get_latest_binary(self, package): |
53 | """ |
54 | @@ -268,7 +271,8 @@ |
55 | |
56 | For example: |
57 | |
58 | - client.mark_package("firefox") |
59 | + package = client.get_latest_binary("firefox") |
60 | + client.mark_package(package) |
61 | """ |
62 | |
63 | pass |
64 | @@ -289,7 +293,6 @@ |
65 | For example: |
66 | |
67 | client.apply_changes() |
68 | - |
69 | """ |
70 | |
71 | pass |
72 | |
73 | === modified file 'unwrapt/definitions/aptdef/__init__.py' |
74 | --- unwrapt/definitions/aptdef/__init__.py 2010-08-01 19:34:08 +0000 |
75 | +++ unwrapt/definitions/aptdef/__init__.py 2010-08-13 17:34:44 +0000 |
76 | @@ -51,14 +51,12 @@ |
77 | |
78 | |
79 | #TODO: Move this code to proper library location |
80 | -def url_join(first, last): |
81 | +def url_join(*args): |
82 | """ Returns full URL """ |
83 | - if first.endswith('/'): |
84 | - if last.startswith('/'): return first + last[1:] |
85 | - else: return first + last |
86 | - else: |
87 | - if last.startswith('/'): return first + last |
88 | - else: return first + '/' + last |
89 | + # Strip any leading or trailing slashes from the parts. |
90 | + args = [x.strip("/") for x in args] |
91 | + |
92 | + return "/".join(args) |
93 | |
94 | |
95 | #class Repository(Base): |
96 | @@ -108,10 +106,15 @@ |
97 | |
98 | |
99 | def to_url(repository, architecture, format): |
100 | - return url_join(repository["url"], url_join(architecture, format)) |
101 | + return url_join(repository["url"], architecture, format) |
102 | |
103 | |
104 | def to_filename(directory, url): |
105 | + """ |
106 | + Forms a full filename from a directory and url. |
107 | + i.e. Strips the url of the protocol prefix, replaces all slashes with |
108 | + underscores, and appends it to directory. |
109 | + """ |
110 | return os.path.join(directory, url.split("//")[1].replace("/", "_")) |
111 | |
112 | |
113 | @@ -147,8 +150,13 @@ |
114 | supported = ["amd64", "armel", "i386", "ia64", "powerpc", "sparc"] |
115 | status_properties = ["Package", "Version", "Status", "Provides"] |
116 | binary_dependencies = ["Pre-Depends", "Depends", "Recommends"] |
117 | - supported_statuses = ["install ok installed", "to be installed", "to be downloaded"] |
118 | + supported_statuses = ["install ok installed", |
119 | + "to be downloaded", |
120 | + "dependency to be downloaded", |
121 | + "to be installed", |
122 | + "dependency to be installed"] |
123 | |
124 | + #FIXME: This seems redundant. Could it be moved to DefinitionBase? |
125 | def on_set_proxy(self, proxy, username=None, password=None): |
126 | self.proxy = {"proxy": proxy, |
127 | "user": username, |
128 | @@ -184,7 +192,7 @@ |
129 | self.repositories[count]["url"] = url |
130 | self.repositories[count]["dist"] = dist |
131 | self.repositories[count]["section"] = section |
132 | - self.repositories[count]["url"] = url_join(url, url_join("dists", url_join(dist, section))) |
133 | + self.repositories[count]["url"] = url_join(url, "dists", dist, section) |
134 | |
135 | count += 1 |
136 | |
137 | @@ -218,13 +226,22 @@ |
138 | """ |
139 | This is a missing docstring ZOMG! |
140 | """ |
141 | + |
142 | + if download: |
143 | + self.on_download_lists(reporthook) |
144 | + |
145 | + # Read the newly-downloaded lists. |
146 | + self.on_read_lists() |
147 | + |
148 | + |
149 | + def on_download_lists(self, reporthook=None): |
150 | |
151 | directory = os.path.join(self.download_directory, "lists") |
152 | |
153 | - #TODO: This function obviously needs to be split up and modularized :) |
154 | + # If the download directory does not exist, create it |
155 | + if not os.path.exists(directory): |
156 | + os.makedirs(directory) |
157 | |
158 | - # This is a list of files we downloaded and now need to parse |
159 | - downloaded = [] |
160 | for repo in self.__iter_repositories(): |
161 | |
162 | # Build the strings |
163 | @@ -232,31 +249,31 @@ |
164 | filename = to_filename(directory, url) |
165 | display_name = "Repository => %s / %s" % (repo["dist"], repo["section"]) |
166 | |
167 | - # If the download directory does not exist, create it |
168 | - if not os.path.exists(directory): |
169 | - os.makedirs(directory) |
170 | - |
171 | # Download |
172 | #TODO: pass proxy information and catch exceptions |
173 | #TODO: Support bz2 and unarchived Packages files |
174 | filename = "%s.gz" % filename |
175 | - if download: |
176 | - download_url("%s.gz" % url, filename, display_name, proxy=self.proxy["proxy"], username=self.proxy["user"], password=self.proxy["pass"]) |
177 | - downloaded.append((repo, filename)) |
178 | - |
179 | - #TODO: Improve this. For now we are just opening local files in |
180 | - # unextracted format (what you find in /var/lib/apt/lists) since |
181 | - # that's an easy way to do things. This won't open the gz files |
182 | - # that Unwrapt downloads however |
183 | - else: # Files that are pre-downloaded |
184 | - downloaded.append((repo, filename[:-3])) |
185 | - |
186 | + download_url("%s.gz" % url, filename, display_name, proxy=self.proxy["proxy"], username=self.proxy["user"], password=self.proxy["pass"]) |
187 | + |
188 | + |
189 | + def on_read_lists(self): |
190 | + |
191 | + directory = os.path.join(self.download_directory, "lists") |
192 | + |
193 | + lists = [] |
194 | + for repo in self.__iter_repositories(): |
195 | + |
196 | + # Build the strings |
197 | + url = to_url(repo, self.architecture, "Packages") |
198 | + filename = to_filename(directory, url) |
199 | + filename = "%s.gz" % filename # Works only if the index files are gz |
200 | + lists.append((repo, filename)) |
201 | |
202 | self.packages = {} |
203 | |
204 | - total = len(downloaded) |
205 | + total = len(lists) |
206 | # Now parse each file, extracting as necessary |
207 | - for i, value in enumerate(downloaded): |
208 | + for i, value in enumerate(lists): |
209 | repo, filename = value |
210 | |
211 | # Display percent read |
212 | @@ -264,17 +281,19 @@ |
213 | sys.stdout.write("\rReading package lists... %3i%%" % frac) |
214 | sys.stdout.flush() |
215 | |
216 | - # Parse packages into dictionary |
217 | + # Attempt to open the package list. |
218 | try: |
219 | if filename.endswith(".gz"): |
220 | f = gzip.open(filename, "rb") |
221 | else: |
222 | - f = open(filename, "rb") |
223 | - |
224 | - self.__parse(repo, f) |
225 | - f.close() |
226 | - except: |
227 | - logging.error("\nPackage list does not exist: %s" % filename) |
228 | + f = open(filename, "rb") |
229 | + except: #FIXME: specify exception. |
230 | + logging.error("\nPackage list does not exist: %s" % filename) |
231 | + continue |
232 | + |
233 | + # Parse packages into dictionary |
234 | + self.__parse(repo, f) |
235 | + f.close() |
236 | |
237 | #TODO: Insert items into database |
238 | |
239 | @@ -282,6 +301,7 @@ |
240 | sys.stdout.write("\n") |
241 | |
242 | logging.info("%i packages available" % len(self.packages)) |
243 | + |
244 | |
245 | def __parse(self, repo, f): |
246 | """ |
247 | @@ -328,7 +348,6 @@ |
248 | installed statuses. |
249 | """ |
250 | |
251 | - |
252 | f = open(status, "rb") |
253 | |
254 | self.status = {} |
255 | @@ -420,7 +439,7 @@ |
256 | return self.packages[package] |
257 | |
258 | |
259 | - def on_mark_package(self, metadata): |
260 | + def on_mark_package(self, metadata, dependency=False): |
261 | """ |
262 | Get a list of dependencies based on package metadata |
263 | """ |
264 | @@ -431,25 +450,21 @@ |
265 | #TODO: This function obviously needs to be split up and modularized :) |
266 | |
267 | # First check if the package is installed already? |
268 | - if metadata["Package"] in self.status: |
269 | - raise AttributeError, "Package already set to status: %s" % \ |
270 | - self.status[metadata["Package"]]["Status"] |
271 | + status = self.on_get_package_status(metadata["Package"]) |
272 | + if status != "not installed": |
273 | + raise AttributeError, "Package already set to status: %s" % status |
274 | |
275 | # Mark the package itself |
276 | - metadata["Status"] = "to be downloaded" |
277 | + if not dependency: metadata["Status"] = "to be downloaded" |
278 | + else: metadata["Status"] = "dependency to be downloaded" |
279 | self.status[metadata["Package"]] = metadata |
280 | |
281 | logging.info("Finding dependencies for %s..." % metadata["Package"]) |
282 | |
283 | - # Build a string of the necessary sections we need |
284 | - depends = [] |
285 | - for section in self.binary_dependencies: |
286 | - if section in metadata: |
287 | - depends.append(metadata[section]) |
288 | - depends = ", ".join(depends) |
289 | + depends = self.on_get_package_dependencies(metadata) |
290 | |
291 | # Do the dependency calculations |
292 | - for dep in depends.split(", "): |
293 | + for dep in depends: |
294 | |
295 | # In case we have some ORs |
296 | options = dep.split(" | ") |
297 | @@ -470,7 +485,7 @@ |
298 | # Test for compatible version just in case |
299 | if len(details) > 1: |
300 | comparison = details[1][1:] # strip the '(' |
301 | - version = details [2][:-1] # strip the ')' |
302 | + version = details[2][:-1] # strip the ')' |
303 | |
304 | satisfied = DpkgVersion(self.status[name]["Version"]).compare_string(comparison, version) |
305 | |
306 | @@ -492,15 +507,26 @@ |
307 | |
308 | # Mark sub-dependencies as well |
309 | if pkg: |
310 | - self.on_mark_package(pkg) |
311 | - |
312 | - |
313 | + self.on_mark_package(pkg, dependency=True) |
314 | + |
315 | + |
316 | + def on_get_package_dependencies(self, metadata): |
317 | + |
318 | + # Build a string of the necessary sections we need |
319 | + depends = [] |
320 | + for section in self.binary_dependencies: |
321 | + if section in metadata: |
322 | + depends += metadata[section].split(", ") |
323 | + |
324 | + return depends |
325 | + |
326 | + |
327 | def on_apply_changes(self): |
328 | |
329 | directory = os.path.join(self.download_directory, "packages") |
330 | |
331 | # Build the list of package urls to download |
332 | - downloads = [(key, value["Repository"]["url"].split("dists")[0] + value["Filename"]) for key, value in self.status.items() if value["Status"] == "to be downloaded"] |
333 | + downloads = [(key, value["Repository"]["url"].split("dists")[0] + value["Filename"]) for key, value in self.status.items() if value["Status"] in ["to be downloaded", "dependency to be downloaded"]] |
334 | |
335 | #downloads = [] |
336 | #for key, value in self.status.items(): |
337 | @@ -517,12 +543,16 @@ |
338 | for key, url in downloads: |
339 | download_url(url, "%s/%s" % (directory, url.rsplit("/", 1)[1]), proxy=self.proxy["proxy"], username=self.proxy["user"], password=self.proxy["pass"]) |
340 | # Once it's downloaded, mark this package status to "to be installed" |
341 | - self.status[key]["Status"] = "to be installed" |
342 | + # or "dependency to be installed", depending on what it is now. |
343 | + if self.status[key]["Status"] == "to be downloaded": |
344 | + self.status[key]["Status"] = "to be installed" |
345 | + elif self.status[key]["Status"] == "dependency to be downloaded": |
346 | + self.status[key]["Status"] = "dependency to be installed" |
347 | |
348 | |
349 | def on_save_changes(self, status): |
350 | |
351 | - # This will NOT create a staus file to override /var/lib/dpkg/status |
352 | + # This will NOT create a status file to override /var/lib/dpkg/status |
353 | # so DO NOT try to replace the system status file. |
354 | # YOU HAVE BEEN WARNED |
355 | |
356 | @@ -541,22 +571,19 @@ |
357 | |
358 | |
359 | def on_cancel_changes(self, downloads, installs): |
360 | - |
361 | - cancellations = [] |
362 | |
363 | for key, value in self.status.items(): |
364 | - if downloads and value["Status"] == "to be downloaded" or \ |
365 | - installs and value["Status"] == "to be installed": |
366 | - cancellations.append(key) |
367 | - |
368 | - for key in cancellations: |
369 | - del self.status[key] |
370 | + if downloads and value["Status"] in \ |
371 | + ["to be downloaded", "dependency to be downloaded"] or \ |
372 | + installs and value["Status"] in \ |
373 | + ["to be installed", "dependency to be installed"]: |
374 | + del self.status[key] |
375 | |
376 | |
377 | def on_get_changes_size(self): |
378 | |
379 | # Build list of packages to be downloaded |
380 | - packages = [(value["Package"], value["Version"]) for key, value in self.status.items() if value["Status"] == "to be downloaded"] |
381 | + packages = [(value["Package"], value["Version"]) for key, value in self.status.items() if value["Status"] in ["to be downloaded", "dependency to be downloaded"]] |
382 | |
383 | count = 0 |
384 | total = 0 |
385 | @@ -582,36 +609,36 @@ |
386 | We will take the approach of installing by copying the lists to |
387 | /var/lib/apt/lists and the packages to /var/cache/apt/archives and |
388 | calling apt-get update and then apt-get install on the packages |
389 | - which have the stats of "to be installed". This prevents tampering |
390 | + which have the status of "to be installed". This prevents tampering |
391 | with sources.list and works more or less the exact same if we made |
392 | a local repository. |
393 | """ |
394 | |
395 | - if not os.geteuid()==0: |
396 | + if not os.geteuid() == 0: |
397 | raise PermissionsError, "You may only install as root" |
398 | |
399 | # Copy lists over |
400 | - try: |
401 | - for repo in self.__iter_repositories(): |
402 | - url = to_url(repo, self.architecture, "Packages") |
403 | - filename = to_filename(os.path.join(self.download_directory, "lists"), url) |
404 | + for repo in self.__iter_repositories(): |
405 | + url = to_url(repo, self.architecture, "Packages") |
406 | + filename = to_filename(os.path.join(self.download_directory, "lists"), url) |
407 | |
408 | + try: |
409 | # Extract the gz |
410 | g = gzip.open("%s.gz" % filename, "rb") |
411 | f = open(os.path.join("/var/lib/apt/lists", os.path.basename(filename)), "wb") |
412 | f.write(g.read()) |
413 | f.close() |
414 | g.close() |
415 | - except IOError, e: |
416 | - # We will just ignore this, it only trip out if the user did download=False on update() |
417 | - pass |
418 | + except IOError, e: |
419 | + # We will just ignore this, it only trip out if the user did download=False on update() |
420 | + pass |
421 | |
422 | |
423 | # Copy packages over |
424 | for key, value in self.status.items(): |
425 | - if value["Status"] == "to be installed": |
426 | + if value["Status"] in ["to be installed", "dependency to be installed"]: |
427 | pkg_filename = self.get_binary_version(value["Package"], value["Version"])["Filename"].rsplit("/", 1)[1] |
428 | - filename = os.path.join(self.download_directory, os.path.join("packages", pkg_filename)) |
429 | + filename = os.path.join(self.download_directory, "packages", pkg_filename) |
430 | dest = os.path.join("/var/cache/apt/archives", os.path.basename(filename)) |
431 | shutil.copyfile(filename, dest) |
432 | |
433 | @@ -619,7 +646,7 @@ |
434 | # Call apt-get install with the packages |
435 | packages = [value["Package"] for key, value in self.status.items() if value["Status"] == "to be installed"] |
436 | |
437 | - subprocess.call("apt-get update", shell=True) |
438 | + subprocess.call("apt-gcache gencaches", shell=True) |
439 | subprocess.call("apt-get -y install %s" % " ".join(packages), shell=True) |
440 | |
441 | |
442 | @@ -642,5 +669,3 @@ |
443 | |
444 | |
445 | return upgrades |
446 | - |
447 | - |
lp:unwrapt was out of date. I've updated it to lp:~excid3/keryx/unwrapt. Resubmitting.