Merge lp:~jml/pkgme-devportal/restore-download-file into lp:pkgme-devportal
- restore-download-file
- Merge into trunk
Proposed by
Jonathan Lange
Status: | Merged |
---|---|
Approved by: | Jonathan Lange |
Approved revision: | 180 |
Merged at revision: | 146 |
Proposed branch: | lp:~jml/pkgme-devportal/restore-download-file |
Merge into: | lp:pkgme-devportal |
Diff against target: |
798 lines (+15/-665) 7 files modified
NEWS (+3/-0) devportalbinary/aptfile.py (+0/-226) devportalbinary/database.py (+5/-131) devportalbinary/testing.py (+3/-139) devportalbinary/tests/test_aptfile.py (+0/-53) devportalbinary/tests/test_database.py (+2/-114) setup.py (+2/-2) |
To merge this branch: | bzr merge lp:~jml/pkgme-devportal/restore-download-file |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Jonathan Lange | Approve | ||
Review via email: mp+134116@code.launchpad.net |
Commit message
Add download_file back to devportalbinary
Description of the change
download_file was being imported by pkgme-service from devportalbinary.
Another branch is in progress, removing this particular dependency. In the meantime,
this branch should land, restoring the behaviour so we can bump pkgme-service
to use the latest devportal without problems.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'NEWS' | |||
2 | --- NEWS 2012-11-13 11:25:44 +0000 | |||
3 | +++ NEWS 2012-11-13 14:53:23 +0000 | |||
4 | @@ -12,6 +12,9 @@ | |||
5 | 12 | maintain a database of library dependencies based on published packages | 12 | maintain a database of library dependencies based on published packages |
6 | 13 | from Launchpad has been removed. (Jonathan Lange) | 13 | from Launchpad has been removed. (Jonathan Lange) |
7 | 14 | 14 | ||
8 | 15 | * ``aptfile`` backend has been removed. (Jonathan Lange) | ||
9 | 16 | |||
10 | 17 | |||
11 | 15 | 0.4.11 (2012-10-29) | 18 | 0.4.11 (2012-10-29) |
12 | 16 | =================== | 19 | =================== |
13 | 17 | 20 | ||
14 | 18 | 21 | ||
15 | === removed file 'devportalbinary/aptfile.py' | |||
16 | --- devportalbinary/aptfile.py 2012-11-13 11:25:44 +0000 | |||
17 | +++ devportalbinary/aptfile.py 1970-01-01 00:00:00 +0000 | |||
18 | @@ -1,226 +0,0 @@ | |||
19 | 1 | # Copyright 2012 Canonical Ltd. This software is licensed under the | ||
20 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | ||
21 | 3 | |||
22 | 4 | # XXX: Remove this. No longer used. | ||
23 | 5 | |||
24 | 6 | __all__ = [ | ||
25 | 7 | 'AptFilePackageDatabase', | ||
26 | 8 | ] | ||
27 | 9 | |||
28 | 10 | import gzip | ||
29 | 11 | import urllib | ||
30 | 12 | import argparse | ||
31 | 13 | import os | ||
32 | 14 | import re | ||
33 | 15 | |||
34 | 16 | |||
35 | 17 | def make_arg_parser(): | ||
36 | 18 | p = argparse.ArgumentParser() | ||
37 | 19 | p.add_argument('--cache-dir', type=str, default='cache') | ||
38 | 20 | p.add_argument('output_file', type=argparse.FileType('w')) | ||
39 | 21 | return p | ||
40 | 22 | |||
41 | 23 | |||
42 | 24 | so_filename_re = re.compile(r'\.so(\.[0-9]+)*$') | ||
43 | 25 | def export_database(db, stream): | ||
44 | 26 | for library, package, arch in db.iter_database(): | ||
45 | 27 | if so_filename_re.search(library): | ||
46 | 28 | stream.write(','.join([package, library, package, arch])) | ||
47 | 29 | stream.write('\n') | ||
48 | 30 | stream.flush() | ||
49 | 31 | |||
50 | 32 | |||
51 | 33 | def dump_apt_file_db(): | ||
52 | 34 | parser = make_arg_parser() | ||
53 | 35 | args = parser.parse_args() | ||
54 | 36 | if not os.path.isdir(args.cache_dir): | ||
55 | 37 | os.path.makedirs(args.cache_dir) | ||
56 | 38 | db = AptFilePackageDatabase(args.cache_dir) | ||
57 | 39 | export_database(db, args.output_file) | ||
58 | 40 | return 0 | ||
59 | 41 | |||
60 | 42 | |||
61 | 43 | def iter_contents_file(contents): | ||
62 | 44 | """ Yield (full-library-path, set-of-pkgnames) from a Contents file. | ||
63 | 45 | |||
64 | 46 | It expects a line starting with "FILE" that tells it when the header ends | ||
65 | 47 | and the actual content starts. | ||
66 | 48 | """ | ||
67 | 49 | found_start_marker = False | ||
68 | 50 | for line in contents: | ||
69 | 51 | if not found_start_marker: | ||
70 | 52 | if line.startswith("FILE"): | ||
71 | 53 | found_start_marker = True | ||
72 | 54 | continue | ||
73 | 55 | (path, sep, pkgs) = [s.strip() for s in line.rpartition(" ")] | ||
74 | 56 | # pkgs is formated a bit funny, e.g. universe/pkgname | ||
75 | 57 | pkgs = set([os.path.basename(pkg) for pkg in pkgs.split(",")]) | ||
76 | 58 | yield (path, pkgs) | ||
77 | 59 | |||
78 | 60 | |||
79 | 61 | class AptFilePackageDatabase(object): | ||
80 | 62 | """Really dumb database that just uses apt-file for local testing """ | ||
81 | 63 | |||
82 | 64 | # we could also read /etc/ld.so.conf.d/*.conf but this maybe different on | ||
83 | 65 | # different distroseries especially if | ||
84 | 66 | # server-distroseries != target-distroseries | ||
85 | 67 | # (I wish there was ldconfig --print-search-dirs) | ||
86 | 68 | LD_SEARCH_PATH = [ | ||
87 | 69 | # standards | ||
88 | 70 | "lib", | ||
89 | 71 | "usr/lib", | ||
90 | 72 | "usr/local/lib", | ||
91 | 73 | # old biarch | ||
92 | 74 | "lib32", | ||
93 | 75 | "usr/lib32", | ||
94 | 76 | # new multiarch | ||
95 | 77 | "lib/i686-linux-gnu", | ||
96 | 78 | "lib/i386-linux-gnu", | ||
97 | 79 | "lib/x86_64-linux-gnu", | ||
98 | 80 | "usr/lib/i386-linux-gnu", | ||
99 | 81 | "usr/lib/i686-linux-gnu", | ||
100 | 82 | "usr/lib/x86_64-linux-gnu", | ||
101 | 83 | # ? | ||
102 | 84 | "usr/lib/x86_64-linux-gnu/fakechroot", | ||
103 | 85 | "usr/lib/x86_64-linux-gnu/mesa", | ||
104 | 86 | "usr/lib/x86_64-linux-gnu/mesa-egl", | ||
105 | 87 | "usr/lib/i386-linux-gnu/mesa", | ||
106 | 88 | ] | ||
107 | 89 | |||
108 | 90 | DISTROSERIES = "oneiric" | ||
109 | 91 | |||
110 | 92 | # If db_type is set to this in the config, that means use this database. | ||
111 | 93 | DB_TYPE = 'aptfile' | ||
112 | 94 | |||
113 | 95 | CONTENTS_FILE_URL_LOCATION = ( | ||
114 | 96 | "http://archive.ubuntu.com/ubuntu/dists/%(distroseries)s/" | ||
115 | 97 | "Contents-%(arch)s.gz") | ||
116 | 98 | |||
117 | 99 | CONTENTS_FILE = "Contents-%(distroseries)s-%(arch)s" | ||
118 | 100 | |||
119 | 101 | def __init__(self, cachedir): | ||
120 | 102 | self.cachedir = os.path.expanduser(cachedir) | ||
121 | 103 | self._distroseries_arch_cache = {} | ||
122 | 104 | |||
123 | 105 | @classmethod | ||
124 | 106 | def from_options(cls, options): | ||
125 | 107 | return cls(options.database_aptfile_cachedir) | ||
126 | 108 | |||
127 | 109 | def _get_lib_to_pkgs_mapping(self, distroseries, arch): | ||
128 | 110 | """Returns a dict of { library-name : set([pkg1,pkg2]) | ||
129 | 111 | |||
130 | 112 | This function will return a dict to lookup library-name to package | ||
131 | 113 | dependencies for the given distroseries and architecture | ||
132 | 114 | """ | ||
133 | 115 | if not (distroseries, arch) in self._distroseries_arch_cache: | ||
134 | 116 | self._distroseries_arch_cache[(distroseries, arch)] = \ | ||
135 | 117 | self._get_mapping_from_contents_file(distroseries, arch) | ||
136 | 118 | return self._distroseries_arch_cache[(distroseries, arch)] | ||
137 | 119 | |||
138 | 120 | def _get_contents_file_cache_path(self, distroseries, arch): | ||
139 | 121 | """Return the path in the cache for the given distroseries, arch """ | ||
140 | 122 | return os.path.join( | ||
141 | 123 | self.cachedir, self.CONTENTS_FILE % { | ||
142 | 124 | 'distroseries': distroseries, 'arch': arch}) | ||
143 | 125 | |||
144 | 126 | def _get_contents_file_server_url(self, distroseries, arch): | ||
145 | 127 | """Return the remote server URL for the given distroseries, arch """ | ||
146 | 128 | return self.CONTENTS_FILE_URL_LOCATION % { | ||
147 | 129 | 'distroseries': distroseries, 'arch': arch} | ||
148 | 130 | |||
149 | 131 | def _get_mapping_from_contents_file(self, distroseries, arch): | ||
150 | 132 | """Return lib,pkgs mapping from contents file for distroseries, arch | ||
151 | 133 | |||
152 | 134 | This expects the contents file to be in the cachedir already. | ||
153 | 135 | """ | ||
154 | 136 | lib_to_pkgs = {} | ||
155 | 137 | path = self._get_contents_file_cache_path(distroseries, arch) | ||
156 | 138 | with open(path) as f: | ||
157 | 139 | for path, pkgs in self._iter_contents_file(f): | ||
158 | 140 | basename = os.path.basename(path) | ||
159 | 141 | if not basename in lib_to_pkgs: | ||
160 | 142 | lib_to_pkgs[basename] = set() | ||
161 | 143 | lib_to_pkgs[basename] |= pkgs | ||
162 | 144 | return lib_to_pkgs | ||
163 | 145 | |||
164 | 146 | def _download_contents_file_compressed(self, distroseries, arch): | ||
165 | 147 | """Downloads the content file for distroseries, arch into target """ | ||
166 | 148 | # XXX: we may eventually want to merge the Contents files from | ||
167 | 149 | # the -updates repository too in addition to the main archive | ||
168 | 150 | url = self._get_contents_file_server_url(distroseries, arch) | ||
169 | 151 | target = self._get_contents_file_cache_path(distroseries, arch) | ||
170 | 152 | compressed_target = target + os.path.splitext(url)[1] | ||
171 | 153 | # download | ||
172 | 154 | urllib.urlretrieve(url, compressed_target) | ||
173 | 155 | return compressed_target | ||
174 | 156 | |||
175 | 157 | def _iter_contents_file(self, in_file): | ||
176 | 158 | for path, pkgs in iter_contents_file(in_file): | ||
177 | 159 | if os.path.dirname(path) in self.LD_SEARCH_PATH: | ||
178 | 160 | yield path, pkgs | ||
179 | 161 | |||
180 | 162 | def _prune_contents_gz_file(self, infile, outfile): | ||
181 | 163 | """Read a compressed Contents.gz and write out a pruned version. | ||
182 | 164 | |||
183 | 165 | This will use iter_contents_file to go over infile and write | ||
184 | 166 | the relevant lines that are in the LD_SEARCH_PATH to outfile. | ||
185 | 167 | """ | ||
186 | 168 | with open(outfile, "w") as outf, gzip.open(infile) as inf: | ||
187 | 169 | # first write the header | ||
188 | 170 | outf.write("FILE LOCATION\n") | ||
189 | 171 | # then iter over all relevant lines and write them out | ||
190 | 172 | for path, pkgs in self._iter_contents_file(inf): | ||
191 | 173 | outf.write("%s %s\n" % (path, ",".join(pkgs))) | ||
192 | 174 | |||
193 | 175 | def _download_and_prepare_contents_file_if_needed(self, distroseries, arch): | ||
194 | 176 | """Ensure there is a usable Contents file in the cachedir | ||
195 | 177 | |||
196 | 178 | This will download, uncompress and prune a Conents file for | ||
197 | 179 | distroseries, arch so that get_dependencies works. | ||
198 | 180 | """ | ||
199 | 181 | # mvo: We can (and should eventually) do etag/if-modified-since | ||
200 | 182 | # matching here. But its not really important as long as | ||
201 | 183 | # we package for stable distroseries as the Contents file | ||
202 | 184 | # will not change | ||
203 | 185 | path = self._get_contents_file_cache_path(distroseries, arch) | ||
204 | 186 | if not os.path.exists(path): | ||
205 | 187 | compressed_contents = self._download_contents_file_compressed( | ||
206 | 188 | distroseries, arch) | ||
207 | 189 | # and prune from ~300mb to 1mb uncompressed as we are only | ||
208 | 190 | # interested in the library path parts | ||
209 | 191 | self._prune_contents_gz_file(compressed_contents, path) | ||
210 | 192 | os.remove(compressed_contents) | ||
211 | 193 | |||
212 | 194 | def iter_database(self, architectures=('i386', 'amd64'), | ||
213 | 195 | distroseries=None): | ||
214 | 196 | """Export the database. | ||
215 | 197 | |||
216 | 198 | Yields (library, package, arch) tuples for everything that we can | ||
217 | 199 | find. | ||
218 | 200 | """ | ||
219 | 201 | # XXX: Untested | ||
220 | 202 | if distroseries is None: | ||
221 | 203 | distroseries = self.DISTROSERIES | ||
222 | 204 | for arch in architectures: | ||
223 | 205 | self._download_and_prepare_contents_file_if_needed( | ||
224 | 206 | distroseries, arch) | ||
225 | 207 | mapping = self._get_lib_to_pkgs_mapping(distroseries, arch) | ||
226 | 208 | for library in mapping: | ||
227 | 209 | for package in mapping[library]: | ||
228 | 210 | yield library, package, arch | ||
229 | 211 | |||
230 | 212 | def get_multiple_dependencies(self, library_names, arch): | ||
231 | 213 | """Get the binary packages that provide libraries. | ||
232 | 214 | |||
233 | 215 | :return: (deps, missing), where ``deps`` is a dict mapping library | ||
234 | 216 | names to sets of packages that provide them, and ``missing`` is a | ||
235 | 217 | set of library names for which no dependencies could be found. | ||
236 | 218 | """ | ||
237 | 219 | self._download_and_prepare_contents_file_if_needed( | ||
238 | 220 | self.DISTROSERIES, arch) | ||
239 | 221 | lib_to_pkgs = self._get_lib_to_pkgs_mapping(self.DISTROSERIES, arch) | ||
240 | 222 | deps = ((lib, lib_to_pkgs.get(lib)) for lib in library_names) | ||
241 | 223 | return dict((lib, dep) for (lib, dep) in deps if dep) | ||
242 | 224 | |||
243 | 225 | def close(self): | ||
244 | 226 | pass | ||
245 | 227 | 0 | ||
246 | === modified file 'devportalbinary/database.py' | |||
247 | --- devportalbinary/database.py 2012-11-13 11:28:53 +0000 | |||
248 | +++ devportalbinary/database.py 2012-11-13 14:53:23 +0000 | |||
249 | @@ -1,134 +1,13 @@ | |||
250 | 1 | # Copyright 2011 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2011 Canonical Ltd. This software is licensed under the |
251 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
252 | 3 | 3 | ||
263 | 4 | from storm.expr import And, Column, Select, Table | 4 | from .configuration import load_configuration |
264 | 5 | from storm.locals import create_database, Store | 5 | from .utils import download_file |
255 | 6 | from storm.uri import URI as StormURI | ||
256 | 7 | |||
257 | 8 | from .aptfile import AptFilePackageDatabase | ||
258 | 9 | from .configuration import ( | ||
259 | 10 | CONF_FILE_ENV_VAR, | ||
260 | 11 | get_config_file_path, | ||
261 | 12 | load_configuration, | ||
262 | 13 | ) | ||
265 | 14 | 6 | ||
266 | 15 | from libdep_service_client.client import Client | 7 | from libdep_service_client.client import Client |
267 | 16 | 8 | ||
383 | 17 | 9 | # Shut up pyflakes. Imported because other things use this. | |
384 | 18 | class URI(StormURI): | 10 | download_file |
270 | 19 | """A stand-in for Storm's URI class. | ||
271 | 20 | |||
272 | 21 | This class implements the same interface as `storm.uri.URI`, except | ||
273 | 22 | that the constructor has a different signature. Storm's version takes | ||
274 | 23 | a string and parses it, this version can be used when you already | ||
275 | 24 | have a parsed version and just need to create the object. | ||
276 | 25 | """ | ||
277 | 26 | |||
278 | 27 | # XXX: Only used by PackageDatabase, which is flagged for deletion. | ||
279 | 28 | |||
280 | 29 | def __init__(self, scheme=None, host=None, port=None, username=None, | ||
281 | 30 | password=None, database=None, options=None): | ||
282 | 31 | self.scheme = scheme | ||
283 | 32 | self.host = host | ||
284 | 33 | self.port = port | ||
285 | 34 | self.username = username | ||
286 | 35 | self.password = password | ||
287 | 36 | self.database = database | ||
288 | 37 | self.options = options | ||
289 | 38 | if self.options is None: | ||
290 | 39 | self.options = dict() | ||
291 | 40 | |||
292 | 41 | |||
293 | 42 | class PackageDatabase(object): | ||
294 | 43 | |||
295 | 44 | # XXX: No longer used within pkgme-devportal | ||
296 | 45 | |||
297 | 46 | SQLITE = 'sqlite' | ||
298 | 47 | POSTGRES = 'postgres' | ||
299 | 48 | |||
300 | 49 | def __init__(self, store): | ||
301 | 50 | self._store = store | ||
302 | 51 | |||
303 | 52 | @classmethod | ||
304 | 53 | def _get_storm_sqlite_connection_uri(cls, opts): | ||
305 | 54 | raise ValueError( | ||
306 | 55 | "SQLite is no longer supported, you must migrate to postgresql.") | ||
307 | 56 | |||
308 | 57 | @classmethod | ||
309 | 58 | def _get_storm_postgres_connection_uri(cls, opts): | ||
310 | 59 | if not getattr(opts, 'database_db_name', None): | ||
311 | 60 | raise ValueError( | ||
312 | 61 | "Can't create database, no connection info available. " | ||
313 | 62 | "You must specify %s. Looked in %s. " | ||
314 | 63 | "Perhaps %s is set incorrectly?" % ( | ||
315 | 64 | 'db_name', get_config_file_path(), CONF_FILE_ENV_VAR)) | ||
316 | 65 | return URI(scheme=opts.database_db_type, | ||
317 | 66 | username=opts.database_username, | ||
318 | 67 | password=opts.database_password, | ||
319 | 68 | host=opts.database_host, | ||
320 | 69 | port=opts.database_port, | ||
321 | 70 | database=opts.database_db_name) | ||
322 | 71 | |||
323 | 72 | @classmethod | ||
324 | 73 | def _get_storm_connection_uri(cls, opts): | ||
325 | 74 | if opts.database_db_type == cls.POSTGRES: | ||
326 | 75 | return cls._get_storm_postgres_connection_uri(opts) | ||
327 | 76 | elif opts.database_db_type == cls.SQLITE: | ||
328 | 77 | return cls._get_storm_sqlite_connection_uri(opts) | ||
329 | 78 | else: | ||
330 | 79 | raise AssertionError( | ||
331 | 80 | "Unsupported database: %s" % opts.database_db_type) | ||
332 | 81 | |||
333 | 82 | @classmethod | ||
334 | 83 | def get_db_info_from_config(cls, opts): | ||
335 | 84 | return cls._get_storm_connection_uri(opts) | ||
336 | 85 | |||
337 | 86 | @classmethod | ||
338 | 87 | def get_store_from_config(cls, opts): | ||
339 | 88 | """Create a storm store based on a config file. | ||
340 | 89 | |||
341 | 90 | This method will create a storm store based | ||
342 | 91 | on the information in ``~/.config/pkgme-binary/conf`` | ||
343 | 92 | |||
344 | 93 | :return: a tuple of (store, store_type), where store_type | ||
345 | 94 | is one of cls.SQLITE or cls.POSTGRES, indicating what | ||
346 | 95 | is at the other end of the store. | ||
347 | 96 | """ | ||
348 | 97 | connection_info = cls.get_db_info_from_config(opts) | ||
349 | 98 | database = create_database(connection_info) | ||
350 | 99 | return Store(database) | ||
351 | 100 | |||
352 | 101 | @classmethod | ||
353 | 102 | def from_options(cls, options): | ||
354 | 103 | return cls(cls.get_store_from_config(options)) | ||
355 | 104 | |||
356 | 105 | def _get_query(self, library_names, arch): | ||
357 | 106 | return Select( | ||
358 | 107 | [Column('library'), Column('dependency')], | ||
359 | 108 | And(Column('architecture') == arch, | ||
360 | 109 | Column('library').is_in(map(unicode, library_names))), | ||
361 | 110 | Table('libdep')) | ||
362 | 111 | |||
363 | 112 | def get_multiple_dependencies(self, library_names, arch): | ||
364 | 113 | """Get the binary packages that provide libraries. | ||
365 | 114 | |||
366 | 115 | :return: (deps, missing), where ``deps`` is a dict mapping library | ||
367 | 116 | names to sets of packages that provide them, and ``missing`` is a | ||
368 | 117 | set of library names for which no dependencies could be found. | ||
369 | 118 | """ | ||
370 | 119 | arch = unicode(arch) | ||
371 | 120 | result = self._store.execute(self._get_query(library_names, arch)) | ||
372 | 121 | found = {} | ||
373 | 122 | for row in result: | ||
374 | 123 | [lib, dependency] = row | ||
375 | 124 | if lib in found: | ||
376 | 125 | found[lib].add(dependency) | ||
377 | 126 | else: | ||
378 | 127 | found[lib] = set([dependency]) | ||
379 | 128 | return found | ||
380 | 129 | |||
381 | 130 | def close(self): | ||
382 | 131 | self._store.close() | ||
385 | 132 | 11 | ||
386 | 133 | 12 | ||
387 | 134 | class LibdepServiceClient(object): | 13 | class LibdepServiceClient(object): |
388 | @@ -159,10 +38,5 @@ | |||
389 | 159 | 38 | ||
390 | 160 | def get_dependency_database(): | 39 | def get_dependency_database(): |
391 | 161 | """Return an object that can get dependencies.""" | 40 | """Return an object that can get dependencies.""" |
392 | 162 | # XXX: Remove AptFilePackageDatabase from here and simplify the method. | ||
393 | 163 | databases = { | ||
394 | 164 | AptFilePackageDatabase.DB_TYPE: AptFilePackageDatabase.from_options, | ||
395 | 165 | LibdepServiceClient.DB_TYPE: LibdepServiceClient.from_options, | ||
396 | 166 | } | ||
397 | 167 | options = load_configuration() | 41 | options = load_configuration() |
399 | 168 | return databases[options.database_db_type](options) | 42 | return LibdepServiceClient.from_options(options) |
400 | 169 | 43 | ||
401 | === modified file 'devportalbinary/testing.py' | |||
402 | --- devportalbinary/testing.py 2012-11-13 11:12:08 +0000 | |||
403 | +++ devportalbinary/testing.py 2012-11-13 14:53:23 +0000 | |||
404 | @@ -1,7 +1,6 @@ | |||
405 | 1 | # Copyright 2011-2012 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2011-2012 Canonical Ltd. This software is licensed under the |
406 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
407 | 3 | 3 | ||
408 | 4 | from contextlib import closing | ||
409 | 5 | import json | 4 | import json |
410 | 6 | import os | 5 | import os |
411 | 7 | import random | 6 | import random |
412 | @@ -21,11 +20,6 @@ | |||
413 | 21 | Fixture, | 20 | Fixture, |
414 | 22 | TempDir, | 21 | TempDir, |
415 | 23 | ) | 22 | ) |
416 | 24 | from postgresfixture import ClusterFixture | ||
417 | 25 | from storm.locals import create_database, Store | ||
418 | 26 | from testresources import ( | ||
419 | 27 | FixtureResource as _FixtureResource, | ||
420 | 28 | ) | ||
421 | 29 | from testtools import TestCase | 23 | from testtools import TestCase |
422 | 30 | from treeshape import ( | 24 | from treeshape import ( |
423 | 31 | from_rough_spec, | 25 | from_rough_spec, |
424 | @@ -33,11 +27,7 @@ | |||
425 | 33 | ) | 27 | ) |
426 | 34 | 28 | ||
427 | 35 | from devportalbinary.binary import MetadataBackend | 29 | from devportalbinary.binary import MetadataBackend |
433 | 36 | from devportalbinary.database import ( | 30 | from devportalbinary.database import LibdepServiceClient |
429 | 37 | LibdepServiceClient, | ||
430 | 38 | PackageDatabase, | ||
431 | 39 | URI, | ||
432 | 40 | ) | ||
434 | 41 | 31 | ||
435 | 42 | from devportalbinary.configuration import CONF_FILE_ENV_VAR | 32 | from devportalbinary.configuration import CONF_FILE_ENV_VAR |
436 | 43 | 33 | ||
437 | @@ -90,133 +80,6 @@ | |||
438 | 90 | im.size[0], im.size[1], im.format)) | 80 | im.size[0], im.size[1], im.format)) |
439 | 91 | 81 | ||
440 | 92 | 82 | ||
441 | 93 | def get_db_schema_file_path(name): | ||
442 | 94 | return os.path.join(os.path.dirname( | ||
443 | 95 | os.path.abspath(__file__)), 'db', name) | ||
444 | 96 | |||
445 | 97 | |||
446 | 98 | def get_db_schema_queries(filenames): | ||
447 | 99 | for filename in filenames: | ||
448 | 100 | path = get_db_schema_file_path(filename) | ||
449 | 101 | with open(path) as f: | ||
450 | 102 | yield f.read() | ||
451 | 103 | |||
452 | 104 | |||
453 | 105 | class PostgresDatabaseFixture(Fixture): | ||
454 | 106 | |||
455 | 107 | def __init__(self): | ||
456 | 108 | super(PostgresDatabaseFixture, self).__init__() | ||
457 | 109 | self.db_name = "libdep" | ||
458 | 110 | |||
459 | 111 | def drop_db(self): | ||
460 | 112 | # stub suggests that dropping all tables would be quicker than | ||
461 | 113 | # dropping the db when the number of tables is small. | ||
462 | 114 | # select quote_ident(table_schema) || '.' || | ||
463 | 115 | # quote_ident(table_name) from information_schema.tables | ||
464 | 116 | # WHERE table_schema = 'public'; | ||
465 | 117 | self.cluster.dropdb(self.db_name) | ||
466 | 118 | |||
467 | 119 | def create_db(self): | ||
468 | 120 | self.cluster.createdb(self.db_name) | ||
469 | 121 | queries = [ | ||
470 | 122 | 'postgres_schema.sql', | ||
471 | 123 | 'patch-00001.sql', | ||
472 | 124 | 'patch-00002.sql', | ||
473 | 125 | ] | ||
474 | 126 | for patch in get_db_schema_queries(queries): | ||
475 | 127 | self._execute(patch) | ||
476 | 128 | |||
477 | 129 | def _execute(self, query): | ||
478 | 130 | with closing(self.cluster.connect(self.db_name)) as conn: | ||
479 | 131 | cur = conn.cursor() | ||
480 | 132 | cur.execute(query) | ||
481 | 133 | conn.commit() | ||
482 | 134 | |||
483 | 135 | def close_connection(self): | ||
484 | 136 | self.conn.close() | ||
485 | 137 | |||
486 | 138 | def open_connection(self): | ||
487 | 139 | db = create_database(URI(scheme='postgres', | ||
488 | 140 | host=self.cluster.datadir, database=self.db_name)) | ||
489 | 141 | self.conn = Store(db) | ||
490 | 142 | self.addCleanup(self.close_connection) | ||
491 | 143 | |||
492 | 144 | def reset(self): | ||
493 | 145 | self.close_connection() | ||
494 | 146 | self.drop_db() | ||
495 | 147 | self.create_db() | ||
496 | 148 | self.open_connection() | ||
497 | 149 | |||
498 | 150 | def setUp(self): | ||
499 | 151 | super(PostgresDatabaseFixture, self).setUp() | ||
500 | 152 | self.tempdir = self.useFixture(TempDir()) | ||
501 | 153 | self.cluster = self.useFixture(ClusterFixture(self.tempdir.path)) | ||
502 | 154 | self.create_db() | ||
503 | 155 | self.open_connection() | ||
504 | 156 | |||
505 | 157 | |||
506 | 158 | class FixtureResource(_FixtureResource): | ||
507 | 159 | """The built in FixtureResource doesn't get properly dirtied.""" | ||
508 | 160 | # XXX: workaround for bug 1023423 | ||
509 | 161 | |||
510 | 162 | def _get_dirty(self): | ||
511 | 163 | return True | ||
512 | 164 | |||
513 | 165 | def _set_dirty(self, new_val): | ||
514 | 166 | pass | ||
515 | 167 | |||
516 | 168 | _dirty = property(_get_dirty, _set_dirty) | ||
517 | 169 | |||
518 | 170 | |||
519 | 171 | class PostgresDatabaseResource(FixtureResource): | ||
520 | 172 | |||
521 | 173 | def __init__(self): | ||
522 | 174 | fixture = PostgresDatabaseFixture() | ||
523 | 175 | super(PostgresDatabaseResource, self).__init__(fixture) | ||
524 | 176 | |||
525 | 177 | def reset(self, resource, result=None): | ||
526 | 178 | resource.reset() | ||
527 | 179 | return resource | ||
528 | 180 | |||
529 | 181 | |||
530 | 182 | postgres_db_resource = PostgresDatabaseResource() | ||
531 | 183 | |||
532 | 184 | |||
533 | 185 | class DatabaseConfig(Fixture): | ||
534 | 186 | |||
535 | 187 | def __init__(self, db_fixture): | ||
536 | 188 | super(DatabaseConfig, self).__init__() | ||
537 | 189 | self.db_fixture = db_fixture | ||
538 | 190 | |||
539 | 191 | def setUp(self): | ||
540 | 192 | super(DatabaseConfig, self).setUp() | ||
541 | 193 | self.useFixture( | ||
542 | 194 | ConfigSettings( | ||
543 | 195 | ('database', {'db_type': 'postgres', | ||
544 | 196 | 'host': self.db_fixture.cluster.datadir, | ||
545 | 197 | 'db_name': self.db_fixture.db_name, | ||
546 | 198 | }))) | ||
547 | 199 | |||
548 | 200 | |||
549 | 201 | class DatabaseFixture(Fixture): | ||
550 | 202 | """Create a temporary database and make it the default. | ||
551 | 203 | |||
552 | 204 | Don't use this twice within a test, otherwise you'll get confused. | ||
553 | 205 | """ | ||
554 | 206 | |||
555 | 207 | def setUp(self): | ||
556 | 208 | super(DatabaseFixture, self).setUp() | ||
557 | 209 | pg_db = self.useFixture(PostgresDatabaseFixture()) | ||
558 | 210 | self.useFixture(DatabaseConfig(pg_db)) | ||
559 | 211 | self.db = PackageDatabase(pg_db.conn) | ||
560 | 212 | self.addCleanup(self.db.close) | ||
561 | 213 | |||
562 | 214 | |||
563 | 215 | def ConfigFileFixture(location): | ||
564 | 216 | """Use a different configuration file.""" | ||
565 | 217 | return EnvironmentVariableFixture(CONF_FILE_ENV_VAR, location) | ||
566 | 218 | |||
567 | 219 | |||
568 | 220 | class ConfigSettings(Fixture): | 83 | class ConfigSettings(Fixture): |
569 | 221 | """Use a configuration file with different settings.""" | 84 | """Use a configuration file with different settings.""" |
570 | 222 | 85 | ||
571 | @@ -239,7 +102,8 @@ | |||
572 | 239 | tempdir = self.useFixture(TempDir()) | 102 | tempdir = self.useFixture(TempDir()) |
573 | 240 | config_file_path = os.path.join(tempdir.path, 'test.cfg') | 103 | config_file_path = os.path.join(tempdir.path, 'test.cfg') |
574 | 241 | write_config_file(config_file_path, self._settings) | 104 | write_config_file(config_file_path, self._settings) |
576 | 242 | self.useFixture(ConfigFileFixture(config_file_path)) | 105 | self.useFixture( |
577 | 106 | EnvironmentVariableFixture(CONF_FILE_ENV_VAR, config_file_path)) | ||
578 | 243 | 107 | ||
579 | 244 | 108 | ||
580 | 245 | class LibdepFixture(Fixture): | 109 | class LibdepFixture(Fixture): |
581 | 246 | 110 | ||
582 | === removed file 'devportalbinary/tests/test_aptfile.py' | |||
583 | --- devportalbinary/tests/test_aptfile.py 2012-10-26 12:21:03 +0000 | |||
584 | +++ devportalbinary/tests/test_aptfile.py 1970-01-01 00:00:00 +0000 | |||
585 | @@ -1,53 +0,0 @@ | |||
586 | 1 | import gzip | ||
587 | 2 | import os | ||
588 | 3 | |||
589 | 4 | from mock import patch | ||
590 | 5 | from fixtures import TempDir | ||
591 | 6 | from testtools import TestCase | ||
592 | 7 | |||
593 | 8 | from ..aptfile import AptFilePackageDatabase | ||
594 | 9 | |||
595 | 10 | |||
596 | 11 | class AptFilePackageDatabaseTestCase(TestCase): | ||
597 | 12 | |||
598 | 13 | # point to our local contents file version that is a tad smaller | ||
599 | 14 | CONTENTS_CACHE = os.path.join( | ||
600 | 15 | os.path.dirname(__file__), "data", "apt-file-backend") | ||
601 | 16 | |||
602 | 17 | def setUp(self): | ||
603 | 18 | super(AptFilePackageDatabaseTestCase, self).setUp() | ||
604 | 19 | self.db = AptFilePackageDatabase(self.CONTENTS_CACHE) | ||
605 | 20 | |||
606 | 21 | def test_read_fixture_contents_worked(self): | ||
607 | 22 | """ test that our fixture Contents file works as expected """ | ||
608 | 23 | # our test DB has 4 entries in the default search path | ||
609 | 24 | self.assertEqual( | ||
610 | 25 | len(self.db._get_lib_to_pkgs_mapping("oneiric", "i386")), 4) | ||
611 | 26 | |||
612 | 27 | def test_get_dependencies(self): | ||
613 | 28 | """ Test that data from the fixture dependencies file works """ | ||
614 | 29 | self.assertEqual( | ||
615 | 30 | self.db.get_multiple_dependencies(["libz.so.1"], 'i386'), | ||
616 | 31 | ({'libz.so.1': 'zlib1g'}, set())) | ||
617 | 32 | |||
618 | 33 | @patch("urllib.urlretrieve") | ||
619 | 34 | def test_lazy_downloading(self, mock_urlretrieve): | ||
620 | 35 | """ test that lazy downloading works """ | ||
621 | 36 | def _put_fixture_contents_file_in_place(url, target): | ||
622 | 37 | with gzip.open(target, "w") as f: | ||
623 | 38 | f.write(""" | ||
624 | 39 | Some header text that is ignored | ||
625 | 40 | FILE LOCATION | ||
626 | 41 | usr/lib/libfoo.so.2 pkgfoo,pkgbar | ||
627 | 42 | """) | ||
628 | 43 | tempdir = self.useFixture(TempDir()) | ||
629 | 44 | db = AptFilePackageDatabase(tempdir.path) | ||
630 | 45 | mock_urlretrieve.side_effect = _put_fixture_contents_file_in_place | ||
631 | 46 | self.assertEqual( | ||
632 | 47 | db.get_multiple_dependencies(["libfoo.so.2"], arch="i386"), | ||
633 | 48 | ({'libfoo.so.2': set(["pkgfoo", "pkgbar"])}, set())) | ||
634 | 49 | self.assertEqual(len(db._get_lib_to_pkgs_mapping("oneiric", "i386")), 1) | ||
635 | 50 | |||
636 | 51 | def test_close(self): | ||
637 | 52 | # Test that there is a close method we can call | ||
638 | 53 | self.db.close() | ||
639 | 54 | 0 | ||
640 | === modified file 'devportalbinary/tests/test_database.py' | |||
641 | --- devportalbinary/tests/test_database.py 2012-11-13 11:22:16 +0000 | |||
642 | +++ devportalbinary/tests/test_database.py 2012-11-13 14:53:23 +0000 | |||
643 | @@ -1,124 +1,12 @@ | |||
644 | 1 | import os | ||
645 | 2 | 1 | ||
646 | 3 | from fixtures import TempDir | ||
647 | 4 | from testresources import ResourcedTestCase | ||
648 | 5 | from testtools import TestCase | 2 | from testtools import TestCase |
649 | 6 | from testtools.matchers import ( | ||
650 | 7 | Equals, | ||
651 | 8 | Matcher, | ||
652 | 9 | ) | ||
653 | 10 | 3 | ||
667 | 11 | from devportalbinary.database import ( | 4 | from devportalbinary.database import LibdepServiceClient |
668 | 12 | AptFilePackageDatabase, | 5 | from devportalbinary.testing import get_libdep_service_client |
656 | 13 | get_dependency_database, | ||
657 | 14 | LibdepServiceClient, | ||
658 | 15 | load_configuration, | ||
659 | 16 | PackageDatabase, | ||
660 | 17 | ) | ||
661 | 18 | from devportalbinary.testing import ( | ||
662 | 19 | ConfigFileFixture, | ||
663 | 20 | ConfigSettings, | ||
664 | 21 | get_libdep_service_client, | ||
665 | 22 | postgres_db_resource, | ||
666 | 23 | ) | ||
669 | 24 | 6 | ||
670 | 25 | from libdep_service_client.client import Client | 7 | from libdep_service_client.client import Client |
671 | 26 | 8 | ||
672 | 27 | 9 | ||
673 | 28 | class ResultsIn(Matcher): | ||
674 | 29 | |||
675 | 30 | def __init__(self, db, rows): | ||
676 | 31 | self._db = db | ||
677 | 32 | self._rows = rows | ||
678 | 33 | |||
679 | 34 | def match(self, query): | ||
680 | 35 | # XXX: Abstraction violation | ||
681 | 36 | results = self._db._store.execute(query) | ||
682 | 37 | return Equals(self._rows).match(list(results)) | ||
683 | 38 | |||
684 | 39 | |||
685 | 40 | class TestDatabase(TestCase, ResourcedTestCase): | ||
686 | 41 | |||
687 | 42 | resources = [ | ||
688 | 43 | ('db_fixture', postgres_db_resource), | ||
689 | 44 | ] | ||
690 | 45 | |||
691 | 46 | def get_package_db(self): | ||
692 | 47 | db = PackageDatabase(self.db_fixture.conn) | ||
693 | 48 | self.addCleanup(db.close) | ||
694 | 49 | return db | ||
695 | 50 | |||
696 | 51 | def test_unknown_library(self): | ||
697 | 52 | db = self.get_package_db() | ||
698 | 53 | deps = db.get_multiple_dependencies(['libfoo.so.0'], 'i386') | ||
699 | 54 | self.assertEqual(deps, {}) | ||
700 | 55 | |||
701 | 56 | def test_close(self): | ||
702 | 57 | # Test that we can close the package db. | ||
703 | 58 | db = PackageDatabase(self.db_fixture.conn) | ||
704 | 59 | db.close() | ||
705 | 60 | |||
706 | 61 | def test_close_twice(self): | ||
707 | 62 | # Test that we can close the package db twice with no exception. | ||
708 | 63 | db = PackageDatabase(self.db_fixture.conn) | ||
709 | 64 | db.close() | ||
710 | 65 | db.close() | ||
711 | 66 | |||
712 | 67 | |||
713 | 68 | class TestDatabaseConfiguration(TestCase): | ||
714 | 69 | |||
715 | 70 | def use_database_config(self, **db_settings): | ||
716 | 71 | return self.useFixture(ConfigSettings(('database', db_settings))) | ||
717 | 72 | |||
718 | 73 | def test_get_db_info_from_config_sqlite(self): | ||
719 | 74 | other_tempdir = self.useFixture(TempDir()) | ||
720 | 75 | expected_db_path = os.path.join(other_tempdir.path, 'db') | ||
721 | 76 | self.use_database_config(db_type='sqlite', path=expected_db_path) | ||
722 | 77 | options = load_configuration() | ||
723 | 78 | self.assertRaises(ValueError, PackageDatabase.get_db_info_from_config, | ||
724 | 79 | options) | ||
725 | 80 | |||
726 | 81 | def test_default_create_no_config(self): | ||
727 | 82 | nonexistent = self.getUniqueString() | ||
728 | 83 | self.useFixture(ConfigFileFixture(nonexistent)) | ||
729 | 84 | self.assertIsInstance( | ||
730 | 85 | get_dependency_database(), AptFilePackageDatabase) | ||
731 | 86 | |||
732 | 87 | def test_default_create_empty_config(self): | ||
733 | 88 | self.useFixture(ConfigSettings()) | ||
734 | 89 | self.assertIsInstance( | ||
735 | 90 | get_dependency_database(), AptFilePackageDatabase) | ||
736 | 91 | |||
737 | 92 | def test_remote_service(self): | ||
738 | 93 | base_url = 'http://example.com/libdep-service/' | ||
739 | 94 | self.use_database_config(db_type='libdep-service', base_url=base_url) | ||
740 | 95 | db = get_dependency_database() | ||
741 | 96 | self.assertIsInstance(db, LibdepServiceClient) | ||
742 | 97 | self.assertEqual(base_url, db._client.base_url) | ||
743 | 98 | |||
744 | 99 | def test_get_db_info_from_config_postgres(self): | ||
745 | 100 | expected_username = self.getUniqueString() | ||
746 | 101 | expected_password = self.getUniqueString() | ||
747 | 102 | expected_host = self.getUniqueString() | ||
748 | 103 | expected_port = self.getUniqueInteger() | ||
749 | 104 | expected_db_name = self.getUniqueString() | ||
750 | 105 | |||
751 | 106 | self.use_database_config( | ||
752 | 107 | db_type='postgres', | ||
753 | 108 | username=expected_username, | ||
754 | 109 | password=expected_password, | ||
755 | 110 | host=expected_host, | ||
756 | 111 | port=expected_port, | ||
757 | 112 | db_name=expected_db_name) | ||
758 | 113 | options = load_configuration() | ||
759 | 114 | uri = PackageDatabase.get_db_info_from_config(options) | ||
760 | 115 | self.assertEqual(expected_db_name, uri.database) | ||
761 | 116 | self.assertEqual(expected_port, uri.port) | ||
762 | 117 | self.assertEqual(expected_host, uri.host) | ||
763 | 118 | self.assertEqual(expected_password, uri.password) | ||
764 | 119 | self.assertEqual(expected_username, uri.username) | ||
765 | 120 | |||
766 | 121 | |||
767 | 122 | class TestLibdepServiceClient(TestCase): | 10 | class TestLibdepServiceClient(TestCase): |
768 | 123 | 11 | ||
769 | 124 | TEST_DATA = [('libfoo', {'i386': {'libfoo': 'libfoo-bin'}})] | 12 | TEST_DATA = [('libfoo', {'i386': {'libfoo': 'libfoo-bin'}})] |
770 | 125 | 13 | ||
771 | === modified file 'setup.py' | |||
772 | --- setup.py 2012-11-13 09:14:39 +0000 | |||
773 | +++ setup.py 2012-11-13 14:53:23 +0000 | |||
774 | @@ -18,6 +18,8 @@ | |||
775 | 18 | ) | 18 | ) |
776 | 19 | from setuptools import setup, find_packages | 19 | from setuptools import setup, find_packages |
777 | 20 | 20 | ||
778 | 21 | # XXX: Need to do a trawl to see if we are depending on things that we are no | ||
779 | 22 | # longer using. | ||
780 | 21 | 23 | ||
781 | 22 | __version__ = get_version('devportalbinary/__init__.py') | 24 | __version__ = get_version('devportalbinary/__init__.py') |
782 | 23 | 25 | ||
783 | @@ -36,7 +38,6 @@ | |||
784 | 36 | install_requires = [ | 38 | install_requires = [ |
785 | 37 | 'bzr', | 39 | 'bzr', |
786 | 38 | 'configglue', | 40 | 'configglue', |
787 | 39 | 'launchpadlib', | ||
788 | 40 | 'libdep-service-python>=0.0.5', | 41 | 'libdep-service-python>=0.0.5', |
789 | 41 | 'PIL', | 42 | 'PIL', |
790 | 42 | 'pkgme>=0.4.1', | 43 | 'pkgme>=0.4.1', |
791 | @@ -45,7 +46,6 @@ | |||
792 | 45 | ], | 46 | ], |
793 | 46 | entry_points = { | 47 | entry_points = { |
794 | 47 | 'console_scripts': [ | 48 | 'console_scripts': [ |
795 | 48 | 'dump-apt-file-db=devportalbinary.aptfile:dump_apt_file_db', | ||
796 | 49 | 'guess-executable=devportalbinary.binary:print_executable', | 49 | 'guess-executable=devportalbinary.binary:print_executable', |
797 | 50 | 'guess-deps=devportalbinary.binary:print_dependencies', | 50 | 'guess-deps=devportalbinary.binary:print_dependencies', |
798 | 51 | ], | 51 | ], |
Rubberstamp! Proposer approves of own proposal.