Merge lp:~abentley/charmworld/config-storage into lp:~juju-jitsu/charmworld/trunk
- config-storage
- Merge into trunk
Proposed by Aaron Bentley
Status: Superseded
Proposed branch: lp:~abentley/charmworld/config-storage
Merge into: lp:~juju-jitsu/charmworld/trunk
Diff against target: 1499 lines (+786/-153), 21 files modified
charmworld/jobs/ingest.py (+13/-6) charmworld/jobs/tests/test_cstat.py (+2/-1) charmworld/jobs/tests/test_scan.py (+34/-2) charmworld/models.py (+54/-11) charmworld/routes.py (+2/-1) charmworld/search.py (+49/-15) charmworld/testing/__init__.py (+9/-4) charmworld/testing/data/sample_charm/config.yaml (+0/-1) charmworld/testing/factory.py (+3/-1) charmworld/tests/test_models.py (+67/-9) charmworld/tests/test_search.py (+1/-1) charmworld/views/api.py (+1/-1) charmworld/views/charms.py (+3/-0) charmworld/views/tests/test_charms.py (+63/-7) migrations/migrate.py (+169/-49) migrations/test_migrate.py (+225/-15) migrations/versions/005_no_op.py (+0/-5) migrations/versions/006_no_op.py (+0/-5) migrations/versions/007_no_op.py (+0/-6) migrations/versions/010_remap_options.py (+17/-0) migrations/versions/tests/test_migrations.py (+74/-13) |
To merge this branch: bzr merge lp:~abentley/charmworld/config-storage
Related bugs: (none)
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Curtis Hovey (community) | code | Approve | |
Review via email: mp+171801@code.launchpad.net
This proposal has been superseded by a proposal from 2013-07-05.
Commit message
Description of the change
Change the internal representation of config options, so that the user-supplied option names are used as values, not as keys.
To post a comment you must log in.
- 296. By Aaron Bentley
-
Merged trunk into config-storage.
- 297. By Aaron Bentley
-
Merged slow-migrations into config-storage.
- 298. By Aaron Bentley
-
Fix test failures by adding error translation to delete_charm.
- 299. By Aaron Bentley
-
Merged slow-migrations into config-storage.
- 300. By Aaron Bentley
-
Fix failing test.
- 301. By Aaron Bentley
-
Convert options migration to an exodus.
- 302. By Aaron Bentley
-
Merged slow-migrations into config-storage.
- 303. By Aaron Bentley
-
Remove unused code.
- 304. By Aaron Bentley
-
Merged slow-migrations-2 into config-storage.
- 305. By Aaron Bentley
-
Merged slow-migrations-2 into config-storage.
Unmerged revisions
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'charmworld/jobs/ingest.py' |
2 | --- charmworld/jobs/ingest.py 2013-07-01 14:45:02 +0000 |
3 | +++ charmworld/jobs/ingest.py 2013-07-05 14:15:30 +0000 |
4 | @@ -32,11 +32,14 @@ |
5 | get_branch_info, |
6 | parse_date, |
7 | ) |
8 | -from charmworld.models import getconnection |
9 | -from charmworld.models import getdb |
10 | -from charmworld.models import getfs |
11 | -from charmworld.models import CharmFileSet |
12 | -from charmworld.models import CharmSource |
13 | +from charmworld.models import ( |
14 | + CharmFileSet, |
15 | + CharmSource, |
16 | + getconnection, |
17 | + getdb, |
18 | + getfs, |
19 | + options_to_storage, |
20 | +) |
21 | from charmworld.search import ( |
22 | ElasticSearchClient, |
23 | SearchServiceNotAvailable, |
24 | @@ -491,7 +494,11 @@ |
25 | config_raw = cfile.read() |
26 | |
27 | try: |
28 | - config = quote_yaml(yaml.load(config_raw)) |
29 | + config_yaml = yaml.load(config_raw) |
30 | + if 'options' in config_yaml: |
31 | + config_yaml['options'] = options_to_storage( |
32 | + config_yaml['options']) |
33 | + config = quote_yaml(config_yaml) |
34 | except Exception, exc: |
35 | raise IngestError( |
36 | 'Invalid charm config yaml. %s: %s' % ( |
37 | |
38 | === modified file 'charmworld/jobs/tests/test_cstat.py' |
39 | --- charmworld/jobs/tests/test_cstat.py 2013-07-01 15:57:20 +0000 |
40 | +++ charmworld/jobs/tests/test_cstat.py 2013-07-05 14:15:30 +0000 |
41 | @@ -33,7 +33,8 @@ |
42 | def send(request): |
43 | response = Response() |
44 | if ('start' in request.url): |
45 | - response._content = '[["2010-12-24", 4], ["2010-12-25", 1]]' |
46 | + response._content = ('[["2010-12-24", 4],' |
47 | + ' ["2010-12-25", 1]]') |
48 | else: |
49 | response._content = '[[25]]' |
50 | return response |
51 | |
52 | === modified file 'charmworld/jobs/tests/test_scan.py' |
53 | --- charmworld/jobs/tests/test_scan.py 2013-06-24 15:45:29 +0000 |
54 | +++ charmworld/jobs/tests/test_scan.py 2013-07-05 14:15:30 +0000 |
55 | @@ -34,8 +34,40 @@ |
56 | # 'foo.bar' is replaced with 'foo%2Ebar'. |
57 | self.assertFalse('dotted.name' in self.charm) |
58 | self.assertEqual('foo', self.charm['dotted%2Ename']) |
59 | - self.assertFalse('foo.bar' in self.charm['config']['options']) |
60 | - self.assertEqual('baz', self.charm['config']['options']['foo%2Ebar']) |
61 | + |
62 | + def test_options_format(self): |
63 | + del self.charm['config'] |
64 | + scan_charm(self.charm, self.db, self.fs, self.log) |
65 | + expected = [ |
66 | + { |
67 | + 'name': 'source', |
68 | + 'default': 'lp:charmworld', |
69 | + 'type': 'string', |
70 | + 'description': 'The bzr branch to pull the charmworld source' |
71 | + ' from.' |
72 | + }, |
73 | + { |
74 | + 'name': 'revno', |
75 | + 'default': -1, |
76 | + 'type': 'int', |
77 | + 'description': 'The revno of the bzr branch to use. -1 for' |
78 | + ' current tip.' |
79 | + }, |
80 | + { |
81 | + 'name': 'execute-ingest-every', |
82 | + 'default': 15, |
83 | + 'type': 'int', |
84 | + 'description': 'The amount of time (in minutes) that the' |
85 | + ' ingest tasks for charmworld are executed' |
86 | + }, |
87 | + { |
88 | + 'name': 'error-email', |
89 | + 'default': '', |
90 | + 'type': 'string', |
91 | + 'description': 'Address to mail errors to.' |
92 | + } |
93 | + ] |
94 | + self.assertItemsEqual(expected, self.charm['config']['options']) |
95 | |
96 | def test_short_interface_specification(self): |
97 | # Charms can specify the interfaces they require or provide |
98 | |
99 | === modified file 'charmworld/models.py' |
100 | --- charmworld/models.py 2013-07-01 14:45:02 +0000 |
101 | +++ charmworld/models.py 2013-07-05 14:15:30 +0000 |
102 | @@ -544,7 +544,11 @@ |
103 | |
104 | The dict is parsed from the config_raw property's YAML. |
105 | """ |
106 | - return self._representation['config'] |
107 | + config = self._representation['config'] |
108 | + if config is not None and 'options' in config: |
109 | + config = dict(config) |
110 | + config['options'] = storage_to_options(config['options']) |
111 | + return config |
112 | |
113 | @property |
114 | def config_raw(self): |
115 | @@ -557,7 +561,7 @@ |
116 | @property |
117 | def options(self): |
118 | """The charm's configuration options.""" |
119 | - config = self._representation['config'] |
120 | + config = self.config |
121 | if config is not None and 'options' in config: |
122 | return config['options'] |
123 | else: |
124 | @@ -969,8 +973,10 @@ |
125 | |
126 | class CharmSource: |
127 | |
128 | - def __init__(self, db, index_client): |
129 | - self.db = db |
130 | + def __init__(self, db, index_client, collection=None): |
131 | + if collection is None: |
132 | + collection = db.charms |
133 | + self.collection = collection |
134 | self.index_client = index_client |
135 | |
136 | @classmethod |
137 | @@ -984,16 +990,20 @@ |
138 | return cls(request.db, request.index_client) |
139 | |
140 | def sync_index(self): |
141 | - for charm in self.db.charms.find(): |
142 | + for charm in self.collection.find(): |
143 | yield charm['_id'] |
144 | self.index_client.index_charm(charm) |
145 | |
146 | def save(self, charm): |
147 | - self.db.charms.save(charm) |
148 | - self.index_client.index_charm(charm) |
149 | + self.collection.save(charm) |
150 | + try: |
151 | + self.index_client.index_charm(charm) |
152 | + except: |
153 | + self.index_client.delete_charm(charm['_id']) |
154 | + raise |
155 | |
156 | def get_store_charms(self): |
157 | - return self.db.charms.find({'store_url': {'$exists': True}}) |
158 | + return self.collection.find({'store_url': {'$exists': True}}) |
159 | |
160 | @staticmethod |
161 | def exclude_obsolete(query): |
162 | @@ -1002,7 +1012,7 @@ |
163 | def find_mia_charms(self): |
164 | query = self.exclude_obsolete( |
165 | {"store_data.errors": {"$exists": True}}) |
166 | - return (Charm(data) for data in self.db.charms.find(query)) |
167 | + return (Charm(data) for data in self.collection.find(query)) |
168 | |
169 | def find_proof_error_charms(self, sub_only): |
170 | query = {"proof.e": {"$exists": True}, "promulgated": True} |
171 | @@ -1010,13 +1020,16 @@ |
172 | query['subordinate'] = True |
173 | query = self.exclude_obsolete(query) |
174 | sort = [("series", pymongo.DESCENDING), ("name", pymongo.ASCENDING)] |
175 | - return (Charm(data) for data in self.db.charms.find(query, sort=sort)) |
176 | + return (Charm(data) for data in self.collection.find(query, sort=sort)) |
177 | |
178 | def _get_all(self, charm_id): |
179 | """"Return all stored values for a given charm.""" |
180 | - yield self.db.charms.find_one({'_id': charm_id}) |
181 | + yield self.collection.find_one({'_id': charm_id}) |
182 | yield self.index_client.get(charm_id) |
183 | |
184 | + def create_collection(self): |
185 | + self.collection.database.create_collection(self.collection.name) |
186 | + |
187 | |
188 | def acquire_session_secret(database): |
189 | """Return a secret to use for AuthTkt sessions. |
190 | @@ -1073,3 +1086,33 @@ |
191 | logger.exception('Unable to index charm.') |
192 | else: |
193 | logger.info('Syncing %s to Elasticsearch' % charm_id) |
194 | + |
195 | + |
196 | +def options_to_storage(options): |
197 | + """Convert options, as retrieved from config.yaml, into a list. |
198 | + |
199 | + This format avoids user-defined keys, which are problematic in |
200 | + elasticsearch. |
201 | + """ |
202 | + storage = [] |
203 | + for key, value in options.items(): |
204 | + option = {'name': key} |
205 | + option.update(value) |
206 | + storage.append(option) |
207 | + return storage |
208 | + |
209 | + |
210 | +def storage_to_options(storage): |
211 | + """Convert options, as retrieved from mongodb, into a dict. |
212 | + |
213 | + This format has the same structure as config.yaml. |
214 | + """ |
215 | + options = {} |
216 | + for option in storage: |
217 | + new_option = dict(option) |
218 | + del new_option['name'] |
219 | + value = options.setdefault(option['name'], new_option) |
220 | + if value is not new_option: |
221 | + raise ValueError('Option name "%s" occurs multiple times.' % |
222 | + option['name']) |
223 | + return options |
224 | |
225 | === modified file 'charmworld/routes.py' |
226 | --- charmworld/routes.py 2013-07-02 12:28:28 +0000 |
227 | +++ charmworld/routes.py 2013-07-05 14:15:30 +0000 |
228 | @@ -89,7 +89,8 @@ |
229 | # up and running. They may need to be consulted when this is changed to a |
230 | # later API version. |
231 | config.add_route('api_latest_single', '/api/latest/{endpoint}') |
232 | - config.add_view('charmworld.views.api.API2', route_name='api_latest_single') |
233 | + config.add_view('charmworld.views.api.API2', |
234 | + route_name='api_latest_single') |
235 | config.add_route('api_latest', '/api/latest/{endpoint}/*remainder') |
236 | config.add_view('charmworld.views.api.API2', route_name='api_latest') |
237 | |
238 | |
239 | === modified file 'charmworld/search.py' |
240 | --- charmworld/search.py 2013-07-01 14:45:02 +0000 |
241 | +++ charmworld/search.py 2013-07-05 14:15:30 +0000 |
242 | @@ -30,7 +30,7 @@ |
243 | 'name': 10, |
244 | 'summary': 5, |
245 | 'description': 3, |
246 | - 'config.options.*.description': None, |
247 | + 'config.options.description': None, |
248 | 'relations': None, |
249 | } |
250 | |
251 | @@ -100,6 +100,14 @@ |
252 | self._client = client |
253 | self.index_name = index_name |
254 | |
255 | + def exists(self): |
256 | + try: |
257 | + self.get_status() |
258 | + except IndexMissing: |
259 | + return False |
260 | + else: |
261 | + return True |
262 | + |
263 | def create_index(self): |
264 | self._client.create_index(self.index_name) |
265 | |
266 | @@ -146,13 +154,24 @@ |
267 | """ |
268 | exact_index = ['categories', 'name', 'owner', 'i_provides', |
269 | 'i_requires', 'series', 'store_url'] |
270 | + charm_properties = dict( |
271 | + (name, {'type': 'string', 'index': 'not_analyzed'}) |
272 | + for name in exact_index) |
273 | + charm_properties['config'] = { |
274 | + 'properties': { |
275 | + 'options': { |
276 | + 'properties': { |
277 | + 'default': {'type': 'string'}, |
278 | + 'description': {'type': 'string'}, |
279 | + } |
280 | + } |
281 | + } |
282 | + } |
283 | + |
284 | with translate_error(): |
285 | self._client.put_mapping(self.index_name, 'charm', { |
286 | 'charm': { |
287 | - 'properties': dict( |
288 | - (name, {'type': 'string', 'index': 'not_analyzed'}) |
289 | - for name in exact_index |
290 | - ) |
291 | + 'properties': charm_properties, |
292 | } |
293 | }) |
294 | |
295 | @@ -166,6 +185,10 @@ |
296 | self._client.index(self.index_name, 'charm', charm, charm_id, |
297 | refresh=True) |
298 | |
299 | + def delete_charm(self, charm_id): |
300 | + with translate_error(): |
301 | + self._client.delete(self.index_name, 'charm', charm_id) |
302 | + |
303 | def index_charms(self, charms): |
304 | if len(charms) == 0: |
305 | return |
306 | @@ -311,6 +334,10 @@ |
307 | hits = self._unlimited_search(dsl, limit) |
308 | return [hit['_source'] for hit in hits['hits']['hits']] |
309 | |
310 | + def get_status(self): |
311 | + with translate_error(): |
312 | + return self._client.status(index=self.index_name)['indices'] |
313 | + |
314 | def search(self, terms, valid_charms_only=True): |
315 | # Avoid circular imports. |
316 | from charmworld.models import Charm |
317 | @@ -323,8 +350,7 @@ |
318 | dsl = self._get_filtered(dsl, {}, None, valid_charms_only) |
319 | |
320 | with Timer() as timer: |
321 | - with translate_error(): |
322 | - status = self._client.status(index=self.index_name)['indices'] |
323 | + status = self.get_status() |
324 | for real_index in self.get_aliased(): |
325 | break |
326 | else: |
327 | @@ -395,6 +421,9 @@ |
328 | result_r.setdefault(interface, []).append(payload) |
329 | return result_r, result_p, |
330 | |
331 | + def get_related_client(self, name): |
332 | + return self.__class__(self._client, name) |
333 | + |
334 | def get_aliasable_client(self, name=None): |
335 | """Return a client that can be aliased to this one. |
336 | |
337 | @@ -403,7 +432,18 @@ |
338 | """ |
339 | if name is None: |
340 | name = '%s-%d' % (self.index_name, random.randint(1, 99999)) |
341 | - return self.__class__(self._client, name) |
342 | + return self.get_related_client(name) |
343 | + |
344 | + @contextmanager |
345 | + def replacement(self, name=None): |
346 | + copy = self.get_aliasable_client(name) |
347 | + copy.create_index() |
348 | + try: |
349 | + copy.put_mapping() |
350 | + yield copy |
351 | + except: |
352 | + copy.delete_index() |
353 | + raise |
354 | |
355 | def create_replacement(self, name=None, charms=None): |
356 | """Create a replacement for this index. |
357 | @@ -417,10 +457,7 @@ |
358 | index will be used. |
359 | :return: The ElasticSearchClient for the supplied mapping. |
360 | """ |
361 | - copy = self.get_aliasable_client(name) |
362 | - copy.create_index() |
363 | - try: |
364 | - copy.put_mapping() |
365 | + with self.replacement(name) as copy: |
366 | if charms is None: |
367 | try: |
368 | charms = self.api_search(valid_charms_only=False) |
369 | @@ -428,9 +465,6 @@ |
370 | charms = [] |
371 | copy.index_charms(charms) |
372 | return copy |
373 | - except: |
374 | - copy.delete_index() |
375 | - raise |
376 | |
377 | |
378 | def reindex(index_client, charms=None): |
379 | |
380 | === modified file 'charmworld/testing/__init__.py' |
381 | --- charmworld/testing/__init__.py 2013-06-24 15:08:43 +0000 |
382 | +++ charmworld/testing/__init__.py 2013-07-05 14:15:30 +0000 |
383 | @@ -60,9 +60,9 @@ |
384 | self.addCleanup(context.__exit__, None, None, None) |
385 | return result |
386 | |
387 | - def use_index_client(self, put_mapping=True): |
388 | + def use_index_client(self, put_mapping=True, aliased=False): |
389 | self.index_client = self.use_context( |
390 | - temp_index_client(put_mapping=put_mapping)) |
391 | + temp_index_client(put_mapping=put_mapping, aliased=aliased)) |
392 | return self.index_client |
393 | |
394 | @contextmanager |
395 | @@ -223,13 +223,18 @@ |
396 | |
397 | |
398 | @contextmanager |
399 | -def temp_index_client(name='temp_index', put_mapping=True): |
400 | +def temp_index_client(name='temp_index', put_mapping=True, aliased=False): |
401 | client = ElasticSearchClient.from_settings(INI, name) |
402 | try: |
403 | client.delete_index() |
404 | except IndexMissing: |
405 | pass |
406 | - client.create_index() |
407 | + if aliased: |
408 | + alias_client = client.get_aliasable_client() |
409 | + alias_client.create_index() |
410 | + client.update_aliased(alias_client.index_name, []) |
411 | + else: |
412 | + client.create_index() |
413 | try: |
414 | if put_mapping: |
415 | client.put_mapping() |
416 | |
417 | === modified file 'charmworld/testing/data/sample_charm/config.yaml' |
418 | --- charmworld/testing/data/sample_charm/config.yaml 2013-02-07 17:26:59 +0000 |
419 | +++ charmworld/testing/data/sample_charm/config.yaml 2013-07-05 14:15:30 +0000 |
420 | @@ -15,4 +15,3 @@ |
421 | type: string |
422 | default: "" |
423 | description: "Address to mail errors to." |
424 | - foo.bar: baz |
425 | |
426 | === modified file 'charmworld/testing/factory.py' |
427 | --- charmworld/testing/factory.py 2013-07-01 14:45:02 +0000 |
428 | +++ charmworld/testing/factory.py 2013-07-05 14:15:30 +0000 |
429 | @@ -24,8 +24,9 @@ |
430 | process_charm, |
431 | ) |
432 | from charmworld.models import ( |
433 | + CharmFileSet, |
434 | getfs, |
435 | - CharmFileSet, |
436 | + options_to_storage, |
437 | ) |
438 | |
439 | |
440 | @@ -185,6 +186,7 @@ |
441 | 'description': 'The interval between script runs.' |
442 | }, |
443 | } |
444 | + options = options_to_storage(options) |
445 | if files is None: |
446 | files = { |
447 | 'readme': { |
448 | |
449 | === modified file 'charmworld/tests/test_models.py' |
450 | --- charmworld/tests/test_models.py 2013-07-01 14:45:02 +0000 |
451 | +++ charmworld/tests/test_models.py 2013-07-05 14:15:30 +0000 |
452 | @@ -14,6 +14,10 @@ |
453 | from os.path import join |
454 | |
455 | from mock import patch |
456 | +from pyelasticsearch.exceptions import ( |
457 | + ElasticHttpError, |
458 | + ElasticHttpNotFoundError, |
459 | +) |
460 | |
461 | from charmworld.models import ( |
462 | acquire_session_secret, |
463 | @@ -22,7 +26,9 @@ |
464 | CharmFileSet, |
465 | CharmSource, |
466 | find_charms, |
467 | + options_to_storage, |
468 | QAData, |
469 | + storage_to_options, |
470 | sync_index, |
471 | UserMgr, |
472 | ) |
473 | @@ -437,11 +443,10 @@ |
474 | |
475 | def test_config(self): |
476 | # The config property is a dict of charm options. |
477 | - config = {'options': {'mode': 'fast'}} |
478 | - charm_data = factory.get_charm_json() |
479 | - charm_data['config'] = config |
480 | + config = {'options': {'key': {'default': 'value', 'type': 'string'}}} |
481 | + charm_data = factory.get_charm_json(options=config['options']) |
482 | charm = Charm(charm_data) |
483 | - self.assertIs(config, charm.config) |
484 | + self.assertEqual(config, charm.config) |
485 | # The default is a dict with a single key named 'options' set to |
486 | # an empty dict. |
487 | charm = Charm({}) |
488 | @@ -460,12 +465,10 @@ |
489 | |
490 | def test_options(self): |
491 | # The options property is a dict from the charm's config. |
492 | - options = {'mode': 'fast'} |
493 | - config = {'options': options} |
494 | - charm_data = factory.get_charm_json() |
495 | - charm_data['config'] = config |
496 | + options = {'key': {'default': 'value', 'type': 'string'}} |
497 | + charm_data = factory.get_charm_json(options=options) |
498 | charm = Charm(charm_data) |
499 | - self.assertIs(options, charm.options) |
500 | + self.assertEqual(options, charm.options) |
501 | # The default is a dict. |
502 | charm = Charm({}) |
503 | self.assertEqual({}, charm.options) |
504 | @@ -1040,3 +1043,58 @@ |
505 | messages = [r.getMessage() for r in handler.buffer] |
506 | self.assertIn('Unable to index charm.', messages) |
507 | self.assertEqual(1, len(self.index_client.api_search())) |
508 | + |
509 | + def test_save_deletes_on_error(self): |
510 | + self.use_index_client() |
511 | + source = CharmSource.from_request(self) |
512 | + source.save({'_id': 'a', 'b': {}}) |
513 | + with self.assertRaises(ElasticHttpError): |
514 | + source.save({'_id': 'a', 'b': 'c'}) |
515 | + with self.assertRaises(ElasticHttpNotFoundError): |
516 | + self.index_client.get('a') |
517 | + |
518 | + |
519 | +class TestOptionsStorage(TestCase): |
520 | + |
521 | + yaml = { |
522 | + 'foo': { |
523 | + 'default': 'bar', |
524 | + 'type': 'string', |
525 | + 'description': 'Hello.' |
526 | + }, |
527 | + 'baz': { |
528 | + 'default': 42, |
529 | + 'type': 'int', |
530 | + 'description': 'Hello.' |
531 | + }, |
532 | + } |
533 | + storage = [{ |
534 | + 'name': 'baz', |
535 | + 'default': 42, |
536 | + 'type': 'int', |
537 | + 'description': 'Hello.' |
538 | + }, { |
539 | + 'name': 'foo', |
540 | + 'default': 'bar', |
541 | + 'type': 'string', |
542 | + 'description': 'Hello.' |
543 | + }] |
544 | + |
545 | + def test_options_to_storage(self): |
546 | + self.assertItemsEqual(self.storage, options_to_storage(self.yaml)) |
547 | + |
548 | + def test_storage_to_options(self): |
549 | + self.assertEqual(self.yaml, storage_to_options(self.storage)) |
550 | + |
551 | + def test_storage_to_options_duplicate_key(self): |
552 | + with self.assertRaises(ValueError) as exc_context: |
553 | + storage_to_options([{'name': 'same'}, {'name': 'same'}]) |
554 | + self.assertEqual('Option name "same" occurs multiple times.', |
555 | + str(exc_context.exception)) |
556 | + |
557 | + def test_roundtrip(self): |
558 | + self.assertEqual( |
559 | + self.yaml, storage_to_options(options_to_storage(self.yaml))) |
560 | + self.assertItemsEqual( |
561 | + self.storage, |
562 | + options_to_storage(storage_to_options(self.storage))) |
563 | |
564 | === modified file 'charmworld/tests/test_search.py' |
565 | --- charmworld/tests/test_search.py 2013-07-01 14:45:02 +0000 |
566 | +++ charmworld/tests/test_search.py 2013-07-05 14:15:30 +0000 |
567 | @@ -173,7 +173,7 @@ |
568 | elif key in ('provides', 'requires'): |
569 | query = query.values()[0].values()[0] |
570 | elif key == 'config': |
571 | - query = query.values()[0].values()[0]['description'] |
572 | + query = query.values()[0][0]['description'] |
573 | elif key == 'store_data': |
574 | query = query['digest'] |
575 | elif key == 'files': |
576 | |
577 | === modified file 'charmworld/views/api.py' |
578 | --- charmworld/views/api.py 2013-07-02 12:28:28 +0000 |
579 | +++ charmworld/views/api.py 2013-07-05 14:15:30 +0000 |
580 | @@ -60,7 +60,7 @@ |
581 | message = 'This API version is no longer supported.' |
582 | return Response( |
583 | message, |
584 | - headerlist = [ |
585 | + headerlist=[ |
586 | ('Access-Control-Allow-Origin', '*'), |
587 | ('Access-Control-Allow-Headers', 'X-Requested-With'), |
588 | ('Content-Type', 'text/plain'), |
589 | |
590 | === modified file 'charmworld/views/charms.py' |
591 | --- charmworld/views/charms.py 2013-07-03 21:33:20 +0000 |
592 | +++ charmworld/views/charms.py 2013-07-05 14:15:30 +0000 |
593 | @@ -165,7 +165,10 @@ |
594 | |
595 | |
596 | def _json_charm(charm): |
597 | + charm_config = charm.config |
598 | charm = dict(charm._representation) |
599 | + # Reformat config into old internal representation. |
600 | + charm['config'] = charm_config |
601 | charm_keys = set(charm.keys()) |
602 | for k in SANITIZE.intersection(charm_keys): |
603 | del charm[k] |
604 | |
605 | === modified file 'charmworld/views/tests/test_charms.py' |
606 | --- charmworld/views/tests/test_charms.py 2013-07-03 21:33:20 +0000 |
607 | +++ charmworld/views/tests/test_charms.py 2013-07-05 14:15:30 +0000 |
608 | @@ -1,6 +1,7 @@ |
609 | # Copyright 2012, 2013 Canonical Ltd. This software is licensed under the |
610 | # GNU Affero General Public License version 3 (see the file LICENSE). |
611 | |
612 | +from datetime import date |
613 | from logging import getLogger |
614 | |
615 | from charmworld.jobs.ingest import ( |
616 | @@ -16,7 +17,6 @@ |
617 | from charmworld.testing import ViewTestBase |
618 | from charmworld.testing import WebTestBase |
619 | from charmworld.utils import quote_key |
620 | -from charmworld.views import SANITIZE |
621 | from charmworld.views.api import API2 |
622 | from charmworld.views.charms import ( |
623 | _charm_view, |
624 | @@ -35,7 +35,65 @@ |
625 | quality_edit, |
626 | series_collection, |
627 | ) |
628 | -import json |
629 | + |
630 | + |
631 | +DISTRO_CHARM_DATA = { |
632 | + 'name': 'foo', |
633 | + 'series': 'precise', |
634 | + 'owner': 'bar', |
635 | + 'maintainer': 'John Doe <jdoe@example.com>', |
636 | + 'summary': 'Remote terminal classroom over https', |
637 | + 'description': 'foobar', |
638 | + 'store_url': 'cs:precise/foo-1', |
639 | + 'bzr_branch': '', |
640 | + 'promulgated': True, |
641 | + 'branch_deleted': False, |
642 | + 'categories': [], |
643 | + 'is_featured': False, |
644 | + 'subordinate': False, |
645 | + 'requires': {'database': {'interface': 'mongodb'}}, |
646 | + 'provides': {'website': {'interface': 'https'}}, |
647 | + 'config': { |
648 | + 'options': { |
649 | + 'script-interval': { |
650 | + 'default': 5, |
651 | + 'type': 'int', |
652 | + 'description': 'The interval between script runs.' |
653 | + } |
654 | + } |
655 | + }, |
656 | + 'ensemble': 'formula', |
657 | + 'downloads': 0, |
658 | + 'downloads_in_past_30_days': 0, |
659 | + 'files': { |
660 | + 'readme': { |
661 | + 'subdir': '', |
662 | + 'filename': 'README', |
663 | + }, |
664 | + 'install': { |
665 | + 'subdir': 'hooks', |
666 | + 'filename': 'install', |
667 | + }, |
668 | + }, |
669 | + 'tests': {}, |
670 | + 'test_results': {}, |
671 | + 'proof': { |
672 | + 'w': [ |
673 | + ' no README file', |
674 | + ' (install:31) - hook accesses EC2 metadata service' |
675 | + ' directly' |
676 | + ] |
677 | + }, |
678 | + 'date_created': '2012-01-01', |
679 | + 'error': '', |
680 | + 'qa': None, |
681 | + 'last_change': { |
682 | + 'committer': u'John Doe <jdoe@example.com>', |
683 | + 'message': u'maintainer', |
684 | + 'revno': 12, |
685 | + 'created': 1343082725.06 |
686 | + } |
687 | +} |
688 | |
689 | |
690 | class TestCharms(ViewTestBase): |
691 | @@ -368,17 +426,15 @@ |
692 | self.enable_routes() |
693 | ignore, charm = factory.makeCharm( |
694 | self.db, name='foo', owner='bar', series='precise', |
695 | - promulgated=True) |
696 | + description='foobar', promulgated=True, |
697 | + date_created=date(2012, 1, 1)) |
698 | request = self.getRequest() |
699 | request.matchdict = { |
700 | 'charm': 'foo', |
701 | 'series': 'precise', |
702 | } |
703 | - charm_data = dict(Charm(charm)._representation) |
704 | - for key in SANITIZE.intersection(charm_data.keys()): |
705 | - del charm_data[key] |
706 | response = distro_charm_json(request) |
707 | - self.assertEqual(charm_data, json.loads(response.body)) |
708 | + self.assertEqual(DISTRO_CHARM_DATA, response.json_body) |
709 | |
710 | def test_valid_others(self): |
711 | charm = factory.makeCharm(self.db, files={})[1] |
712 | |
713 | === modified file 'migrations/migrate.py' |
714 | --- migrations/migrate.py 2013-06-04 15:36:45 +0000 |
715 | +++ migrations/migrate.py 2013-07-05 14:15:30 +0000 |
716 | @@ -7,6 +7,7 @@ |
717 | |
718 | """ |
719 | import argparse |
720 | +from collections import namedtuple |
721 | from datetime import datetime |
722 | import imp |
723 | from os import listdir |
724 | @@ -15,8 +16,13 @@ |
725 | from os.path import join |
726 | import re |
727 | |
728 | -from charmworld.models import getconnection |
729 | -from charmworld.models import getdb |
730 | +from pymongo.errors import OperationFailure |
731 | + |
732 | +from charmworld.models import ( |
733 | + CharmSource, |
734 | + getconnection, |
735 | + getdb, |
736 | +) |
737 | from charmworld.search import ElasticSearchClient |
738 | from charmworld.utils import get_ini |
739 | |
740 | @@ -45,6 +51,22 @@ |
741 | return s |
742 | |
743 | |
744 | +class MigrationBeforeExodus(Exception): |
745 | + pass |
746 | + |
747 | + |
748 | +class MissingExodusCollection(Exception): |
749 | + pass |
750 | + |
751 | + |
752 | +class MissingExodusIndex(Exception): |
753 | + pass |
754 | + |
755 | + |
756 | +class MultipleExodus(Exception): |
757 | + pass |
758 | + |
759 | + |
760 | class DataStore(object): |
761 | """Communicate with the data store to determine version status.""" |
762 | |
763 | @@ -68,6 +90,19 @@ |
764 | else: |
765 | return None |
766 | |
767 | + def get_current_version(self, init): |
768 | + current_version = self.current_version |
769 | + |
770 | + if current_version is not None: |
771 | + return current_version |
772 | + |
773 | + if init: |
774 | + # We want to auto init the database anyway. |
775 | + self.version_datastore() |
776 | + return 0 |
777 | + else: |
778 | + raise Exception('Data store is not versioned') |
779 | + |
780 | def update_version(self, version): |
781 | """Update the version number in the data store.""" |
782 | self.db.migration_version.save({ |
783 | @@ -93,6 +128,9 @@ |
784 | }) |
785 | |
786 | |
787 | +VersionInfo = namedtuple('VersionInfo', ['num', 'filename', 'is_exodus']) |
788 | + |
789 | + |
790 | class Versions(object): |
791 | |
792 | def __init__(self, path): |
793 | @@ -103,7 +141,7 @@ |
794 | self.versions_dir = path |
795 | # Create temporary list of files, allowing skipped version numbers. |
796 | files = listdir(path) |
797 | - versions = {} |
798 | + self.versions = {} |
799 | |
800 | for name in files: |
801 | if not name.endswith('.py'): |
802 | @@ -113,80 +151,149 @@ |
803 | match = FILENAME_WITH_VERSION.match(name) |
804 | if match: |
805 | num = int(match.group(1)) |
806 | - versions[num] = name |
807 | + self.versions[num] = name |
808 | else: |
809 | pass # Must be a helper file or something, let's ignore it. |
810 | |
811 | - self.versions = {} |
812 | - version_numbers = versions.keys() |
813 | - version_numbers.sort() |
814 | - |
815 | - self.version_indexes = version_numbers |
816 | - for idx in version_numbers: |
817 | - self.versions[idx] = versions[idx] |
818 | - |
819 | - def create_new_version_file(self, description): |
820 | + def iter_versions(self, current): |
821 | + return (i for i in sorted(self.versions.items()) if i[0] > current) |
822 | + |
823 | + def version_path(self, description, version): |
824 | + filename = "{0}_{1}.py".format( |
825 | + str(version).zfill(3), |
826 | + str_to_filename(description) |
827 | + ) |
828 | + path = join(self.versions_dir, filename) |
829 | + return path, filename |
830 | + |
831 | + def create_new_version_file(self, description, content=None): |
832 | """Create Python files for new version""" |
833 | version = self.latest + 1 |
834 | |
835 | if not description: |
836 | raise ValueError('Please provide a short migration description.') |
837 | - |
838 | - filename = "{0}_{1}.py".format( |
839 | - str(version).zfill(3), |
840 | - str_to_filename(description) |
841 | - ) |
842 | - |
843 | - filepath = join(self.versions_dir, filename) |
844 | - with open(filepath, 'w') as new_migration: |
845 | - new_migration.write( |
846 | - SCRIPT_TEMPLATE.format(description=description)) |
847 | + if content is None: |
848 | + content = SCRIPT_TEMPLATE.format(description=description) |
849 | + path, filename = self.version_path(description, version) |
850 | + with open(path, 'w') as new_migration: |
851 | + new_migration.write(content) |
852 | |
853 | # Update our current store of version data. |
854 | self.versions[version] = filename |
855 | - self.version_indexes.append(version) |
856 | |
857 | return filename |
858 | |
859 | @property |
860 | def latest(self): |
861 | """:returns: Latest version in Collection""" |
862 | - return self.version_indexes[-1] if len(self.version_indexes) > 0 else 0 |
863 | - |
864 | - def run_migration(self, db, index_client, module_name): |
865 | - module = imp.load_source( |
866 | + return max(self.versions.keys()) if len(self.versions) > 0 else 0 |
867 | + |
868 | + def get_exodus(self, filename): |
869 | + module = self.import_version(filename) |
870 | + return getattr(module, 'exodus_update', None) |
871 | + |
872 | + def list_pending(self, current): |
873 | + return [VersionInfo(n, f, self.get_exodus(f) is not None) for n, f in |
874 | + self.iter_versions(current)] |
875 | + |
876 | + def _check_exodus(self, version_info): |
877 | + exodus_seen = False |
878 | + migration_seen = False |
879 | + for num, filename, is_exodus in version_info: |
880 | + if is_exodus: |
881 | + if migration_seen: |
882 | + raise MigrationBeforeExodus( |
883 | + 'There are migrations scheduled before an exodus.') |
884 | + if exodus_seen: |
885 | + raise MultipleExodus( |
886 | + 'There are multiple exoduses scheduled.') |
887 | + exodus_seen = True |
888 | + else: |
889 | + migration_seen = True |
890 | + |
891 | + def import_version(self, module_name): |
892 | + return imp.load_source( |
893 | module_name.strip('.py'), |
894 | join(self.versions_dir, module_name)) |
895 | + |
896 | + def run_migration(self, db, index_client, module_name): |
897 | + module = self.import_version(module_name) |
898 | getattr(module, 'upgrade')(db, index_client) |
899 | |
900 | + def get_pending_source(self, source, number): |
901 | + db = source.collection.database |
902 | + pending_suffix = '_pending_%03d' % number |
903 | + pending_name = source.index_client.index_name + pending_suffix |
904 | + pending_index = source.index_client.get_aliasable_client( |
905 | + pending_name) |
906 | + pending_collection = db[source.collection.name + pending_suffix] |
907 | + return CharmSource(db, pending_index, pending_collection) |
908 | + |
909 | + def prepare_exodus(self, source, init=False): |
910 | + db = source.collection.database |
911 | + datastore = DataStore(db) |
912 | + current_version = datastore.get_current_version(init) |
913 | + pending_migrations = self.list_pending(current_version) |
914 | + self._check_exodus(pending_migrations) |
915 | + if len(pending_migrations) == 0 or not pending_migrations[0].is_exodus: |
916 | + return None |
917 | + number, filename, is_exodus = pending_migrations[0] |
918 | + pending = self.get_pending_source(source, number) |
919 | + temp_collection = db[pending.collection.name + 'temp'] |
920 | + with source.index_client.replacement() as temp_index: |
921 | + db.create_collection(temp_collection.name) |
922 | + temp = CharmSource(db, temp_index, temp_collection) |
923 | + self.get_exodus(filename)(source, temp) |
924 | + aliased = pending.index_client.get_aliased() |
925 | + temp_collection.rename(pending.collection.name, dropTarget=True) |
926 | + pending.index_client.update_aliased(temp_index.index_name, aliased) |
927 | + return pending |
928 | + |
929 | + def finalize_exodus(self, source, version_number): |
930 | + pending = self.get_pending_source(source, version_number) |
931 | + aliased = source.index_client.get_aliased() |
932 | + pending_aliased = pending.index_client.get_aliased() |
933 | + if len(pending_aliased) == 0: |
934 | + raise MissingExodusIndex( |
935 | + 'Exodus index "%s" does not exist.' % |
936 | + pending.index_client.index_name) |
937 | + if len(pending_aliased) != 1: |
938 | + raise AssertionError('More than index aliased to pending.') |
939 | + try: |
940 | + pending.collection.rename(source.collection.name, |
941 | + dropTarget=True) |
942 | + except OperationFailure as e: |
943 | + if e.message.endswith('source namespace does not exist'): |
944 | + raise MissingExodusCollection( |
945 | + 'Exodus collection "%s" does not exist.' % |
946 | + pending.collection.name) |
947 | + raise |
948 | + source.index_client.update_aliased( |
949 | + pending_aliased[0], aliased) |
950 | + for alias_name in aliased: |
951 | + source.index_client.get_related_client(alias_name).delete_index() |
952 | + |
953 | def upgrade(self, datastore, index_client, init): |
954 | """Run `upgrade` methods for required version files. |
955 | |
956 | :param datastore: An instance of DataStore |
957 | |
958 | """ |
959 | - current_version = datastore.current_version |
960 | - |
961 | - if current_version is None: |
962 | - if init: |
963 | - # We want to auto init the database anyway. |
964 | - datastore.version_datastore() |
965 | - current_version = 0 |
966 | + current_version = datastore.get_current_version(init) |
967 | + pending_versions = self.list_pending(current_version) |
968 | + self._check_exodus(pending_versions) |
969 | + |
970 | + if len(pending_versions) == 0: |
971 | + return None |
972 | + |
973 | + for num, module_name, is_exodus in pending_versions: |
974 | + if is_exodus: |
975 | + source = CharmSource(datastore.db, index_client) |
976 | + self.finalize_exodus(source, pending_versions[0][0]) |
977 | else: |
978 | - raise Exception('Data store is not versioned') |
979 | - |
980 | - if current_version >= self.latest: |
981 | - # Nothing to do here. All migrations processed already. |
982 | - return None |
983 | - |
984 | - while current_version < self.latest: |
985 | - # Let's get processing. |
986 | - next_version = current_version + 1 |
987 | - module_name = self.versions[next_version] |
988 | - self.run_migration(datastore.db, index_client, module_name) |
989 | - current_version = next_version |
990 | - datastore.update_version(current_version) |
991 | - return current_version |
992 | + self.run_migration(datastore.db, index_client, module_name) |
993 | + datastore.update_version(num) |
994 | + return num |
995 | |
996 | |
997 | def parse_args(): |
998 | @@ -214,6 +321,13 @@ |
999 | default=False, |
1000 | help='Auto init the database if not already init.') |
1001 | |
1002 | + parser_prepare_upgrade = subparsers.add_parser('prepare-upgrade') |
1003 | + parser_prepare_upgrade.set_defaults(func=Commands.prepare_upgrade) |
1004 | + parser_prepare_upgrade.add_argument( |
1005 | + '-i', '--init', action='store_true', |
1006 | + default=False, |
1007 | + help='Auto init the database if not already init.') |
1008 | + |
1009 | args = parser.parse_args() |
1010 | return args |
1011 | |
1012 | @@ -253,6 +367,12 @@ |
1013 | print "Created new migration: {0}".format(filename) |
1014 | |
1015 | @classmethod |
1016 | + def prepare_upgrade(cls, ini, args): |
1017 | + source = CharmSource.from_settings(ini) |
1018 | + migrations = Versions(ini['migrations']) |
1019 | + migrations.prepare_exodus(source, args.init) |
1020 | + |
1021 | + @classmethod |
1022 | def upgrade(cls, ini, args): |
1023 | """Upgrade the data store to the latest available migration.""" |
1024 | ds = cls.get_datastore(ini) |
1025 | |
1026 | === modified file 'migrations/test_migrate.py' |
1027 | --- migrations/test_migrate.py 2013-06-04 15:36:45 +0000 |
1028 | +++ migrations/test_migrate.py 2013-07-05 14:15:30 +0000 |
1029 | @@ -5,16 +5,29 @@ |
1030 | from os.path import join |
1031 | from os.path import exists |
1032 | import pymongo |
1033 | -from tempfile import mkdtemp |
1034 | +from textwrap import dedent |
1035 | |
1036 | -from charmworld.models import getconnection |
1037 | -from charmworld.models import getdb |
1038 | -from charmworld.testing import TestCase |
1039 | +from charmworld.models import ( |
1040 | + CharmSource, |
1041 | + getconnection, |
1042 | + getdb, |
1043 | +) |
1044 | +from charmworld.testing import ( |
1045 | + MongoTestBase, |
1046 | + TestCase, |
1047 | + temp_dir |
1048 | +) |
1049 | from charmworld.testing.factory import random_string |
1050 | from charmworld.utils import get_ini |
1051 | |
1052 | -from migrate import DataStore |
1053 | -from migrate import Versions |
1054 | +from migrate import ( |
1055 | + DataStore, |
1056 | + MigrationBeforeExodus, |
1057 | + MissingExodusCollection, |
1058 | + MissingExodusIndex, |
1059 | + MultipleExodus, |
1060 | + Versions, |
1061 | +) |
1062 | |
1063 | INI = get_ini() |
1064 | MONGO_URL = environ.get('TEST_MONGODB') |
1065 | @@ -92,7 +105,7 @@ |
1066 | class TestVersions(TestCase): |
1067 | """The local versions of the files.""" |
1068 | def setUp(self): |
1069 | - self.tmpdir = mkdtemp() |
1070 | + self.tmpdir = self.use_context(temp_dir()) |
1071 | self.version_files = make_version_files(self.tmpdir, 4) |
1072 | self.versions = Versions(self.tmpdir) |
1073 | |
1074 | @@ -102,9 +115,9 @@ |
1075 | |
1076 | def test_latest_wo_migrations(self): |
1077 | """""" |
1078 | - tmpdir = mkdtemp() |
1079 | - versions = Versions(tmpdir) |
1080 | - self.assertEqual(versions.latest, 0) |
1081 | + with temp_dir() as tmpdir: |
1082 | + versions = Versions(tmpdir) |
1083 | + self.assertEqual(versions.latest, 0) |
1084 | |
1085 | def test_latest_version(self): |
1086 | """latest returns the number of the newest revision.""" |
1087 | @@ -128,6 +141,34 @@ |
1088 | # And this new file should exist in our versions directory |
1089 | self.assertTrue(exists(join(self.tmpdir, self.versions.versions[5]))) |
1090 | |
1091 | + def test_list_pending(self): |
1092 | + self.versions.create_new_version_file('foo', content=dedent(""" |
1093 | + def exodus_update(): |
1094 | + pass |
1095 | + """)) |
1096 | + self.versions.create_new_version_file('foo', content=dedent(""" |
1097 | + def update(): |
1098 | + pass |
1099 | + """)) |
1100 | + self.assertEqual([ |
1101 | + (5, '005_foo.py', True), |
1102 | + (6, '006_foo.py', False), |
1103 | + ], self.versions.list_pending(4)) |
1104 | + |
1105 | + def test_check_exodus(self): |
1106 | + self.versions._check_exodus([]) |
1107 | + with self.assertRaises(MigrationBeforeExodus): |
1108 | + self.versions._check_exodus([ |
1109 | + (1, '001_foo.py', False), (2, '002_bar.py', True)]) |
1110 | + self.versions._check_exodus([ |
1111 | + (1, '001_foo.py', True), (2, '002_bar.py', False)]) |
1112 | + with self.assertRaises(MultipleExodus): |
1113 | + self.versions._check_exodus([ |
1114 | + (1, '001_foo.py', True), (2, '002_bar.py', True)]) |
1115 | + self.versions._check_exodus([ |
1116 | + (1, '001_foo.py', False), (2, '002_bar.py', False)]) |
1117 | + |
1118 | + |
1119 | |
1120 | class TestVersionUpgrades(TestCase): |
1121 | """Functional testing of the actual upgrade process.""" |
1122 | @@ -136,8 +177,9 @@ |
1123 | 'mongo.url': MONGO_URL, |
1124 | }) |
1125 | self.db = getdb(self.connection, 'juju_test') |
1126 | + self.use_index_client(aliased=True) |
1127 | self.ds = DataStore(self.db) |
1128 | - tmpdir = mkdtemp() |
1129 | + tmpdir = self.use_context(temp_dir()) |
1130 | make_version_files(tmpdir, 4, runnable=True) |
1131 | self.version = Versions(tmpdir) |
1132 | |
1133 | @@ -155,9 +197,8 @@ |
1134 | |
1135 | def test_upgrade_wo_migrations(self): |
1136 | """Upgrade runs silently if no migrations are created yet.""" |
1137 | - self.use_index_client() |
1138 | self.ds.version_datastore() |
1139 | - tmpdir = mkdtemp() |
1140 | + tmpdir = self.use_context(temp_dir()) |
1141 | versions = Versions(tmpdir) |
1142 | self.assertEqual(versions.upgrade(self.ds, self.index_client, False), |
1143 | None) |
1144 | @@ -165,7 +206,6 @@ |
1145 | def test_upgrade_succeeds(self): |
1146 | """upgrade will run the methods and we get a version of 4.""" |
1147 | self.ds.version_datastore() |
1148 | - self.use_index_client() |
1149 | # We should get 4 versions done. The upgrade returns 4 |
1150 | last_version = self.version.upgrade(self.ds, self.index_client, False) |
1151 | self.assertEqual(4, last_version) |
1152 | @@ -175,10 +215,180 @@ |
1153 | |
1154 | def test_upgrade_succeeds_from_unversioned(self): |
1155 | """Forcing init no an unversioned db will upgrade successfully.""" |
1156 | - self.use_index_client() |
1157 | last_version = self.version.upgrade(self.ds, self.index_client, True) |
1158 | self.assertEqual(4, last_version) |
1159 | |
1160 | # and we can query the db for the revision of 4 |
1161 | self.assertEqual(4, self.ds.current_version) |
1162 | |
1163 | + def create_exodus_environment(self, index_client=None): |
1164 | + if index_client is None: |
1165 | + index_client = self.index_client |
1166 | + versions = Versions(self.use_context(temp_dir())) |
1167 | + versions.create_new_version_file( |
1168 | + random_string(20), content=dedent(""" |
1169 | + def exodus_update(source, target): |
1170 | + pass |
1171 | + """)) |
1172 | + source = CharmSource(self.db, index_client) |
1173 | + return versions, source |
1174 | + |
1175 | + def test_upgrade_errors_if_no_exodus_db(self): |
1176 | + versions, source = self.create_exodus_environment() |
1177 | + aliased, = source.index_client.get_aliased() |
1178 | + pending = versions.get_pending_source(source, 1) |
1179 | + pending_alias = self.create_aliased(pending.index_client) |
1180 | + self.addCleanup(pending_alias.delete_index) |
1181 | + pending.index_client.wait_for_startup() |
1182 | + with self.assertRaises(MissingExodusCollection): |
1183 | + versions.upgrade(self.ds, source.index_client, True) |
1184 | + self.assertEqual([aliased], source.index_client.get_aliased()) |
1185 | + |
1186 | + def test_upgrade_errors_if_no_exodus_index(self): |
1187 | + versions, source = self.create_exodus_environment() |
1188 | + pending = versions.get_pending_source(source, 1) |
1189 | + pending.create_collection() |
1190 | + self.addCleanup(pending.collection.drop) |
1191 | + with self.assertRaises(MissingExodusIndex): |
1192 | + versions.upgrade(self.ds, self.index_client, True) |
1193 | + self.assertIn(pending.collection.name, |
1194 | + pending.collection.database.collection_names()) |
1195 | + |
1196 | + def create_aliased(self, index_client): |
1197 | + alias_client = index_client.get_aliasable_client() |
1198 | + alias_client.create_index() |
1199 | + index_client.update_aliased(alias_client.index_name, []) |
1200 | + return alias_client |
1201 | + |
1202 | + def create_exodus_success_environment(self): |
1203 | + versions, source = self.create_exodus_environment() |
1204 | + pending = versions.get_pending_source(source, 1) |
1205 | + pending.create_collection() |
1206 | + pending_alias = self.create_aliased(pending.index_client) |
1207 | + return source, pending, versions |
1208 | + |
1209 | + def test_finalize_exodus_success(self): |
1210 | + source, pending, versions = self.create_exodus_success_environment() |
1211 | + aliased = source.index_client.get_aliased() |
1212 | + pending.save({'_id': 'a', 'message': 'hello'}) |
1213 | + versions.finalize_exodus(source, 1) |
1214 | + for charm in source._get_all('a'): |
1215 | + self.assertEqual({'_id': 'a', 'message': 'hello'}, charm) |
1216 | + self.assertNotIn(pending.collection.name, |
1217 | + pending.collection.database.collection_names()) |
1218 | + for aliased_name in aliased: |
1219 | + aliased_client = source.index_client.get_related_client( |
1220 | + aliased_name) |
1221 | + self.assertFalse(aliased_client.exists()) |
1222 | + |
1223 | + def test_upgrade_succeeds_with_exodus(self): |
1224 | + source, pending, versions = self.create_exodus_success_environment() |
1225 | + pending.save({'_id': 'a', 'message': 'hello'}) |
1226 | + versions.upgrade(self.ds, source.index_client, True) |
1227 | + |
1228 | + def test_upgrade_errors_on_non_initial_exodus(self): |
1229 | + versions = Versions(self.use_context(temp_dir())) |
1230 | + versions.create_new_version_file(random_string(20)) |
1231 | + versions.create_new_version_file( |
1232 | + random_string(20), content=dedent(""" |
1233 | + def exodus_update(source, target): |
1234 | + pass |
1235 | + """)) |
1236 | + with self.assertRaises(MigrationBeforeExodus): |
1237 | + versions.upgrade(self.ds, self.index_client, True) |
1238 | + |
1239 | + |
1240 | +class TestExodus(MongoTestBase): |
1241 | + |
1242 | + def setUp(self): |
1243 | + super(TestExodus, self).setUp() |
1244 | + self.use_index_client(aliased=True) |
1245 | + self.source = CharmSource.from_request(self) |
1246 | + self.versions = Versions(self.use_context(temp_dir())) |
1247 | + |
1248 | + def test_prepare_exodus_checks_validity(self): |
1249 | + self.versions.create_new_version_file( |
1250 | + random_string(20), content=dedent(""" |
1251 | + def update(source, target): |
1252 | + pass |
1253 | + """)) |
1254 | + self.versions.create_new_version_file( |
1255 | + random_string(20), content=dedent(""" |
1256 | + def exodus_update(source, target): |
1257 | + pass |
1258 | + """)) |
1259 | + with self.assertRaises(MigrationBeforeExodus): |
1260 | + self.versions.prepare_exodus(self.source, init=True) |
1261 | + |
1262 | + def test_prepare_exodus_initializes(self): |
1263 | + with self.assertRaises(Exception) as exc_context: |
1264 | + self.versions.prepare_exodus(self.source, init=False) |
1265 | + self.assertEqual('Data store is not versioned', |
1266 | + str(exc_context.exception)) |
1267 | + self.versions.prepare_exodus(self.source, init=True) |
1268 | + ds = DataStore(self.source.collection.database) |
1269 | + self.assertEqual(0, ds.current_version) |
1270 | + |
1271 | + def test_prepare_exodus_acquires_pending(self): |
1272 | + pending = self.versions.prepare_exodus(self.source, init=True) |
1273 | + self.assertIs(None, pending) |
1274 | + self.versions.create_new_version_file('foo', content=dedent(""" |
1275 | + def exodus_update(source, target): |
1276 | + pass |
1277 | + """)) |
1278 | + pending = self.versions.prepare_exodus(self.source) |
1279 | + self.addCleanup(pending.index_client.delete_index) |
1280 | + self.assertIsInstance(pending, CharmSource) |
1281 | + |
1282 | + def test_prepare_exodus_runs_exodus_update(self): |
1283 | + self.versions.create_new_version_file('foo', content=dedent(""" |
1284 | + def exodus_update(source, target): |
1285 | + target.save({'_id': 'a', 'message': 'hello'}) |
1286 | + """)) |
1287 | + pending = self.versions.prepare_exodus(self.source, init=True) |
1288 | + self.addCleanup(pending.index_client.delete_index) |
1289 | + for charm in pending._get_all('a'): |
1290 | + self.assertEqual({'_id': 'a', 'message': 'hello'}, charm) |
1291 | + |
1292 | + def test_exodus_update_runs_against_temp_but_generates_pending(self): |
1293 | + self.versions.create_new_version_file('foo', content=dedent(""" |
1294 | + def exodus_update(source, target): |
1295 | + if target.index_client.index_name == 'temp_index_pending_001': |
1296 | + raise AssertionError('Used pending name!') |
1297 | + if target.collection.name == 'charms_pending_001': |
1298 | + raise AssertionError('Used pending name!') |
1299 | + """)) |
1300 | + pending = self.versions.prepare_exodus(self.source, init=True) |
1301 | + self.addCleanup(pending.index_client.delete_index) |
1302 | + self.assertEqual('temp_index_pending_001', |
1303 | + pending.index_client.index_name) |
1304 | + self.assertEqual('charms_pending_001', pending.collection.name) |
1305 | + |
1306 | + def test_no_leftovers(self): |
1307 | + db = self.source.collection.database |
1308 | + index_client = self.source.index_client._client |
1309 | + index_keys = index_client.status()['indices'].keys() |
1310 | + index_count = len(index_client.status()['indices']) |
1311 | + DataStore(db).version_datastore() |
1312 | + self.source.create_collection() |
1313 | + collection_names = db.collection_names() |
1314 | + self.versions.create_new_version_file('foo', content=dedent(""" |
1315 | + def exodus_update(source, target): |
1316 | + pass |
1317 | + """)) |
1318 | + self.versions.prepare_exodus(self.source, init=True) |
1319 | + self.versions.finalize_exodus(self.source, 1) |
1320 | + self.assertEqual(collection_names, db.collection_names()) |
1321 | + self.assertEqual(index_count, len(index_client.status()['indices'])) |
1322 | + |
1323 | + def test_suffix_of_source(self): |
1324 | + self.versions.create_new_version_file('foo', content=dedent(""" |
1325 | + import re |
1326 | + |
1327 | + def exodus_update(source, target): |
1328 | + target_name = target.index_client.index_name |
1329 | + match = re.match('(.*)-([0-9]*)', target_name) |
1330 | + if match.group(1) != source.index_client.index_name: |
1331 | + raise AssertionError('Poor name: ' + target_name) |
1332 | + """)) |
1333 | + self.versions.prepare_exodus(self.source, init=True) |
1334 | |
1335 | === removed file 'migrations/versions/005_no_op.py' |
1336 | --- migrations/versions/005_no_op.py 2013-06-04 18:22:19 +0000 |
1337 | +++ migrations/versions/005_no_op.py 1970-01-01 00:00:00 +0000 |
1338 | @@ -1,5 +0,0 @@ |
1339 | -# Copyright 2013 Canonical Ltd. This software is licensed under the |
1340 | -# GNU Affero General Public License version 3 (see the file LICENSE). |
1341 | - |
1342 | -def upgrade(db, index_client): |
1343 | - """005 failed to remove icons from charms, so now disabled.""" |
1344 | |
1345 | === removed file 'migrations/versions/006_no_op.py' |
1346 | --- migrations/versions/006_no_op.py 2013-06-05 18:30:47 +0000 |
1347 | +++ migrations/versions/006_no_op.py 1970-01-01 00:00:00 +0000 |
1348 | @@ -1,5 +0,0 @@ |
1349 | -# Copyright 2013 Canonical Ltd. This software is licensed under the |
1350 | -# GNU Affero General Public License version 3 (see the file LICENSE). |
1351 | - |
1352 | -def upgrade(db, index_client): |
1353 | - """006 lacked a corresponding ingest update, so now disabled.""" |
1354 | |
1355 | === removed file 'migrations/versions/007_no_op.py' |
1356 | --- migrations/versions/007_no_op.py 2013-06-11 19:13:07 +0000 |
1357 | +++ migrations/versions/007_no_op.py 1970-01-01 00:00:00 +0000 |
1358 | @@ -1,6 +0,0 @@ |
1359 | -# Copyright 2013 Canonical Ltd. This software is licensed under the |
1360 | -# GNU Affero General Public License version 3 (see the file LICENSE). |
1361 | - |
1362 | - |
1363 | -def upgrade(db, index_client): |
1364 | - """007 was renamed to 008 because of a deployment issue in production""" |
1365 | |
1366 | === added file 'migrations/versions/010_remap_options.py' |
1367 | --- migrations/versions/010_remap_options.py 1970-01-01 00:00:00 +0000 |
1368 | +++ migrations/versions/010_remap_options.py 2013-07-05 14:15:30 +0000 |
1369 | @@ -0,0 +1,17 @@ |
1370 | +# Copyright 2013 Canonical Ltd. This software is licensed under the |
1371 | +# GNU Affero General Public License version 3 (see the file LICENSE). |
1372 | + |
1373 | +from charmworld.models import ( |
1374 | + options_to_storage, |
1375 | +) |
1376 | + |
1377 | + |
1378 | +def exodus_update(source, target): |
1379 | + for charm in source.collection.find(): |
1380 | + config = charm.get('config') |
1381 | + if config is not None and 'options' in config: |
1382 | + config['options'] = options_to_storage(config['options']) |
1383 | + try: |
1384 | + target.save(charm) |
1385 | + except Exception as e: |
1386 | + print e |
1387 | |
1388 | === modified file 'migrations/versions/tests/test_migrations.py' |
1389 | --- migrations/versions/tests/test_migrations.py 2013-06-11 19:18:47 +0000 |
1390 | +++ migrations/versions/tests/test_migrations.py 2013-07-05 14:15:30 +0000 |
1391 | @@ -1,34 +1,95 @@ |
1392 | # Copyright 2012, 2013 Canonical Ltd. This software is licensed under the |
1393 | # GNU Affero General Public License version 3 (see the file LICENSE). |
1394 | |
1395 | +from pyelasticsearch.exceptions import ( |
1396 | + ElasticHttpNotFoundError, |
1397 | +) |
1398 | from charmworld.models import CharmSource |
1399 | from charmworld.testing import MongoTestBase |
1400 | from migrations.migrate import Versions |
1401 | |
1402 | |
1403 | -def run_migration(db, index_client, module_name): |
1404 | - versions = Versions('migrations/versions/') |
1405 | - versions.run_migration(db, index_client, module_name) |
1406 | - |
1407 | - |
1408 | -class TestMigration004(MongoTestBase): |
1409 | +class MigrationTestBase(MongoTestBase): |
1410 | + |
1411 | + def setUp(self): |
1412 | + super(MigrationTestBase, self).setUp() |
1413 | + self.versions = Versions('migrations/versions/') |
1414 | + |
1415 | + |
1416 | +class TestMigration004(MigrationTestBase): |
1417 | |
1418 | def test_migration(self): |
1419 | self.use_index_client() |
1420 | self.db.create_collection('charm-errors') |
1421 | - run_migration(self.db, self.index_client, '004_remove_charm_errors.py') |
1422 | + self.versions.run_migration(self.db, self.index_client, |
1423 | + '004_remove_charm_errors.py') |
1424 | self.assertNotIn('charm-errors', self.db.collection_names()) |
1425 | |
1426 | |
1427 | -class TestMigration008(MongoTestBase): |
1428 | +class TestMigration008(MigrationTestBase): |
1429 | |
1430 | def test_migration(self): |
1431 | self.use_index_client() |
1432 | source = CharmSource.from_request(self) |
1433 | source.save({'_id': 'a', 'icon': 'asdf', 'asdf': 'asdf'}) |
1434 | source.save({'_id': 'b', 'icon': 'asdf', 'asdf': 'asdf'}) |
1435 | - run_migration(self.db, self.index_client, '008_delete_icon_field.py') |
1436 | - for charm in source._get_all('a'): |
1437 | - self.assertNotIn('icon', charm) |
1438 | - for charm in source._get_all('b'): |
1439 | - self.assertNotIn('icon', charm) |
1440 | + self.versions.run_migration(self.db, self.index_client, |
1441 | + '008_delete_icon_field.py') |
1442 | + for charm in source._get_all('a'): |
1443 | + self.assertNotIn('icon', charm) |
1444 | + for charm in source._get_all('b'): |
1445 | + self.assertNotIn('icon', charm) |
1446 | + |
1447 | + |
1448 | +class TestMigration010(MigrationTestBase): |
1449 | + |
1450 | + def test_migration(self): |
1451 | + self.use_index_client() |
1452 | + source = CharmSource.from_request(self) |
1453 | + source.save({ |
1454 | + '_id': 'a', |
1455 | + 'config': None, |
1456 | + }) |
1457 | + source.save({ |
1458 | + '_id': 'b', |
1459 | + 'config': { |
1460 | + 'options': {}, |
1461 | + }, |
1462 | + }) |
1463 | + source.save({ |
1464 | + '_id': 'c', |
1465 | + 'config': { |
1466 | + 'options': {'foo': {'default': 'bar'}}, |
1467 | + }, |
1468 | + }) |
1469 | + source.save({'_id': 'd'}) |
1470 | + # This is not acceptable to Elasticsearch, because config is a list. |
1471 | + self.db.charms.save({ |
1472 | + '_id': 'e', |
1473 | + 'config': ['a'], |
1474 | + }) |
1475 | + self.versions.get_exodus('010_remap_options.py')(source, source) |
1476 | + for charm in source._get_all('a'): |
1477 | + self.assertEqual({ |
1478 | + '_id': 'a', |
1479 | + 'config': None, |
1480 | + }, charm) |
1481 | + for charm in source._get_all('b'): |
1482 | + self.assertEqual({ |
1483 | + '_id': 'b', |
1484 | + 'config': {'options': []}, |
1485 | + }, charm) |
1486 | + for charm in source._get_all('c'): |
1487 | + self.assertEqual({ |
1488 | + '_id': 'c', |
1489 | + 'config': {'options': [{ |
1490 | + 'name': 'foo', |
1491 | + 'default': 'bar', |
1492 | + }]} |
1493 | + }, charm) |
1494 | + for charm in source._get_all('d'): |
1495 | + self.assertEqual({'_id': 'd'}, charm) |
1496 | + with self.assertRaises(ElasticHttpNotFoundError): |
1497 | + self.index_client.get('e') |
1498 | + self.assertEqual({'_id': 'e', 'config': ['a']}, |
1499 | + self.db.charms.find_one({'_id': 'e'})) |
We talked on IRC to clarify that some charms are not in a sane state and migration needs to handle cases where config is not a dict with a single item of key, sub-dict. We might want migrate to log in the future. We know from the 007 migration problem seen with production's bogus charms that the juju-log is capturing stdout.