Merge lp:~stub/launchpad/garbo into lp:launchpad/db-devel

Proposed by Stuart Bishop
Status: Superseded
Proposed branch: lp:~stub/launchpad/garbo
Merge into: lp:launchpad/db-devel
Diff against target: 466 lines (+148/-83)
5 files modified
cronscripts/garbo-frequently.py (+23/-0)
database/schema/security.cfg (+4/-0)
lib/canonical/launchpad/utilities/looptuner.py (+1/-1)
lib/lp/scripts/garbo.py (+67/-47)
lib/lp/scripts/tests/test_garbo.py (+53/-35)
To merge this branch: bzr merge lp:~stub/launchpad/garbo
Reviewer Review Type Date Requested Status
Robert Collins (community) Needs Fixing
Review via email: mp+69792@code.launchpad.net

This proposal has been superseded by a proposal from 2011-09-09.

Description of the change

Implement a 5 minute garbo job running and use it to fix Bug #795305.

Also a bit of delinting of touched code, and some minor garbo tunings (reducing the default transaction goal time to 2 seconds, and moving some other jobs to the frequent garbo runner to spread the load more evenly).

To post a comment you must log in.
Revision history for this message
Robert Collins (lifeless) wrote :

This should be a merge into devel - it has no schema changes [security.cfg does not count]

review: Needs Fixing
Revision history for this message
Stuart Bishop (stub) wrote :

It depends on a database patch, so cannot land on devel until r10832 of lp:launchpad/db-devel has been merged into lp:launchpad/devel

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== added file 'cronscripts/garbo-frequently.py'
--- cronscripts/garbo-frequently.py 1970-01-01 00:00:00 +0000
+++ cronscripts/garbo-frequently.py 2011-07-29 13:44:41 +0000
@@ -0,0 +1,23 @@
1#!/usr/bin/python -S
2#
3# Copyright 2011 Canonical Ltd. This software is licensed under the
4# GNU Affero General Public License version 3 (see the file LICENSE).
5
6"""Database garbage collector, every 5 minutes.
7
8Remove or archive unwanted data. Detect, warn and possibly repair data
9corruption.
10"""
11
12__metaclass__ = type
13__all__ = []
14
15import _pythonpath
16
17from lp.scripts.garbo import FrequentDatabaseGarbageCollector
18
19
20if __name__ == '__main__':
21 script = FrequentDatabaseGarbageCollector()
22 script.continue_on_failure = True
23 script.lock_and_run()
024
=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg 2011-07-28 12:58:14 +0000
+++ database/schema/security.cfg 2011-07-29 13:44:41 +0000
@@ -2187,6 +2187,10 @@
2187groups=garbo2187groups=garbo
2188type=user2188type=user
21892189
2190[garbo_frequently]
2191groups=garbo
2192type=user
2193
2190[generateppahtaccess]2194[generateppahtaccess]
2191groups=script2195groups=script
2192public.archive = SELECT2196public.archive = SELECT
21932197
=== modified file 'lib/canonical/launchpad/utilities/looptuner.py'
--- lib/canonical/launchpad/utilities/looptuner.py 2011-04-12 09:57:20 +0000
+++ lib/canonical/launchpad/utilities/looptuner.py 2011-07-29 13:44:41 +0000
@@ -311,7 +311,7 @@
311 """A base implementation of `ITunableLoop`."""311 """A base implementation of `ITunableLoop`."""
312 implements(ITunableLoop)312 implements(ITunableLoop)
313313
314 goal_seconds = 4314 goal_seconds = 2
315 minimum_chunk_size = 1315 minimum_chunk_size = 1
316 maximum_chunk_size = None # Override316 maximum_chunk_size = None # Override
317 cooldown_time = 0317 cooldown_time = 0
318318
=== modified file 'lib/lp/scripts/garbo.py'
--- lib/lp/scripts/garbo.py 2011-07-05 05:46:02 +0000
+++ lib/lp/scripts/garbo.py 2011-07-29 13:44:41 +0000
@@ -56,7 +56,6 @@
56from lp.bugs.interfaces.bug import IBugSet56from lp.bugs.interfaces.bug import IBugSet
57from lp.bugs.model.bug import Bug57from lp.bugs.model.bug import Bug
58from lp.bugs.model.bugattachment import BugAttachment58from lp.bugs.model.bugattachment import BugAttachment
59from lp.bugs.model.bugmessage import BugMessage
60from lp.bugs.model.bugnotification import BugNotification59from lp.bugs.model.bugnotification import BugNotification
61from lp.bugs.model.bugwatch import BugWatchActivity60from lp.bugs.model.bugwatch import BugWatchActivity
62from lp.bugs.scripts.checkwatches.scheduler import (61from lp.bugs.scripts.checkwatches.scheduler import (
@@ -84,7 +83,7 @@
84from lp.translations.model.potranslation import POTranslation83from lp.translations.model.potranslation import POTranslation
8584
8685
87ONE_DAY_IN_SECONDS = 24*60*6086ONE_DAY_IN_SECONDS = 24 * 60 * 60
8887
8988
90class BulkPruner(TunableLoop):89class BulkPruner(TunableLoop):
@@ -290,12 +289,34 @@
290 """289 """
291290
292291
292class BugSummaryJournalRollup(TunableLoop):
293 """Rollup BugSummaryJournal rows into BugSummary."""
294 maximum_chunk_size = 5000
295
296 def __init__(self, log, abort_time=None):
297 super(BugSummaryJournalRollup, self).__init__(log, abort_time)
298 self.store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR)
299
300 def isDone(self):
301 has_more = self.store.execute(
302 "SELECT EXISTS (SELECT TRUE FROM BugSummaryJournal LIMIT 1)"
303 ).get_one()[0]
304 return not has_more
305
306 def __call__(self, chunk_size):
307 chunk_size = int(chunk_size + 0.5)
308 self.store.execute(
309 "SELECT bugsummary_rollup_journal(%s)", (chunk_size,),
310 noresult=True)
311 self.store.commit()
312
313
293class OpenIDConsumerNoncePruner(TunableLoop):314class OpenIDConsumerNoncePruner(TunableLoop):
294 """An ITunableLoop to prune old OpenIDConsumerNonce records.315 """An ITunableLoop to prune old OpenIDConsumerNonce records.
295316
296 We remove all OpenIDConsumerNonce records older than 1 day.317 We remove all OpenIDConsumerNonce records older than 1 day.
297 """318 """
298 maximum_chunk_size = 6*60*60 # 6 hours in seconds.319 maximum_chunk_size = 6 * 60 * 60 # 6 hours in seconds.
299320
300 def __init__(self, log, abort_time=None):321 def __init__(self, log, abort_time=None):
301 super(OpenIDConsumerNoncePruner, self).__init__(log, abort_time)322 super(OpenIDConsumerNoncePruner, self).__init__(log, abort_time)
@@ -601,7 +622,7 @@
601 self.max_offset = self.store.execute(622 self.max_offset = self.store.execute(
602 "SELECT MAX(id) FROM UnlinkedPeople").get_one()[0]623 "SELECT MAX(id) FROM UnlinkedPeople").get_one()[0]
603 if self.max_offset is None:624 if self.max_offset is None:
604 self.max_offset = -1 # Trigger isDone() now.625 self.max_offset = -1 # Trigger isDone() now.
605 self.log.debug("No Person records to remove.")626 self.log.debug("No Person records to remove.")
606 else:627 else:
607 self.log.info("%d Person records to remove." % self.max_offset)628 self.log.info("%d Person records to remove." % self.max_offset)
@@ -684,36 +705,6 @@
684 """705 """
685706
686707
687class MirrorBugMessageOwner(TunableLoop):
688 """Mirror BugMessage.owner from Message.
689
690 Only needed until they are all set, after that triggers will maintain it.
691 """
692
693 # Test migration did 3M in 2 hours, so 5000 is ~ 10 seconds - and thats the
694 # max we want to hold a DB lock open for.
695 minimum_chunk_size = 1000
696 maximum_chunk_size = 5000
697
698 def __init__(self, log, abort_time=None):
699 super(MirrorBugMessageOwner, self).__init__(log, abort_time)
700 self.store = IMasterStore(BugMessage)
701 self.isDone = IMasterStore(BugMessage).find(
702 BugMessage, BugMessage.ownerID==None).is_empty
703
704 def __call__(self, chunk_size):
705 """See `ITunableLoop`."""
706 transaction.begin()
707 updated = self.store.execute("""update bugmessage set
708 owner=message.owner from message where
709 bugmessage.message=message.id and bugmessage.id in
710 (select id from bugmessage where owner is NULL limit %s);"""
711 % int(chunk_size)
712 ).rowcount
713 self.log.debug("Updated %s bugmessages." % updated)
714 transaction.commit()
715
716
717class BugHeatUpdater(TunableLoop):708class BugHeatUpdater(TunableLoop):
718 """A `TunableLoop` for bug heat calculations."""709 """A `TunableLoop` for bug heat calculations."""
719710
@@ -802,7 +793,7 @@
802class OldTimeLimitedTokenDeleter(TunableLoop):793class OldTimeLimitedTokenDeleter(TunableLoop):
803 """Delete expired url access tokens from the session DB."""794 """Delete expired url access tokens from the session DB."""
804795
805 maximum_chunk_size = 24*60*60 # 24 hours in seconds.796 maximum_chunk_size = 24 * 60 * 60 # 24 hours in seconds.
806797
807 def __init__(self, log, abort_time=None):798 def __init__(self, log, abort_time=None):
808 super(OldTimeLimitedTokenDeleter, self).__init__(log, abort_time)799 super(OldTimeLimitedTokenDeleter, self).__init__(log, abort_time)
@@ -861,10 +852,10 @@
861852
862class BaseDatabaseGarbageCollector(LaunchpadCronScript):853class BaseDatabaseGarbageCollector(LaunchpadCronScript):
863 """Abstract base class to run a collection of TunableLoops."""854 """Abstract base class to run a collection of TunableLoops."""
864 script_name = None # Script name for locking and database user. Override.855 script_name = None # Script name for locking and database user. Override.
865 tunable_loops = None # Collection of TunableLoops. Override.856 tunable_loops = None # Collection of TunableLoops. Override.
866 continue_on_failure = False # If True, an exception in a tunable loop857 continue_on_failure = False # If True, an exception in a tunable loop
867 # does not cause the script to abort.858 # does not cause the script to abort.
868859
869 # Default run time of the script in seconds. Override.860 # Default run time of the script in seconds. Override.
870 default_abort_script_time = None861 default_abort_script_time = None
@@ -915,7 +906,7 @@
915 for count in range(0, self.options.threads):906 for count in range(0, self.options.threads):
916 thread = threading.Thread(907 thread = threading.Thread(
917 target=self.run_tasks_in_thread,908 target=self.run_tasks_in_thread,
918 name='Worker-%d' % (count+1,),909 name='Worker-%d' % (count + 1,),
919 args=(tunable_loops,))910 args=(tunable_loops,))
920 thread.start()911 thread.start()
921 threads.add(thread)912 threads.add(thread)
@@ -949,7 +940,7 @@
949940
950 @property941 @property
951 def script_timeout(self):942 def script_timeout(self):
952 a_very_long_time = 31536000 # 1 year943 a_very_long_time = 31536000 # 1 year
953 return self.options.abort_script or a_very_long_time944 return self.options.abort_script or a_very_long_time
954945
955 def get_loop_logger(self, loop_name):946 def get_loop_logger(self, loop_name):
@@ -962,7 +953,7 @@
962 loop_logger = logging.getLogger('garbo.' + loop_name)953 loop_logger = logging.getLogger('garbo.' + loop_name)
963 for filter in loop_logger.filters:954 for filter in loop_logger.filters:
964 if isinstance(filter, PrefixFilter):955 if isinstance(filter, PrefixFilter):
965 return loop_logger # Already have a PrefixFilter attached.956 return loop_logger # Already have a PrefixFilter attached.
966 loop_logger.addFilter(PrefixFilter(loop_name))957 loop_logger.addFilter(PrefixFilter(loop_name))
967 return loop_logger958 return loop_logger
968959
@@ -1034,7 +1025,7 @@
1034 loop_logger.debug3(1025 loop_logger.debug3(
1035 "Unable to acquire lock %s. Running elsewhere?",1026 "Unable to acquire lock %s. Running elsewhere?",
1036 loop_lock_path)1027 loop_lock_path)
1037 time.sleep(0.3) # Avoid spinning.1028 time.sleep(0.3) # Avoid spinning.
1038 tunable_loops.append(tunable_loop_class)1029 tunable_loops.append(tunable_loop_class)
1039 # Otherwise, emit a warning and skip the task.1030 # Otherwise, emit a warning and skip the task.
1040 else:1031 else:
@@ -1073,16 +1064,38 @@
1073 transaction.abort()1064 transaction.abort()
10741065
10751066
1076class HourlyDatabaseGarbageCollector(BaseDatabaseGarbageCollector):1067class FrequentDatabaseGarbageCollector(BaseDatabaseGarbageCollector):
1077 script_name = 'garbo-hourly'1068 """Run every 5 minutes.
1069
1070 This may become even more frequent in the future.
1071
1072 Jobs with low overhead can go here to distribute work more evenly.
1073 """
1074 script_name = 'garbo-frequently'
1078 tunable_loops = [1075 tunable_loops = [
1079 MirrorBugMessageOwner,1076 BugSummaryJournalRollup,
1080 OAuthNoncePruner,1077 OAuthNoncePruner,
1081 OpenIDConsumerNoncePruner,1078 OpenIDConsumerNoncePruner,
1082 OpenIDConsumerAssociationPruner,1079 OpenIDConsumerAssociationPruner,
1080 AntiqueSessionPruner,
1081 ]
1082 experimental_tunable_loops = []
1083
1084 # 5 minmutes minus 20 seconds for cleanup. This helps ensure the
1085 # script is fully terminated before the next scheduled hourly run
1086 # kicks in.
1087 default_abort_script_time = 60 * 5 - 20
1088
1089
1090class HourlyDatabaseGarbageCollector(BaseDatabaseGarbageCollector):
1091 """Run every hour.
1092
1093 Jobs we want to run fairly often but have noticable overhead go here.
1094 """
1095 script_name = 'garbo-hourly'
1096 tunable_loops = [
1083 RevisionCachePruner,1097 RevisionCachePruner,
1084 BugWatchScheduler,1098 BugWatchScheduler,
1085 AntiqueSessionPruner,
1086 UnusedSessionPruner,1099 UnusedSessionPruner,
1087 DuplicateSessionPruner,1100 DuplicateSessionPruner,
1088 BugHeatUpdater,1101 BugHeatUpdater,
@@ -1095,6 +1108,13 @@
10951108
10961109
1097class DailyDatabaseGarbageCollector(BaseDatabaseGarbageCollector):1110class DailyDatabaseGarbageCollector(BaseDatabaseGarbageCollector):
1111 """Run every day.
1112
1113 Jobs that don't need to be run frequently.
1114
1115 If there is low overhead, consider putting these tasks in more
1116 frequently invoked lists to distribute the work more evenly.
1117 """
1098 script_name = 'garbo-daily'1118 script_name = 'garbo-daily'
1099 tunable_loops = [1119 tunable_loops = [
1100 BranchJobPruner,1120 BranchJobPruner,
11011121
=== modified file 'lib/lp/scripts/tests/test_garbo.py'
--- lib/lp/scripts/tests/test_garbo.py 2011-07-05 05:46:02 +0000
+++ lib/lp/scripts/tests/test_garbo.py 2011-07-29 13:44:41 +0000
@@ -24,6 +24,10 @@
24 Storm,24 Storm,
25 )25 )
26from storm.store import Store26from storm.store import Store
27from testtools.matchers import (
28 Equals,
29 GreaterThan,
30 )
27import transaction31import transaction
28from zope.component import getUtility32from zope.component import getUtility
29from zope.security.proxy import removeSecurityProxy33from zope.security.proxy import removeSecurityProxy
@@ -55,7 +59,6 @@
55 LaunchpadZopelessLayer,59 LaunchpadZopelessLayer,
56 ZopelessDatabaseLayer,60 ZopelessDatabaseLayer,
57 )61 )
58from lp.bugs.model.bugmessage import BugMessage
59from lp.bugs.model.bugnotification import (62from lp.bugs.model.bugnotification import (
60 BugNotification,63 BugNotification,
61 BugNotificationRecipient,64 BugNotificationRecipient,
@@ -81,6 +84,7 @@
81 BulkPruner,84 BulkPruner,
82 DailyDatabaseGarbageCollector,85 DailyDatabaseGarbageCollector,
83 DuplicateSessionPruner,86 DuplicateSessionPruner,
87 FrequentDatabaseGarbageCollector,
84 HourlyDatabaseGarbageCollector,88 HourlyDatabaseGarbageCollector,
85 OpenIDConsumerAssociationPruner,89 OpenIDConsumerAssociationPruner,
86 UnusedSessionPruner,90 UnusedSessionPruner,
@@ -359,12 +363,23 @@
359 # starting us in a known state.363 # starting us in a known state.
360 self.runDaily()364 self.runDaily()
361 self.runHourly()365 self.runHourly()
366 self.runFrequently()
362367
363 # Capture garbo log output to tests can examine it.368 # Capture garbo log output to tests can examine it.
364 self.log_buffer = StringIO()369 self.log_buffer = StringIO()
365 handler = logging.StreamHandler(self.log_buffer)370 handler = logging.StreamHandler(self.log_buffer)
366 self.log.addHandler(handler)371 self.log.addHandler(handler)
367372
373 def runFrequently(self, maximum_chunk_size=2, test_args=()):
374 transaction.commit()
375 LaunchpadZopelessLayer.switchDbUser('garbo_daily')
376 collector = FrequentDatabaseGarbageCollector(
377 test_args=list(test_args))
378 collector._maximum_chunk_size = maximum_chunk_size
379 collector.logger = self.log
380 collector.main()
381 return collector
382
368 def runDaily(self, maximum_chunk_size=2, test_args=()):383 def runDaily(self, maximum_chunk_size=2, test_args=()):
369 transaction.commit()384 transaction.commit()
370 LaunchpadZopelessLayer.switchDbUser('garbo_daily')385 LaunchpadZopelessLayer.switchDbUser('garbo_daily')
@@ -385,10 +400,10 @@
385 def test_OAuthNoncePruner(self):400 def test_OAuthNoncePruner(self):
386 now = datetime.now(UTC)401 now = datetime.now(UTC)
387 timestamps = [402 timestamps = [
388 now - timedelta(days=2), # Garbage403 now - timedelta(days=2), # Garbage
389 now - timedelta(days=1) - timedelta(seconds=60), # Garbage404 now - timedelta(days=1) - timedelta(seconds=60), # Garbage
390 now - timedelta(days=1) + timedelta(seconds=60), # Not garbage405 now - timedelta(days=1) + timedelta(seconds=60), # Not garbage
391 now, # Not garbage406 now, # Not garbage
392 ]407 ]
393 LaunchpadZopelessLayer.switchDbUser('testadmin')408 LaunchpadZopelessLayer.switchDbUser('testadmin')
394 store = IMasterStore(OAuthNonce)409 store = IMasterStore(OAuthNonce)
@@ -399,14 +414,15 @@
399 for timestamp in timestamps:414 for timestamp in timestamps:
400 store.add(OAuthNonce(415 store.add(OAuthNonce(
401 access_token=OAuthAccessToken.get(1),416 access_token=OAuthAccessToken.get(1),
402 request_timestamp = timestamp,417 request_timestamp=timestamp,
403 nonce = str(timestamp)))418 nonce=str(timestamp)))
404 transaction.commit()419 transaction.commit()
405420
406 # Make sure we have 4 nonces now.421 # Make sure we have 4 nonces now.
407 self.failUnlessEqual(store.find(OAuthNonce).count(), 4)422 self.failUnlessEqual(store.find(OAuthNonce).count(), 4)
408423
409 self.runHourly(maximum_chunk_size=60) # 1 minute maximum chunk size424 self.runFrequently(
425 maximum_chunk_size=60) # 1 minute maximum chunk size
410426
411 store = IMasterStore(OAuthNonce)427 store = IMasterStore(OAuthNonce)
412428
@@ -428,10 +444,10 @@
428 HOURS = 60 * 60444 HOURS = 60 * 60
429 DAYS = 24 * HOURS445 DAYS = 24 * HOURS
430 timestamps = [446 timestamps = [
431 now - 2 * DAYS, # Garbage447 now - 2 * DAYS, # Garbage
432 now - 1 * DAYS - 1 * MINUTES, # Garbage448 now - 1 * DAYS - 1 * MINUTES, # Garbage
433 now - 1 * DAYS + 1 * MINUTES, # Not garbage449 now - 1 * DAYS + 1 * MINUTES, # Not garbage
434 now, # Not garbage450 now, # Not garbage
435 ]451 ]
436 LaunchpadZopelessLayer.switchDbUser('testadmin')452 LaunchpadZopelessLayer.switchDbUser('testadmin')
437453
@@ -449,7 +465,7 @@
449 self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 4)465 self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 4)
450466
451 # Run the garbage collector.467 # Run the garbage collector.
452 self.runHourly(maximum_chunk_size=60) # 1 minute maximum chunks.468 self.runFrequently(maximum_chunk_size=60) # 1 minute maximum chunks.
453469
454 store = IMasterStore(OpenIDConsumerNonce)470 store = IMasterStore(OpenIDConsumerNonce)
455471
@@ -458,7 +474,8 @@
458474
459 # And none of them are older than 1 day475 # And none of them are older than 1 day
460 earliest = store.find(Min(OpenIDConsumerNonce.timestamp)).one()476 earliest = store.find(Min(OpenIDConsumerNonce.timestamp)).one()
461 self.failUnless(earliest >= now - 24*60*60, 'Still have old nonces')477 self.failUnless(
478 earliest >= now - 24 * 60 * 60, 'Still have old nonces')
462479
463 def test_CodeImportResultPruner(self):480 def test_CodeImportResultPruner(self):
464 now = datetime.now(UTC)481 now = datetime.now(UTC)
@@ -485,7 +502,7 @@
485502
486 new_code_import_result(now - timedelta(days=60))503 new_code_import_result(now - timedelta(days=60))
487 for i in range(results_to_keep_count - 1):504 for i in range(results_to_keep_count - 1):
488 new_code_import_result(now - timedelta(days=19+i))505 new_code_import_result(now - timedelta(days=19 + i))
489506
490 # Run the garbage collector507 # Run the garbage collector
491 self.runDaily()508 self.runDaily()
@@ -558,7 +575,7 @@
558 store.execute("""575 store.execute("""
559 INSERT INTO %s (server_url, handle, issued, lifetime)576 INSERT INTO %s (server_url, handle, issued, lifetime)
560 VALUES (%s, %s, %d, %d)577 VALUES (%s, %s, %d, %d)
561 """ % (table_name, str(delta), str(delta), now-10, delta))578 """ % (table_name, str(delta), str(delta), now - 10, delta))
562 transaction.commit()579 transaction.commit()
563580
564 # Ensure that we created at least one expirable row (using the581 # Ensure that we created at least one expirable row (using the
@@ -571,7 +588,7 @@
571588
572 # Expire all those expirable rows, and possibly a few more if this589 # Expire all those expirable rows, and possibly a few more if this
573 # test is running slow.590 # test is running slow.
574 self.runHourly()591 self.runFrequently()
575592
576 LaunchpadZopelessLayer.switchDbUser('testadmin')593 LaunchpadZopelessLayer.switchDbUser('testadmin')
577 store = store_selector.get(MAIN_STORE, MASTER_FLAVOR)594 store = store_selector.get(MAIN_STORE, MASTER_FLAVOR)
@@ -879,21 +896,22 @@
879896
880 self.assertEqual(1, count)897 self.assertEqual(1, count)
881898
882 def test_mirror_bugmessages(self):899 def test_BugSummaryJournalRollup(self):
883 # Nuke the owner in sampledata.900 LaunchpadZopelessLayer.switchDbUser('testadmin')
884 con = DatabaseLayer._db_fixture.superuser_connection()901 store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR)
885 try:902
886 cur = con.cursor()903 # Generate a load of entries in BugSummaryJournal.
887 cur.execute("ALTER TABLE bugmessage "904 store.execute("UPDATE BugTask SET status=42")
888 "DISABLE TRIGGER bugmessage__owner__mirror")905
889 cur.execute("UPDATE bugmessage set owner=NULL")906 # We only need a few to test.
890 cur.execute("ALTER TABLE bugmessage "907 num_rows = store.execute(
891 "ENABLE TRIGGER bugmessage__owner__mirror")908 "SELECT COUNT(*) FROM BugSummaryJournal").get_one()[0]
892 con.commit()909 self.assertThat(num_rows, GreaterThan(10))
893 finally:910
894 con.close()911 self.runFrequently()
895 store = IMasterStore(BugMessage)912
896 unmigrated = store.find(BugMessage, BugMessage.ownerID==None).count913 # We just care that the rows have been removed. The bugsummary
897 self.assertNotEqual(0, unmigrated())914 # tests confirm that the rollup stored method is working correctly.
898 self.runHourly()915 num_rows = store.execute(
899 self.assertEqual(0, unmigrated())916 "SELECT COUNT(*) FROM BugSummaryJournal").get_one()[0]
917 self.assertThat(num_rows, Equals(0))

Subscribers

People subscribed via source and target branches

to status/vote changes: