The branch looks good. I don't have anything to add but do ask for
you to add a comment on one test -- it's probably just the case of me
being paranoid.
Also, the logic in bug 348874 seems to be wrong w.r.t. '>' vs '<', but
it is switched and correct in the implementation here. For completeness,
would you make a note on that bug?
> === modified file 'lib/canonical/launchpad/scripts/tests/test_garbo.py'
> --- lib/canonical/launchpad/scripts/tests/test_garbo.py 2009-03-27 11:15:14 +0000
> +++ lib/canonical/launchpad/scripts/tests/test_garbo.py 2009-04-03 07:16:43 +0000
> @@ -12,6 +12,7 @@
> from pytz import UTC
> from storm.locals import Min
> import transaction
> +from zope.component import getUtility
>
> from canonical.launchpad.database.codeimportresult import CodeImportResult
> from canonical.launchpad.database.oauth import OAuthNonce
> @@ -21,9 +22,12 @@
> CodeImportResultStatus)
> from canonical.launchpad.testing import TestCase
> from canonical.launchpad.scripts.garbo import (
> - DailyDatabaseGarbageCollector, HourlyDatabaseGarbageCollector)
> + DailyDatabaseGarbageCollector, HourlyDatabaseGarbageCollector,
> + OpenIDAssociationPruner, OpenIDConsumerAssociationPruner)
> from canonical.launchpad.scripts.tests import run_script
> from canonical.launchpad.scripts.logger import QuietFakeLogger
> +from canonical.launchpad.webapp.interfaces import (
> + IStoreSelector, MASTER_FLAVOR)
> from canonical.testing.layers import (
> DatabaseLayer, LaunchpadScriptLayer, LaunchpadZopelessLayer)
>
> @@ -57,20 +61,21 @@
> self.runDaily()
> self.runHourly()
>
> - def runDaily(self):
> - LaunchpadZopelessLayer.switchDbUser('garbo-daily')
> + def runDaily(self, maximum_chunk_size=2):
> + LaunchpadZopelessLayer.switchDbUser('garbo_daily')
> collector = DailyDatabaseGarbageCollector(test_args=[])
> + collector._maximum_chunk_size = maximum_chunk_size
> collector.logger = QuietFakeLogger()
> collector.main()
>
> - def runHourly(self):
> - LaunchpadZopelessLayer.switchDbUser('garbo-hourly')
> + def runHourly(self, maximum_chunk_size=2):
> + LaunchpadZopelessLayer.switchDbUser('garbo_hourly')
> collector = HourlyDatabaseGarbageCollector(test_args=[])
> + collector._maximum_chunk_size = maximum_chunk_size
> collector.logger = QuietFakeLogger()
> collector.main()
>
> def test_OAuthNoncePruner(self):
> - store = IMasterStore(OAuthNonce)
> now = datetime.utcnow().replace(tzinfo=UTC)
> timestamps = [
> now - timedelta(days=2), # Garbage
> @@ -79,6 +84,7 @@
> now, # Not garbage
> ]
> LaunchpadZopelessLayer.switchDbUser('testadmin')
> + store = IMasterStore(OAuthNonce)
>
> # Make sure we start with 0 nonces.
> self.failUnlessEqual(store.find(OAuthNonce).count(), 0)
> @@ -93,7 +99,9 @@
> # Make sure we have 4 nonces now.
> self.failUnlessEqual(store.find(OAuthNonce).count(), 4)
>
> - self.runHourly()
> + self.runHourly(maximum_chunk_size=60) # 1 minute maximum chunk size
> +
> + store = IMasterStore(OAuthNonce)
>
> # Now back to two, having removed the two garbage entries.
> self.failUnlessEqual(store.find(OAuthNonce).count(), 2)
> @@ -137,7 +145,9 @@
> self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 4)
>
> # Run the garbage collector.
> - self.runHourly()
> + self.runHourly(maximum_chunk_size=60) # 1 minute maximum chunks.
> +
> + store = IMasterStore(OpenIDConsumerNonce)
>
> # We should now have 2 nonces.
> self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 2)
> @@ -168,16 +178,19 @@
> self.runDaily()
>
> # Nothing is removed, because we always keep the 4 latest.
> + store = IMasterStore(CodeImportResult)
> self.failUnlessEqual(
> store.find(CodeImportResult).count(), 4)
>
> new_code_import_result(now - timedelta(days=31))
> self.runDaily()
> + store = IMasterStore(CodeImportResult)
> self.failUnlessEqual(
> store.find(CodeImportResult).count(), 4)
>
> new_code_import_result(now - timedelta(days=29))
> self.runDaily()
> + store = IMasterStore(CodeImportResult)
> self.failUnlessEqual(
> store.find(CodeImportResult).count(), 4)
>
> @@ -187,6 +200,40 @@
> Min(CodeImportResult.date_created)).one().replace(tzinfo=UTC)
> >= now - timedelta(days=30))
>
> + def test_OpenIDAssociationPruner(self, pruner=OpenIDAssociationPruner):
> + store_name = pruner.store_name
> + table_name = pruner.table_name
> + LaunchpadZopelessLayer.switchDbUser('testadmin')
> + store_selector = getUtility(IStoreSelector)
> + store = store_selector.get(store_name, MASTER_FLAVOR)
> + now = time.time()
> + # Create some associations in the past with lifetimes
> + for delta in range(0, 20):
> + store.execute("""
> + INSERT INTO %s (server_url, handle, issued, lifetime)
> + VALUES (%s, %s, %d, %d)
> + """ % (table_name, str(delta), str(delta), now-10, delta))
> + transaction.commit()
> +
> + num_expired = store.execute("""
> + SELECT COUNT(*) FROM %s
> + WHERE issued + lifetime < %f
> + """ % (table_name, now)).get_one()[0]
> + self.failUnless(num_expired > 0)
> +
> + self.runHourly()
> +
> + LaunchpadZopelessLayer.switchDbUser('testadmin')
> + store = store_selector.get(store_name, MASTER_FLAVOR)
> + num_expired = store.execute("""
> + SELECT COUNT(*) FROM %s
> + WHERE issued + lifetime < %f
> + """ % (table_name, now)).get_one()[0]
> + self.failUnlessEqual(num_expired, 0)
This test depends on all three parts completing within one second — the
granularity of your delta. While I'm having a hard time envisioning
that being a problem, it does seem like a potential spot for random
failure. Perhaps if you just state the assumption in a comment, that
will suffice and be a big clue should it ever fail.
Hi Stuart,
The branch looks good. I don't have anything to add but do ask for
you to add a comment on one test -- it's probably just the case of me
being paranoid.
Also, the logic in bug 348874 seems to be wrong w.r.t. '>' vs '<', but
it is switched and correct in the implementation here. For completeness,
would you make a note on that bug?
> === modified file 'lib/canonical/ launchpad/ scripts/ tests/test_ garbo.py' launchpad/ scripts/ tests/test_ garbo.py 2009-03-27 11:15:14 +0000 launchpad/ scripts/ tests/test_ garbo.py 2009-04-03 07:16:43 +0000 launchpad. database. codeimportresul t import CodeImportResult launchpad. database. oauth import OAuthNonce tStatus) launchpad. testing import TestCase launchpad. scripts. garbo import ( rbageCollector, HourlyDatabaseG arbageCollector ) rbageCollector, HourlyDatabaseG arbageCollector , onPruner, OpenIDConsumerA ssociationPrune r) launchpad. scripts. tests import run_script launchpad. scripts. logger import QuietFakeLogger launchpad. webapp. interfaces import ( testing. layers import ( Layer, LaunchpadZopele ssLayer) ssLayer. switchDbUser( 'garbo- daily') chunk_size= 2): ssLayer. switchDbUser( 'garbo_ daily') rbageCollector( test_args= []) _maximum_ chunk_size = maximum_chunk_size ssLayer. switchDbUser( 'garbo- hourly' ) chunk_size= 2): ssLayer. switchDbUser( 'garbo_ hourly' ) arbageCollector (test_args= []) _maximum_ chunk_size = maximum_chunk_size Pruner( self): OAuthNonce) utcnow( ).replace( tzinfo= UTC) ssLayer. switchDbUser( 'testadmin' ) OAuthNonce) Equal(store. find(OAuthNonce ).count( ), 0) Equal(store. find(OAuthNonce ).count( ), 4) maximum_ chunk_size= 60) # 1 minute maximum chunk size OAuthNonce) Equal(store. find(OAuthNonce ).count( ), 2) Equal(store. find(OpenIDCons umerNonce) .count( ), 4) maximum_ chunk_size= 60) # 1 minute maximum chunks. OpenIDConsumerN once) Equal(store. find(OpenIDCons umerNonce) .count( ), 2) CodeImportResul t) Equal( CodeImportResul t).count( ), 4) import_ result( now - timedelta(days=31)) CodeImportResul t) Equal( CodeImportResul t).count( ), 4) import_ result( now - timedelta(days=29)) CodeImportResul t) Equal( CodeImportResul t).count( ), 4) esult.date_ created) ).one() .replace( tzinfo= UTC) ciationPruner( self, pruner= OpenIDAssociati onPruner) : ssLayer. 
switchDbUser( 'testadmin' ) IStoreSelector) get(store_ name, MASTER_FLAVOR) commit( ) (num_expired > 0) ssLayer. switchDbUser( 'testadmin' ) get(store_ name, MASTER_FLAVOR) Equal(num_ expired, 0)
> --- lib/canonical/
> +++ lib/canonical/
> @@ -12,6 +12,7 @@
> from pytz import UTC
> from storm.locals import Min
> import transaction
> +from zope.component import getUtility
>
> from canonical.
> from canonical.
> @@ -21,9 +22,12 @@
> CodeImportResul
> from canonical.
> from canonical.
> - DailyDatabaseGa
> + DailyDatabaseGa
> + OpenIDAssociati
> from canonical.
> from canonical.
> +from canonical.
> + IStoreSelector, MASTER_FLAVOR)
> from canonical.
> DatabaseLayer, LaunchpadScript
>
> @@ -57,20 +61,21 @@
> self.runDaily()
> self.runHourly()
>
> - def runDaily(self):
> - LaunchpadZopele
> + def runDaily(self, maximum_
> + LaunchpadZopele
> collector = DailyDatabaseGa
> + collector.
> collector.logger = QuietFakeLogger()
> collector.main()
>
> - def runHourly(self):
> - LaunchpadZopele
> + def runHourly(self, maximum_
> + LaunchpadZopele
> collector = HourlyDatabaseG
> + collector.
> collector.logger = QuietFakeLogger()
> collector.main()
>
> def test_OAuthNonce
> - store = IMasterStore(
> now = datetime.
> timestamps = [
> now - timedelta(days=2), # Garbage
> @@ -79,6 +84,7 @@
> now, # Not garbage
> ]
> LaunchpadZopele
> + store = IMasterStore(
>
> # Make sure we start with 0 nonces.
> self.failUnless
> @@ -93,7 +99,9 @@
> # Make sure we have 4 nonces now.
> self.failUnless
>
> - self.runHourly()
> + self.runHourly(
> +
> + store = IMasterStore(
>
> # Now back to two, having removed the two garbage entries.
> self.failUnless
> @@ -137,7 +145,9 @@
> self.failUnless
>
> # Run the garbage collector.
> - self.runHourly()
> + self.runHourly(
> +
> + store = IMasterStore(
>
> # We should now have 2 nonces.
> self.failUnless
> @@ -168,16 +178,19 @@
> self.runDaily()
>
> # Nothing is removed, because we always keep the 4 latest.
> + store = IMasterStore(
> self.failUnless
> store.find(
>
> new_code_
> self.runDaily()
> + store = IMasterStore(
> self.failUnless
> store.find(
>
> new_code_
> self.runDaily()
> + store = IMasterStore(
> self.failUnless
> store.find(
>
> @@ -187,6 +200,40 @@
> Min(CodeImportR
> >= now - timedelta(days=30))
>
> + def test_OpenIDAsso
> + store_name = pruner.store_name
> + table_name = pruner.table_name
> + LaunchpadZopele
> + store_selector = getUtility(
> + store = store_selector.
> + now = time.time()
> + # Create some associations in the past with lifetimes
> + for delta in range(0, 20):
> + store.execute("""
> + INSERT INTO %s (server_url, handle, issued, lifetime)
> + VALUES (%s, %s, %d, %d)
> + """ % (table_name, str(delta), str(delta), now-10, delta))
> + transaction.
> +
> + num_expired = store.execute("""
> + SELECT COUNT(*) FROM %s
> + WHERE issued + lifetime < %f
> + """ % (table_name, now)).get_one()[0]
> + self.failUnless
> +
> + self.runHourly()
> +
> + LaunchpadZopele
> + store = store_selector.
> + num_expired = store.execute("""
> + SELECT COUNT(*) FROM %s
> + WHERE issued + lifetime < %f
> + """ % (table_name, now)).get_one()[0]
> + self.failUnless
This test depends on all three parts completing within one second — the
granularity of your delta. While I'm having a hard time envisioning
that being a problem, it does seem like a potential spot for random
failure. Perhaps if you just state the assumption in a comment, that
will suffice and be a big clue should it ever fail.
> + umerAssociation Pruner( self): OpenIDAssociati onPruner( OpenIDConsumerA ssociationPrune r) TestLoader( ).loadTestsFrom Name(__ name__)
> + def test_OpenIDCons
> + self.test_
> +
>
> def test_suite():
> return unittest.