Code review comment for lp:~stub/launchpad/garbo

Brad Crittenden (bac) wrote :

Hi Stuart,

The branch looks good. I don't have anything to add, but I do ask you to
add a comment on one test -- it's probably just a case of me being
paranoid.

Also, the logic in bug 348874 seems to be wrong w.r.t. '>' vs '<', but it
is switched and correct in the implementation here. For completeness,
would you make a note on that bug?
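For the record, the direction used in the implementation below is the one
I believe is right: an association counts as garbage once its issued time
plus its lifetime falls before the current time. A throwaway sketch of
that condition (illustrative only, not code from the branch):

    import time

    def is_expired(issued, lifetime, now=None):
        # Garbage once the lifetime has elapsed: issued + lifetime < now
        # (the '<' direction, matching the pruner's WHERE clause below).
        if now is None:
            now = time.time()
        return issued + lifetime < now

    # An association issued 10 seconds ago with a 5 second lifetime is expired.
    assert is_expired(time.time() - 10, 5)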

> === modified file 'lib/canonical/launchpad/scripts/tests/test_garbo.py'
> --- lib/canonical/launchpad/scripts/tests/test_garbo.py 2009-03-27 11:15:14 +0000
> +++ lib/canonical/launchpad/scripts/tests/test_garbo.py 2009-04-03 07:16:43 +0000
> @@ -12,6 +12,7 @@
> from pytz import UTC
> from storm.locals import Min
> import transaction
> +from zope.component import getUtility
>
> from canonical.launchpad.database.codeimportresult import CodeImportResult
> from canonical.launchpad.database.oauth import OAuthNonce
> @@ -21,9 +22,12 @@
> CodeImportResultStatus)
> from canonical.launchpad.testing import TestCase
> from canonical.launchpad.scripts.garbo import (
> - DailyDatabaseGarbageCollector, HourlyDatabaseGarbageCollector)
> + DailyDatabaseGarbageCollector, HourlyDatabaseGarbageCollector,
> + OpenIDAssociationPruner, OpenIDConsumerAssociationPruner)
> from canonical.launchpad.scripts.tests import run_script
> from canonical.launchpad.scripts.logger import QuietFakeLogger
> +from canonical.launchpad.webapp.interfaces import (
> + IStoreSelector, MASTER_FLAVOR)
> from canonical.testing.layers import (
> DatabaseLayer, LaunchpadScriptLayer, LaunchpadZopelessLayer)
>
> @@ -57,20 +61,21 @@
> self.runDaily()
> self.runHourly()
>
> - def runDaily(self):
> - LaunchpadZopelessLayer.switchDbUser('garbo-daily')
> + def runDaily(self, maximum_chunk_size=2):
> + LaunchpadZopelessLayer.switchDbUser('garbo_daily')
> collector = DailyDatabaseGarbageCollector(test_args=[])
> + collector._maximum_chunk_size = maximum_chunk_size
> collector.logger = QuietFakeLogger()
> collector.main()
>
> - def runHourly(self):
> - LaunchpadZopelessLayer.switchDbUser('garbo-hourly')
> + def runHourly(self, maximum_chunk_size=2):
> + LaunchpadZopelessLayer.switchDbUser('garbo_hourly')
> collector = HourlyDatabaseGarbageCollector(test_args=[])
> + collector._maximum_chunk_size = maximum_chunk_size
> collector.logger = QuietFakeLogger()
> collector.main()
>
> def test_OAuthNoncePruner(self):
> - store = IMasterStore(OAuthNonce)
> now = datetime.utcnow().replace(tzinfo=UTC)
> timestamps = [
> now - timedelta(days=2), # Garbage
> @@ -79,6 +84,7 @@
> now, # Not garbage
> ]
> LaunchpadZopelessLayer.switchDbUser('testadmin')
> + store = IMasterStore(OAuthNonce)
>
> # Make sure we start with 0 nonces.
> self.failUnlessEqual(store.find(OAuthNonce).count(), 0)
> @@ -93,7 +99,9 @@
> # Make sure we have 4 nonces now.
> self.failUnlessEqual(store.find(OAuthNonce).count(), 4)
>
> - self.runHourly()
> + self.runHourly(maximum_chunk_size=60) # 1 minute maximum chunk size
> +
> + store = IMasterStore(OAuthNonce)
>
> # Now back to two, having removed the two garbage entries.
> self.failUnlessEqual(store.find(OAuthNonce).count(), 2)
> @@ -137,7 +145,9 @@
> self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 4)
>
> # Run the garbage collector.
> - self.runHourly()
> + self.runHourly(maximum_chunk_size=60) # 1 minute maximum chunks.
> +
> + store = IMasterStore(OpenIDConsumerNonce)
>
> # We should now have 2 nonces.
> self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 2)
> @@ -168,16 +178,19 @@
> self.runDaily()
>
> # Nothing is removed, because we always keep the 4 latest.
> + store = IMasterStore(CodeImportResult)
> self.failUnlessEqual(
> store.find(CodeImportResult).count(), 4)
>
> new_code_import_result(now - timedelta(days=31))
> self.runDaily()
> + store = IMasterStore(CodeImportResult)
> self.failUnlessEqual(
> store.find(CodeImportResult).count(), 4)
>
> new_code_import_result(now - timedelta(days=29))
> self.runDaily()
> + store = IMasterStore(CodeImportResult)
> self.failUnlessEqual(
> store.find(CodeImportResult).count(), 4)
>
> @@ -187,6 +200,40 @@
> Min(CodeImportResult.date_created)).one().replace(tzinfo=UTC)
> >= now - timedelta(days=30))
>
> + def test_OpenIDAssociationPruner(self, pruner=OpenIDAssociationPruner):
> + store_name = pruner.store_name
> + table_name = pruner.table_name
> + LaunchpadZopelessLayer.switchDbUser('testadmin')
> + store_selector = getUtility(IStoreSelector)
> + store = store_selector.get(store_name, MASTER_FLAVOR)
> + now = time.time()
> + # Create some associations in the past with lifetimes
> + for delta in range(0, 20):
> + store.execute("""
> + INSERT INTO %s (server_url, handle, issued, lifetime)
> + VALUES (%s, %s, %d, %d)
> + """ % (table_name, str(delta), str(delta), now-10, delta))
> + transaction.commit()
> +
> + num_expired = store.execute("""
> + SELECT COUNT(*) FROM %s
> + WHERE issued + lifetime < %f
> + """ % (table_name, now)).get_one()[0]
> + self.failUnless(num_expired > 0)
> +
> + self.runHourly()
> +
> + LaunchpadZopelessLayer.switchDbUser('testadmin')
> + store = store_selector.get(store_name, MASTER_FLAVOR)
> + num_expired = store.execute("""
> + SELECT COUNT(*) FROM %s
> + WHERE issued + lifetime < %f
> + """ % (table_name, now)).get_one()[0]
> + self.failUnlessEqual(num_expired, 0)

This test depends on all three parts completing within one second, the
granularity of your delta. While I'm having a hard time envisioning that
being a problem, it does seem like a potential spot for a random failure.
Perhaps just stating the assumption in a comment will suffice; it would
also be a big clue if the test ever does fail.
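Something like the following is all I have in mind -- purely a sketch
against the test as quoted above, so place and word it however you like:

    # Assumption: creating the associations, running the pruner and
    # re-querying the store all complete within the one-second
    # granularity of the lifetime values above. If this test ever
    # fails intermittently, suspect that timing assumption first.
    self.runHourly()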

> +
> + def test_OpenIDConsumerAssociationPruner(self):
> + self.test_OpenIDAssociationPruner(OpenIDConsumerAssociationPruner)
> +
>
> def test_suite():
> return unittest.TestLoader().loadTestsFromName(__name__)

review: Approve (code)
