Merge lp:~stub/launchpad/garbo into lp:launchpad/db-devel

Proposed by Stuart Bishop
Status: Merged
Approved by: Brad Crittenden
Approved revision: no longer in the source branch.
Merged at revision: not available
Proposed branch: lp:~stub/launchpad/garbo
Merge into: lp:launchpad/db-devel
Diff against target: 709 lines
To merge this branch: bzr merge lp:~stub/launchpad/garbo
Reviewer Review Type Date Requested Status
Brad Crittenden (community) code Approve
Review via email: mp+5189@code.launchpad.net
To post a comment you must log in.
Revision history for this message
Stuart Bishop (stub) wrote :

Addresses:

  Bug #351363: DBLoopTuner often cannot detect long running transactions
  Bug #348874: OpenIDAssociations need to be garbage collected

We now use a SECURITY DEFINER wrapper around pg_stat_activity to allow DBLoopTuner to query this information even when not connected as a database superuser.

The OpenIDAssociation and OpenIDConsumerAssociation tables are now garbage collected.

Scripts using the auth Store now connect as a sane database user.

When the test infrastructure is used to reset the Zopeless connection settings, ZStorm is reset and all Stores closed. After changing the connection settings, Stores need to be refetched but they will be connected using the correct database user.

Revision history for this message
Brad Crittenden (bac) wrote :
Download full text (6.5 KiB)

Hi Stuart,

The branch looks good. I don't have anything to add but do ask for
you to add a comment on one test -- it's probably just the case of me
being paranoid.

Also the logic in bug 348874 seems to be wrong w.r.t. '>' vs '<' but
is switched and correct in the implementation here. For completeness
would you make a note on that bug?

> === modified file 'lib/canonical/launchpad/scripts/tests/test_garbo.py'
> --- lib/canonical/launchpad/scripts/tests/test_garbo.py 2009-03-27 11:15:14 +0000
> +++ lib/canonical/launchpad/scripts/tests/test_garbo.py 2009-04-03 07:16:43 +0000
> @@ -12,6 +12,7 @@
> from pytz import UTC
> from storm.locals import Min
> import transaction
> +from zope.component import getUtility
>
> from canonical.launchpad.database.codeimportresult import CodeImportResult
> from canonical.launchpad.database.oauth import OAuthNonce
> @@ -21,9 +22,12 @@
> CodeImportResultStatus)
> from canonical.launchpad.testing import TestCase
> from canonical.launchpad.scripts.garbo import (
> - DailyDatabaseGarbageCollector, HourlyDatabaseGarbageCollector)
> + DailyDatabaseGarbageCollector, HourlyDatabaseGarbageCollector,
> + OpenIDAssociationPruner, OpenIDConsumerAssociationPruner)
> from canonical.launchpad.scripts.tests import run_script
> from canonical.launchpad.scripts.logger import QuietFakeLogger
> +from canonical.launchpad.webapp.interfaces import (
> + IStoreSelector, MASTER_FLAVOR)
> from canonical.testing.layers import (
> DatabaseLayer, LaunchpadScriptLayer, LaunchpadZopelessLayer)
>
> @@ -57,20 +61,21 @@
> self.runDaily()
> self.runHourly()
>
> - def runDaily(self):
> - LaunchpadZopelessLayer.switchDbUser('garbo-daily')
> + def runDaily(self, maximum_chunk_size=2):
> + LaunchpadZopelessLayer.switchDbUser('garbo_daily')
> collector = DailyDatabaseGarbageCollector(test_args=[])
> + collector._maximum_chunk_size = maximum_chunk_size
> collector.logger = QuietFakeLogger()
> collector.main()
>
> - def runHourly(self):
> - LaunchpadZopelessLayer.switchDbUser('garbo-hourly')
> + def runHourly(self, maximum_chunk_size=2):
> + LaunchpadZopelessLayer.switchDbUser('garbo_hourly')
> collector = HourlyDatabaseGarbageCollector(test_args=[])
> + collector._maximum_chunk_size = maximum_chunk_size
> collector.logger = QuietFakeLogger()
> collector.main()
>
> def test_OAuthNoncePruner(self):
> - store = IMasterStore(OAuthNonce)
> now = datetime.utcnow().replace(tzinfo=UTC)
> timestamps = [
> now - timedelta(days=2), # Garbage
> @@ -79,6 +84,7 @@
> now, # Not garbage
> ]
> LaunchpadZopelessLayer.switchDbUser('testadmin')
> + store = IMasterStore(OAuthNonce)
>
> # Make sure we start with 0 nonces.
> self.failUnlessEqual(store.find(OAuthNonce).count(), 0)
> @@ -93,7 +99,9 @@
> # Make sure we have 4 nonces now.
> self.failUnlessEqual(store.find(OAuthNonce).count(), 4)
>
> - self.runHourly()
> + self.runHourly(maximum_chunk_size=60) # 1 minute maxi...

Read more...

review: Approve (code)
Revision history for this message
Brad Crittenden (bac) wrote :
Download full text (6.5 KiB)

Hi Stuart,

The branch looks good. I don't have anything to add but do ask for
you to add a comment on one test -- it's probably just the case of me
being paranoid.

Also the logic in bug 348874 seems to be wrong w.r.t. '>' vs '<' but
is switched and correct in the implementation here. For completeness
would you make a note on that bug?

> === modified file 'lib/canonical/launchpad/scripts/tests/test_garbo.py'
> --- lib/canonical/launchpad/scripts/tests/test_garbo.py 2009-03-27 11:15:14 +0000
> +++ lib/canonical/launchpad/scripts/tests/test_garbo.py 2009-04-03 07:16:43 +0000
> @@ -12,6 +12,7 @@
> from pytz import UTC
> from storm.locals import Min
> import transaction
> +from zope.component import getUtility
>
> from canonical.launchpad.database.codeimportresult import CodeImportResult
> from canonical.launchpad.database.oauth import OAuthNonce
> @@ -21,9 +22,12 @@
> CodeImportResultStatus)
> from canonical.launchpad.testing import TestCase
> from canonical.launchpad.scripts.garbo import (
> - DailyDatabaseGarbageCollector, HourlyDatabaseGarbageCollector)
> + DailyDatabaseGarbageCollector, HourlyDatabaseGarbageCollector,
> + OpenIDAssociationPruner, OpenIDConsumerAssociationPruner)
> from canonical.launchpad.scripts.tests import run_script
> from canonical.launchpad.scripts.logger import QuietFakeLogger
> +from canonical.launchpad.webapp.interfaces import (
> + IStoreSelector, MASTER_FLAVOR)
> from canonical.testing.layers import (
> DatabaseLayer, LaunchpadScriptLayer, LaunchpadZopelessLayer)
>
> @@ -57,20 +61,21 @@
> self.runDaily()
> self.runHourly()
>
> - def runDaily(self):
> - LaunchpadZopelessLayer.switchDbUser('garbo-daily')
> + def runDaily(self, maximum_chunk_size=2):
> + LaunchpadZopelessLayer.switchDbUser('garbo_daily')
> collector = DailyDatabaseGarbageCollector(test_args=[])
> + collector._maximum_chunk_size = maximum_chunk_size
> collector.logger = QuietFakeLogger()
> collector.main()
>
> - def runHourly(self):
> - LaunchpadZopelessLayer.switchDbUser('garbo-hourly')
> + def runHourly(self, maximum_chunk_size=2):
> + LaunchpadZopelessLayer.switchDbUser('garbo_hourly')
> collector = HourlyDatabaseGarbageCollector(test_args=[])
> + collector._maximum_chunk_size = maximum_chunk_size
> collector.logger = QuietFakeLogger()
> collector.main()
>
> def test_OAuthNoncePruner(self):
> - store = IMasterStore(OAuthNonce)
> now = datetime.utcnow().replace(tzinfo=UTC)
> timestamps = [
> now - timedelta(days=2), # Garbage
> @@ -79,6 +84,7 @@
> now, # Not garbage
> ]
> LaunchpadZopelessLayer.switchDbUser('testadmin')
> + store = IMasterStore(OAuthNonce)
>
> # Make sure we start with 0 nonces.
> self.failUnlessEqual(store.find(OAuthNonce).count(), 0)
> @@ -93,7 +99,9 @@
> # Make sure we have 4 nonces now.
> self.failUnlessEqual(store.find(OAuthNonce).count(), 4)
>
> - self.runHourly()
> + self.runHourly(maximum_chunk_size=60) # 1 minute maxi...

Read more...

review: Approve (code)
Revision history for this message
Stuart Bishop (stub) wrote :

On Tue, Apr 7, 2009 at 8:11 PM, Brad Crittenden <email address hidden> wrote:

>> +    def test_OpenIDAssociationPruner(self, pruner=OpenIDAssociationPruner):
>> +        store_name = pruner.store_name
>> +        table_name = pruner.table_name
>> +        LaunchpadZopelessLayer.switchDbUser('testadmin')
>> +        store_selector = getUtility(IStoreSelector)
>> +        store = store_selector.get(store_name, MASTER_FLAVOR)
>> +        now = time.time()
>> +        # Create some associations in the past with lifetimes
>> +        for delta in range(0, 20):
>> +            store.execute("""
>> +                INSERT INTO %s (server_url, handle, issued, lifetime)
>> +                VALUES (%s, %s, %d, %d)
>> +                """ % (table_name, str(delta), str(delta), now-10, delta))
>> +        transaction.commit()
>> +
>> +        num_expired = store.execute("""
>> +            SELECT COUNT(*) FROM %s
>> +            WHERE issued + lifetime < %f
>> +            """ % (table_name, now)).get_one()[0]
>> +        self.failUnless(num_expired > 0)
>> +
>> +        self.runHourly()
>> +
>> +        LaunchpadZopelessLayer.switchDbUser('testadmin')
>> +        store = store_selector.get(store_name, MASTER_FLAVOR)
>> +        num_expired = store.execute("""
>> +            SELECT COUNT(*) FROM %s
>> +            WHERE issued + lifetime < %f
>> +            """ % (table_name, now)).get_one()[0]
>> +        self.failUnlessEqual(num_expired, 0)
>
> This test depends on all three parts completing within one second, the
> granularity of your delta.  While I'm having a hard time envisioning
> that being a problem it does seem like a potential spot for random
> failure.  Perhaps if you just state the assumption in a comment that
> will suffice and be a big clue if it should ever fail.

I thought I'd fixed the logic to stop that already. I have a
consistent 'now' at the top of the test. The first test checks that
there is at least one item expired at time 'now'. The second check
tests that there are 0 items expired at time 'now'. The only change I
see if things pause or run slow is self.runHourly(), and this just
means more items might get expired. The final check doesn't care,
because it is just ensuring that all the items at the test start time
were expired and it couldn't care less if more were expired.

So if my logic is correct (it is 4:30am), the problem is that I'm not
checking that items that should not have been expired have not been
expired.

--
Stuart Bishop <email address hidden>
http://www.stuartbishop.net/

Revision history for this message
Stuart Bishop (stub) wrote :

On Wed, Apr 8, 2009 at 4:42 AM, Stuart Bishop <email address hidden> wrote:

> I thought I'd fixed the logic to stop that already. I have a
> consistent 'now' at the top of the test. The first test checks that
> there is at least one item expired at time 'now'. The second check
> tests that there are 0 items expired at time 'now'.  The only change I
> see if things pause or run slow is self.runHourly(), and this just
> means more items might get expired. The final check doesn't care,
> because it is just ensuring that all the items at the test start time
> were expired and it couldn't care less if more were expired.
>
> So if my logic is correct (it is 4:30am), the problem is that I'm not
> checking that items that should not have been expired have not been
> expired.

I've commented this test better and added the extra 'make sure we didn't just trash everything' check.

--
Stuart Bishop <email address hidden>
http://www.stuartbishop.net/

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg 2009-05-08 02:33:14 +0000
+++ database/schema/security.cfg 2009-05-10 03:41:25 +0000
@@ -11,6 +11,7 @@
11[public]11[public]
12# The public role is automatically granted to all users by PostgreSQL12# The public role is automatically granted to all users by PostgreSQL
13type=group13type=group
14public.activity() = EXECUTE
14public.person_sort_key(text, text) = EXECUTE15public.person_sort_key(text, text) = EXECUTE
15public.debversion_sort_key(text) = EXECUTE16public.debversion_sort_key(text) = EXECUTE
16public.null_count(anyarray) = EXECUTE17public.null_count(anyarray) = EXECUTE
@@ -98,6 +99,9 @@
98public.personlanguage = SELECT99public.personlanguage = SELECT
99public.teammembership = SELECT100public.teammembership = SELECT
100public.teamparticipation = SELECT101public.teamparticipation = SELECT
102# XXX 2009-05-07 stub bug=373252: SELECT and DELETE permissions required
103# for garbo.py. INSERT permission needed for the tests.
104public.openidassociation = SELECT, INSERT, DELETE
101105
102[launchpad_main]106[launchpad_main]
103# lpmain replication set access from the main Z3 application.107# lpmain replication set access from the main Z3 application.
@@ -1613,7 +1617,7 @@
1613public.libraryfiledownloadcount = SELECT, INSERT, UPDATE, DELETE1617public.libraryfiledownloadcount = SELECT, INSERT, UPDATE, DELETE
16141618
1615[garbo]1619[garbo]
1616# garbo-hourly and garbo-daily script permissions. We define the1620# garbo_hourly and garbo_daily script permissions. We define the
1617# permissions here in this group instead of in the users, so tasks can1621# permissions here in this group instead of in the users, so tasks can
1618# be shuffled around between the daily and hourly sections without1622# be shuffled around between the daily and hourly sections without
1619# changing DB permissions.1623# changing DB permissions.
@@ -1621,6 +1625,8 @@
1621groups=script1625groups=script
1622public.codeimportresult = SELECT, DELETE1626public.codeimportresult = SELECT, DELETE
1623public.oauthnonce = SELECT, DELETE1627public.oauthnonce = SELECT, DELETE
1628public.openidassociation = SELECT, DELETE
1629public.openidconsumerassociation = SELECT, DELETE
1624public.openidconsumernonce = SELECT, DELETE1630public.openidconsumernonce = SELECT, DELETE
1625public.revisioncache = SELECT, DELETE1631public.revisioncache = SELECT, DELETE
1626public.person = SELECT1632public.person = SELECT
@@ -1628,11 +1634,11 @@
1628public.hwsubmission = SELECT, UPDATE1634public.hwsubmission = SELECT, UPDATE
1629public.mailinglistsubscription = SELECT, DELETE1635public.mailinglistsubscription = SELECT, DELETE
16301636
1631[garbo-daily]1637[garbo_daily]
1632type=user1638type=user
1633groups=garbo1639groups=garbo
16341640
1635[garbo-hourly]1641[garbo_hourly]
1636type=user1642type=user
1637groups=garbo1643groups=garbo
16381644
16391645
=== modified file 'database/schema/trusted.sql'
--- database/schema/trusted.sql 2009-04-24 14:43:14 +0000
+++ database/schema/trusted.sql 2009-05-10 03:41:25 +0000
@@ -48,10 +48,52 @@
4848
49COMMENT ON FUNCTION null_count(anyarray) IS 'Return the number of NULLs in the first row of the given array.';49COMMENT ON FUNCTION null_count(anyarray) IS 'Return the number of NULLs in the first row of the given array.';
5050
51
52CREATE OR REPLACE FUNCTION replication_lag() RETURNS interval
53LANGUAGE plpgsql STABLE SECURITY DEFINER AS
54$$
55 DECLARE
56 v_lag interval;
57 BEGIN
58 SELECT INTO v_lag max(st_lag_time) FROM _sl.sl_status;
59 RETURN v_lag;
60 -- Slony-I not installed here - non-replicated setup.
61 EXCEPTION
62 WHEN invalid_schema_name THEN
63 RETURN NULL;
64 WHEN undefined_table THEN
65 RETURN NULL;
66 END;
67$$;
68
69COMMENT ON FUNCTION replication_lag() IS
70'Returns the worst lag time known to this node in our cluster, or NULL if not a replicated installation.';
71
72
73CREATE OR REPLACE FUNCTION activity()
74RETURNS SETOF pg_catalog.pg_stat_activity
75LANGUAGE SQL VOLATILE SECURITY DEFINER AS
76$$
77 SELECT
78 datid, datname, procpid, usesysid, usename,
79 CASE
80 WHEN current_query LIKE '<IDLE>%'
81 THEN current_query
82 ELSE
83 NULL
84 END AS current_query,
85 waiting, xact_start, query_start,
86 backend_start, client_addr, client_port
87 FROM pg_catalog.pg_stat_activity;
88$$;
89
90COMMENT ON FUNCTION activity() IS
91'SECURITY DEFINER wrapper around pg_stat_activity allowing unprivileged users to access most of its information.';
92
93
51/* This is created as a function so the same definition can be used with94/* This is created as a function so the same definition can be used with
52 many tables95 many tables
53*/96*/
54
55CREATE OR REPLACE FUNCTION valid_name(text) RETURNS boolean97CREATE OR REPLACE FUNCTION valid_name(text) RETURNS boolean
56LANGUAGE plpythonu IMMUTABLE RETURNS NULL ON NULL INPUT AS98LANGUAGE plpythonu IMMUTABLE RETURNS NULL ON NULL INPUT AS
57$$99$$
@@ -858,27 +900,6 @@
858'AFTER UPDATE trigger on BugAffectsPerson maintaining the Bug.users_affected_count column';900'AFTER UPDATE trigger on BugAffectsPerson maintaining the Bug.users_affected_count column';
859901
860902
861CREATE OR REPLACE FUNCTION replication_lag() RETURNS interval
862LANGUAGE plpgsql STABLE SECURITY DEFINER AS
863$$
864 DECLARE
865 v_lag interval;
866 BEGIN
867 SELECT INTO v_lag max(st_lag_time) FROM _sl.sl_status;
868 RETURN v_lag;
869 -- Slony-I not installed here - non-replicated setup.
870 EXCEPTION
871 WHEN invalid_schema_name THEN
872 RETURN NULL;
873 WHEN undefined_table THEN
874 RETURN NULL;
875 END;
876$$;
877
878COMMENT ON FUNCTION replication_lag() IS
879'Returns the worst lag time known to this node in our cluster, or NULL if not a replicated installation.';
880
881
882CREATE OR REPLACE FUNCTION set_bugtask_date_milestone_set() RETURNS TRIGGER903CREATE OR REPLACE FUNCTION set_bugtask_date_milestone_set() RETURNS TRIGGER
883LANGUAGE plpgsql AS904LANGUAGE plpgsql AS
884$$905$$
885906
=== modified file 'lib/canonical/database/sqlbase.py'
--- lib/canonical/database/sqlbase.py 2009-04-17 10:32:16 +0000
+++ lib/canonical/database/sqlbase.py 2009-05-10 03:41:25 +0000
@@ -5,6 +5,7 @@
5import warnings5import warnings
6from datetime import datetime6from datetime import datetime
7import re7import re
8from textwrap import dedent
89
9import psycopg210import psycopg2
10from psycopg2.extensions import (11from psycopg2.extensions import (
@@ -293,15 +294,27 @@
293 if dbuser is None:294 if dbuser is None:
294 dbuser = config.launchpad.dbuser295 dbuser = config.launchpad.dbuser
295296
296 # Construct a config fragment:297 isolation_level = {
297 overlay = '[database]\n'
298 overlay += 'main_master: %s\n' % connection_string
299 overlay += 'isolation_level: %s\n' % {
300 ISOLATION_LEVEL_AUTOCOMMIT: 'autocommit',298 ISOLATION_LEVEL_AUTOCOMMIT: 'autocommit',
301 ISOLATION_LEVEL_READ_COMMITTED: 'read_committed',299 ISOLATION_LEVEL_READ_COMMITTED: 'read_committed',
302 ISOLATION_LEVEL_SERIALIZABLE: 'serializable'}[isolation]300 ISOLATION_LEVEL_SERIALIZABLE: 'serializable'}[isolation]
301
302 # Construct a config fragment:
303 overlay = dedent("""\
304 [database]
305 main_master: %(connection_string)s
306 auth_master: %(connection_string)s
307 isolation_level: %(isolation_level)s
308 """ % vars())
309
303 if dbuser:310 if dbuser:
304 overlay += '\n[launchpad]\ndbuser: %s\n' % dbuser311 # XXX 2009-05-07 stub bug=373252: Scripts should not be connecting
312 # as the launchpad_auth database user.
313 overlay += dedent("""\
314 [launchpad]
315 dbuser: %(dbuser)s
316 auth_dbuser: launchpad_auth
317 """ % vars())
305318
306 if cls._installed is not None:319 if cls._installed is not None:
307 if cls._config_overlay != overlay:320 if cls._config_overlay != overlay:
308321
=== modified file 'lib/canonical/launchpad/doc/hwdb.txt'
--- lib/canonical/launchpad/doc/hwdb.txt 2009-05-07 02:02:51 +0000
+++ lib/canonical/launchpad/doc/hwdb.txt 2009-05-10 03:41:25 +0000
@@ -371,7 +371,8 @@
371 >>> user.validateAndEnsurePreferredEmail(email)371 >>> user.validateAndEnsurePreferredEmail(email)
372 >>> transaction.commit()372 >>> transaction.commit()
373 >>> from canonical.launchpad.scripts.garbo import HWSubmissionEmailLinker373 >>> from canonical.launchpad.scripts.garbo import HWSubmissionEmailLinker
374 >>> HWSubmissionEmailLinker().run()374 >>> from canonical.launchpad.ftests.logger import MockLogger
375 >>> HWSubmissionEmailLinker(log=MockLogger()).run()
375 >>> submission = hw_submission_set.getBySubmissionKey(u'unique-id-2')376 >>> submission = hw_submission_set.getBySubmissionKey(u'unique-id-2')
376 >>> print submission.owner.displayname377 >>> print submission.owner.displayname
377 Beeblebrox378 Beeblebrox
@@ -405,7 +406,7 @@
405 >>> login_person(user)406 >>> login_person(user)
406 >>> user.validateAndEnsurePreferredEmail(email)407 >>> user.validateAndEnsurePreferredEmail(email)
407 >>> transaction.commit()408 >>> transaction.commit()
408 >>> HWSubmissionEmailLinker().run()409 >>> HWSubmissionEmailLinker(log=MockLogger()).run()
409 >>> submission = hw_submission_set.getBySubmissionKey(u'unique-id-2')410 >>> submission = hw_submission_set.getBySubmissionKey(u'unique-id-2')
410 >>> print submission.owner.displayname411 >>> print submission.owner.displayname
411 Beeblebrox412 Beeblebrox
412413
=== modified file 'lib/canonical/launchpad/doc/script-monitoring.txt'
--- lib/canonical/launchpad/doc/script-monitoring.txt 2009-04-17 10:32:16 +0000
+++ lib/canonical/launchpad/doc/script-monitoring.txt 2009-05-10 03:41:25 +0000
@@ -28,7 +28,7 @@
28 >>> from canonical.testing.layers import LaunchpadZopelessLayer28 >>> from canonical.testing.layers import LaunchpadZopelessLayer
2929
30 >>> UTC = pytz.timezone('UTC')30 >>> UTC = pytz.timezone('UTC')
31 >>> LaunchpadZopelessLayer.switchDbUser('garbo-daily') # A script db user31 >>> LaunchpadZopelessLayer.switchDbUser('garbo_daily') # A script db user
3232
33 >>> activity = getUtility(IScriptActivitySet).recordSuccess(33 >>> activity = getUtility(IScriptActivitySet).recordSuccess(
34 ... name='script-name',34 ... name='script-name',
@@ -89,7 +89,7 @@
89 ... raise RuntimeError('Some failure')89 ... raise RuntimeError('Some failure')
90 ...90 ...
91 ... if __name__ == '__main__':91 ... if __name__ == '__main__':
92 ... script = TestScript('test-script', 'garbo-daily')92 ... script = TestScript('test-script', 'garbo_daily')
93 ... script.run()93 ... script.run()
94 ... """)94 ... """)
95 >>> script_file.flush()95 >>> script_file.flush()
9696
=== modified file 'lib/canonical/launchpad/scripts/garbo.py'
--- lib/canonical/launchpad/scripts/garbo.py 2009-05-08 02:33:14 +0000
+++ lib/canonical/launchpad/scripts/garbo.py 2009-05-10 03:41:25 +0000
@@ -23,16 +23,13 @@
23from canonical.launchpad.interfaces.emailaddress import EmailAddressStatus23from canonical.launchpad.interfaces.emailaddress import EmailAddressStatus
24from canonical.launchpad.interfaces.looptuner import ITunableLoop24from canonical.launchpad.interfaces.looptuner import ITunableLoop
25from canonical.launchpad.scripts.base import LaunchpadCronScript25from canonical.launchpad.scripts.base import LaunchpadCronScript
26from canonical.launchpad.utilities.looptuner import LoopTuner26from canonical.launchpad.utilities.looptuner import DBLoopTuner
27from canonical.launchpad.webapp.interfaces import (27from canonical.launchpad.webapp.interfaces import (
28 IStoreSelector, MAIN_STORE, MASTER_FLAVOR)28 IStoreSelector, AUTH_STORE, MAIN_STORE, MASTER_FLAVOR)
29from lp.code.model.codeimportresult import CodeImportResult29from lp.code.model.codeimportresult import CodeImportResult
30from lp.code.model.revision import RevisionAuthor30from lp.code.model.revision import RevisionAuthor, RevisionCache
31from lp.registry.model.mailinglist import MailingListSubscription31from lp.registry.model.mailinglist import MailingListSubscription
3232
33from lp.code.model.codeimportresult import CodeImportResult
34from lp.code.model.revision import RevisionCache
35
3633
37ONE_DAY_IN_SECONDS = 24*60*6034ONE_DAY_IN_SECONDS = 24*60*60
3835
@@ -45,9 +42,12 @@
45 maximum_chunk_size = None # Override42 maximum_chunk_size = None # Override
46 cooldown_time = 043 cooldown_time = 0
4744
45 def __init__(self, log):
46 self.log = log
47
48 def run(self):48 def run(self):
49 assert self.maximum_chunk_size is not None, "Did not override."49 assert self.maximum_chunk_size is not None, "Did not override."
50 LoopTuner(50 DBLoopTuner(
51 self, self.goal_seconds,51 self, self.goal_seconds,
52 minimum_chunk_size = self.minimum_chunk_size,52 minimum_chunk_size = self.minimum_chunk_size,
53 maximum_chunk_size = self.maximum_chunk_size,53 maximum_chunk_size = self.maximum_chunk_size,
@@ -61,7 +61,8 @@
61 """61 """
62 maximum_chunk_size = 6*60*60 # 6 hours in seconds.62 maximum_chunk_size = 6*60*60 # 6 hours in seconds.
6363
64 def __init__(self):64 def __init__(self, log):
65 super(OAuthNoncePruner, self).__init__(log)
65 self.store = IMasterStore(OAuthNonce)66 self.store = IMasterStore(OAuthNonce)
66 self.oldest_age = self.store.execute("""67 self.oldest_age = self.store.execute("""
67 SELECT COALESCE(EXTRACT(EPOCH FROM68 SELECT COALESCE(EXTRACT(EPOCH FROM
@@ -77,6 +78,10 @@
77 self.oldest_age = max(78 self.oldest_age = max(
78 ONE_DAY_IN_SECONDS, self.oldest_age - chunk_size)79 ONE_DAY_IN_SECONDS, self.oldest_age - chunk_size)
7980
81 self.log.debug(
82 "Removed OAuthNonce rows older than %d seconds"
83 % self.oldest_age)
84
80 self.store.find(85 self.store.find(
81 OAuthNonce,86 OAuthNonce,
82 OAuthNonce.request_timestamp < SQL(87 OAuthNonce.request_timestamp < SQL(
@@ -92,7 +97,8 @@
92 """97 """
93 maximum_chunk_size = 6*60*60 # 6 hours in seconds.98 maximum_chunk_size = 6*60*60 # 6 hours in seconds.
9499
95 def __init__(self):100 def __init__(self, log):
101 super(OpenIDConsumerNoncePruner, self).__init__(log)
96 self.store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR)102 self.store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR)
97 self.earliest_timestamp = self.store.find(103 self.earliest_timestamp = self.store.find(
98 Min(OpenIDConsumerNonce.timestamp)).one()104 Min(OpenIDConsumerNonce.timestamp)).one()
@@ -109,12 +115,52 @@
109 self.earliest_wanted_timestamp,115 self.earliest_wanted_timestamp,
110 self.earliest_timestamp + chunk_size)116 self.earliest_timestamp + chunk_size)
111117
118 self.log.debug(
119 "Removing OpenIDConsumerNonce rows older than %s"
120 % self.earliest_timestamp)
121
112 self.store.find(122 self.store.find(
113 OpenIDConsumerNonce,123 OpenIDConsumerNonce,
114 OpenIDConsumerNonce.timestamp < self.earliest_timestamp).remove()124 OpenIDConsumerNonce.timestamp < self.earliest_timestamp).remove()
115 transaction.commit()125 transaction.commit()
116126
117127
128class OpenIDAssociationPruner(TunableLoop):
129 minimum_chunk_size = 3500
130 maximum_chunk_size = 50000
131
132 table_name = 'OpenIDAssociation'
133 store_name = AUTH_STORE
134
135 _num_removed = None
136
137 def __init__(self, log):
138 super(OpenIDAssociationPruner, self).__init__(log)
139 self.store = getUtility(IStoreSelector).get(
140 self.store_name, MASTER_FLAVOR)
141
142 def __call__(self, chunksize):
143 result = self.store.execute("""
144 DELETE FROM %s
145 WHERE (server_url, handle) IN (
146 SELECT server_url, handle FROM %s
147 WHERE issued + lifetime <
148 EXTRACT(EPOCH FROM CURRENT_TIMESTAMP)
149 LIMIT %d
150 )
151 """ % (self.table_name, self.table_name, int(chunksize)))
152 self._num_removed = result._raw_cursor.rowcount
153 transaction.commit()
154
155 def isDone(self):
156 return self._num_removed == 0
157
158
159class OpenIDConsumerAssociationPruner(OpenIDAssociationPruner):
160 table_name = 'OpenIDConsumerAssociation'
161 store_name = MAIN_STORE
162
163
118class RevisionCachePruner(TunableLoop):164class RevisionCachePruner(TunableLoop):
119 """A tunable loop to remove old revisions from the cache."""165 """A tunable loop to remove old revisions from the cache."""
120166
@@ -141,8 +187,9 @@
141 and they are not one of the 4 most recent results for that187 and they are not one of the 4 most recent results for that
142 CodeImport.188 CodeImport.
143 """189 """
144 maximum_chunk_size = 100190 maximum_chunk_size = 1000
145 def __init__(self):191 def __init__(self, log):
192 super(CodeImportResultPruner, self).__init__(log)
146 self.store = IMasterStore(CodeImportResult)193 self.store = IMasterStore(CodeImportResult)
147194
148 self.min_code_import = self.store.find(195 self.min_code_import = self.store.find(
@@ -158,6 +205,11 @@
158 or self.next_code_import_id > self.max_code_import)205 or self.next_code_import_id > self.max_code_import)
159206
160 def __call__(self, chunk_size):207 def __call__(self, chunk_size):
208 self.log.debug(
209 "Removing expired CodeImportResults for CodeImports %d -> %d" % (
210 self.next_code_import_id,
211 self.next_code_import_id + chunk_size - 1))
212
161 self.store.execute("""213 self.store.execute("""
162 DELETE FROM CodeImportResult214 DELETE FROM CodeImportResult
163 WHERE215 WHERE
@@ -193,7 +245,8 @@
193245
194 maximum_chunk_size = 1000246 maximum_chunk_size = 1000
195247
196 def __init__(self):248 def __init__(self, log):
249 super(RevisionAuthorEmailLinker, self).__init__(log)
197 self.author_store = IMasterStore(RevisionAuthor)250 self.author_store = IMasterStore(RevisionAuthor)
198 self.email_store = IMasterStore(EmailAddress)251 self.email_store = IMasterStore(EmailAddress)
199252
@@ -252,7 +305,8 @@
252305
253 maximum_chunk_size = 1000306 maximum_chunk_size = 1000
254307
255 def __init__(self):308 def __init__(self, log):
309 super(HWSubmissionEmailLinker, self).__init__(log)
256 self.submission_store = IMasterStore(HWSubmission)310 self.submission_store = IMasterStore(HWSubmission)
257 self.email_store = IMasterStore(EmailAddress)311 self.email_store = IMasterStore(EmailAddress)
258312
@@ -311,7 +365,8 @@
311365
312 maximum_chunk_size = 1000366 maximum_chunk_size = 1000
313367
314 def __init__(self):368 def __init__(self, log):
369 super(MailingListSubscriptionPruner, self).__init__(log)
315 self.subscription_store = IMasterStore(MailingListSubscription)370 self.subscription_store = IMasterStore(MailingListSubscription)
316 self.email_store = IMasterStore(EmailAddress)371 self.email_store = IMasterStore(EmailAddress)
317372
@@ -354,14 +409,24 @@
354 script_name = None # Script name for locking and database user. Override.409 script_name = None # Script name for locking and database user. Override.
355 tunable_loops = None # Collection of TunableLoops. Override.410 tunable_loops = None # Collection of TunableLoops. Override.
356411
412 # _maximum_chunk_size is used to override the defined
413 # maximum_chunk_size to allow our tests to ensure multiple calls to
414 # __call__ are required without creating huge amounts of test data.
415 _maximum_chunk_size = None
416
357 def __init__(self, test_args=None):417 def __init__(self, test_args=None):
358 super(BaseDatabaseGarbageCollector, self).__init__(418 super(BaseDatabaseGarbageCollector, self).__init__(
359 self.script_name, dbuser=self.script_name, test_args=test_args)419 self.script_name,
420 dbuser=self.script_name.replace('-','_'),
421 test_args=test_args)
360422
361 def main(self):423 def main(self):
362 for tunable_loop in self.tunable_loops:424 for tunable_loop in self.tunable_loops:
363 self.logger.info("Running %s" % tunable_loop.__name__)425 self.logger.info("Running %s" % tunable_loop.__name__)
364 tunable_loop().run()426 tunable_loop = tunable_loop(log=self.logger)
427 if self._maximum_chunk_size is not None:
428 tunable_loop.maximum_chunk_size = self._maximum_chunk_size
429 tunable_loop.run()
365430
366431
367class HourlyDatabaseGarbageCollector(BaseDatabaseGarbageCollector):432class HourlyDatabaseGarbageCollector(BaseDatabaseGarbageCollector):
@@ -369,9 +434,12 @@
369 tunable_loops = [434 tunable_loops = [
370 OAuthNoncePruner,435 OAuthNoncePruner,
371 OpenIDConsumerNoncePruner,436 OpenIDConsumerNoncePruner,
437 OpenIDAssociationPruner,
438 OpenIDConsumerAssociationPruner,
372 RevisionCachePruner,439 RevisionCachePruner,
373 ]440 ]
374441
442
375class DailyDatabaseGarbageCollector(BaseDatabaseGarbageCollector):443class DailyDatabaseGarbageCollector(BaseDatabaseGarbageCollector):
376 script_name = 'garbo-daily'444 script_name = 'garbo-daily'
377 tunable_loops = [445 tunable_loops = [
378446
=== modified file 'lib/canonical/launchpad/scripts/tests/test_garbo.py'
--- lib/canonical/launchpad/scripts/tests/test_garbo.py 2009-05-04 09:16:25 +0000
+++ lib/canonical/launchpad/scripts/tests/test_garbo.py 2009-05-10 03:41:25 +0000
@@ -13,6 +13,7 @@
13from storm.expr import Min13from storm.expr import Min
14from storm.store import Store14from storm.store import Store
15import transaction15import transaction
16from zope.component import getUtility
1617
17from lp.code.model.codeimportresult import CodeImportResult18from lp.code.model.codeimportresult import CodeImportResult
18from canonical.launchpad.database.oauth import OAuthNonce19from canonical.launchpad.database.oauth import OAuthNonce
@@ -22,9 +23,12 @@
22from lp.code.interfaces.codeimportresult import CodeImportResultStatus23from lp.code.interfaces.codeimportresult import CodeImportResultStatus
23from canonical.launchpad.testing import TestCase, TestCaseWithFactory24from canonical.launchpad.testing import TestCase, TestCaseWithFactory
24from canonical.launchpad.scripts.garbo import (25from canonical.launchpad.scripts.garbo import (
25 DailyDatabaseGarbageCollector, HourlyDatabaseGarbageCollector)26 DailyDatabaseGarbageCollector, HourlyDatabaseGarbageCollector,
27 OpenIDAssociationPruner, OpenIDConsumerAssociationPruner)
26from canonical.launchpad.scripts.tests import run_script28from canonical.launchpad.scripts.tests import run_script
27from canonical.launchpad.scripts.logger import QuietFakeLogger29from canonical.launchpad.scripts.logger import QuietFakeLogger
30from canonical.launchpad.webapp.interfaces import (
31 IStoreSelector, MASTER_FLAVOR)
28from canonical.testing.layers import (32from canonical.testing.layers import (
29 DatabaseLayer, LaunchpadScriptLayer, LaunchpadZopelessLayer)33 DatabaseLayer, LaunchpadScriptLayer, LaunchpadZopelessLayer)
30from lp.registry.interfaces.person import PersonCreationRationale34from lp.registry.interfaces.person import PersonCreationRationale
@@ -59,20 +63,21 @@
59 self.runDaily()63 self.runDaily()
60 self.runHourly()64 self.runHourly()
6165
62 def runDaily(self):66 def runDaily(self, maximum_chunk_size=2):
63 LaunchpadZopelessLayer.switchDbUser('garbo-daily')67 LaunchpadZopelessLayer.switchDbUser('garbo_daily')
64 collector = DailyDatabaseGarbageCollector(test_args=[])68 collector = DailyDatabaseGarbageCollector(test_args=[])
69 collector._maximum_chunk_size = maximum_chunk_size
65 collector.logger = QuietFakeLogger()70 collector.logger = QuietFakeLogger()
66 collector.main()71 collector.main()
6772
68 def runHourly(self):73 def runHourly(self, maximum_chunk_size=2):
69 LaunchpadZopelessLayer.switchDbUser('garbo-hourly')74 LaunchpadZopelessLayer.switchDbUser('garbo_hourly')
70 collector = HourlyDatabaseGarbageCollector(test_args=[])75 collector = HourlyDatabaseGarbageCollector(test_args=[])
76 collector._maximum_chunk_size = maximum_chunk_size
71 collector.logger = QuietFakeLogger()77 collector.logger = QuietFakeLogger()
72 collector.main()78 collector.main()
7379
74 def test_OAuthNoncePruner(self):80 def test_OAuthNoncePruner(self):
75 store = IMasterStore(OAuthNonce)
76 now = datetime.utcnow().replace(tzinfo=UTC)81 now = datetime.utcnow().replace(tzinfo=UTC)
77 timestamps = [82 timestamps = [
78 now - timedelta(days=2), # Garbage83 now - timedelta(days=2), # Garbage
@@ -81,6 +86,7 @@
81 now, # Not garbage86 now, # Not garbage
82 ]87 ]
83 LaunchpadZopelessLayer.switchDbUser('testadmin')88 LaunchpadZopelessLayer.switchDbUser('testadmin')
89 store = IMasterStore(OAuthNonce)
8490
85 # Make sure we start with 0 nonces.91 # Make sure we start with 0 nonces.
86 self.failUnlessEqual(store.find(OAuthNonce).count(), 0)92 self.failUnlessEqual(store.find(OAuthNonce).count(), 0)
@@ -95,7 +101,9 @@
95 # Make sure we have 4 nonces now.101 # Make sure we have 4 nonces now.
96 self.failUnlessEqual(store.find(OAuthNonce).count(), 4)102 self.failUnlessEqual(store.find(OAuthNonce).count(), 4)
97103
98 self.runHourly()104 self.runHourly(maximum_chunk_size=60) # 1 minute maximum chunk size
105
106 store = IMasterStore(OAuthNonce)
99107
100 # Now back to two, having removed the two garbage entries.108 # Now back to two, having removed the two garbage entries.
101 self.failUnlessEqual(store.find(OAuthNonce).count(), 2)109 self.failUnlessEqual(store.find(OAuthNonce).count(), 2)
@@ -139,7 +147,9 @@
139 self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 4)147 self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 4)
140148
141 # Run the garbage collector.149 # Run the garbage collector.
142 self.runHourly()150 self.runHourly(maximum_chunk_size=60) # 1 minute maximum chunks.
151
152 store = IMasterStore(OpenIDConsumerNonce)
143153
144 # We should now have 2 nonces.154 # We should now have 2 nonces.
145 self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 2)155 self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 2)
@@ -170,16 +180,19 @@
170 self.runDaily()180 self.runDaily()
171181
172 # Nothing is removed, because we always keep the 4 latest.182 # Nothing is removed, because we always keep the 4 latest.
183 store = IMasterStore(CodeImportResult)
173 self.failUnlessEqual(184 self.failUnlessEqual(
174 store.find(CodeImportResult).count(), 4)185 store.find(CodeImportResult).count(), 4)
175186
176 new_code_import_result(now - timedelta(days=31))187 new_code_import_result(now - timedelta(days=31))
177 self.runDaily()188 self.runDaily()
189 store = IMasterStore(CodeImportResult)
178 self.failUnlessEqual(190 self.failUnlessEqual(
179 store.find(CodeImportResult).count(), 4)191 store.find(CodeImportResult).count(), 4)
180192
181 new_code_import_result(now - timedelta(days=29))193 new_code_import_result(now - timedelta(days=29))
182 self.runDaily()194 self.runDaily()
195 store = IMasterStore(CodeImportResult)
183 self.failUnlessEqual(196 self.failUnlessEqual(
184 store.find(CodeImportResult).count(), 4)197 store.find(CodeImportResult).count(), 4)
185198
@@ -189,6 +202,53 @@
189 Min(CodeImportResult.date_created)).one().replace(tzinfo=UTC)202 Min(CodeImportResult.date_created)).one().replace(tzinfo=UTC)
190 >= now - timedelta(days=30))203 >= now - timedelta(days=30))
191204
205 def test_OpenIDAssociationPruner(self, pruner=OpenIDAssociationPruner):
206 store_name = pruner.store_name
207 table_name = pruner.table_name
208 LaunchpadZopelessLayer.switchDbUser('testadmin')
209 store_selector = getUtility(IStoreSelector)
210 store = store_selector.get(store_name, MASTER_FLAVOR)
211 now = time.time()
212 # Create some associations in the past with lifetimes
213 for delta in range(0, 20):
214 store.execute("""
215 INSERT INTO %s (server_url, handle, issued, lifetime)
216 VALUES (%s, %s, %d, %d)
217 """ % (table_name, str(delta), str(delta), now-10, delta))
218 transaction.commit()
219
220 # Ensure that we created at least one expirable row (using the
221 # test start time as 'now').
222 num_expired = store.execute("""
223 SELECT COUNT(*) FROM %s
224 WHERE issued + lifetime < %f
225 """ % (table_name, now)).get_one()[0]
226 self.failUnless(num_expired > 0)
227
228 # Expire all those expirable rows, and possibly a few more if this
229 # test is running slow.
230 self.runHourly()
231
232 LaunchpadZopelessLayer.switchDbUser('testadmin')
233 store = store_selector.get(store_name, MASTER_FLAVOR)
234 # Confirm all the rows we know should have been expired have
235 # been expired. These are the ones that would be expired using
236 # the test start time as 'now'.
237 num_expired = store.execute("""
238 SELECT COUNT(*) FROM %s
239 WHERE issued + lifetime < %f
240 """ % (table_name, now)).get_one()[0]
241 self.failUnlessEqual(num_expired, 0)
242
243 # Confirm that we haven't expired everything. This test will fail
244 # if it has taken 10 seconds to get this far.
245 num_unexpired = store.execute(
246 "SELECT COUNT(*) FROM %s" % table_name).get_one()[0]
247 self.failUnless(num_unexpired > 0)
248
249 def test_OpenIDConsumerAssociationPruner(self):
250 self.test_OpenIDAssociationPruner(OpenIDConsumerAssociationPruner)
251
192 def test_RevisionAuthorEmailLinker(self):252 def test_RevisionAuthorEmailLinker(self):
193 LaunchpadZopelessLayer.switchDbUser('testadmin')253 LaunchpadZopelessLayer.switchDbUser('testadmin')
194 rev1 = self.factory.makeRevision('Author 1 <author-1@Example.Org>')254 rev1 = self.factory.makeRevision('Author 1 <author-1@Example.Org>')
@@ -301,5 +361,6 @@
301 self.runDaily()361 self.runDaily()
302 self.assertEqual(mailing_list.getSubscription(person), None)362 self.assertEqual(mailing_list.getSubscription(person), None)
303363
364
304def test_suite():365def test_suite():
305 return unittest.TestLoader().loadTestsFromName(__name__)366 return unittest.TestLoader().loadTestsFromName(__name__)
306367
=== modified file 'lib/canonical/launchpad/utilities/looptuner.py'
--- lib/canonical/launchpad/utilities/looptuner.py 2009-04-17 10:32:16 +0000
+++ lib/canonical/launchpad/utilities/looptuner.py 2009-05-10 03:41:25 +0000
@@ -180,10 +180,10 @@
180 """180 """
181181
182 # We block until replication lag is under this threshold.182 # We block until replication lag is under this threshold.
183 acceptable_replication_lag = timedelta(seconds=90) # In seconds.183 acceptable_replication_lag = timedelta(seconds=30) # In seconds.
184184
185 # We block if there are transactions running longer than this threshold.185 # We block if there are transactions running longer than this threshold.
186 long_running_transaction = 60*60 # In seconds186 long_running_transaction = 30*60 # In seconds
187187
188 def _blockWhenLagged(self):188 def _blockWhenLagged(self):
189 """When database replication lag is high, block until it drops."""189 """When database replication lag is high, block until it drops."""
@@ -222,7 +222,7 @@
222 usename,222 usename,
223 datname,223 datname,
224 current_query224 current_query
225 FROM pg_stat_activity225 FROM activity()
226 WHERE xact_start < CURRENT_TIMESTAMP - interval '%f seconds'226 WHERE xact_start < CURRENT_TIMESTAMP - interval '%f seconds'
227 """ % self.long_running_transaction).get_all())227 """ % self.long_running_transaction).get_all())
228 if not results:228 if not results:
229229
=== modified file 'lib/lp/code/model/tests/test_revision.py'
--- lib/lp/code/model/tests/test_revision.py 2009-05-08 02:33:14 +0000
+++ lib/lp/code/model/tests/test_revision.py 2009-05-10 03:41:25 +0000
@@ -17,6 +17,7 @@
1717
18from canonical.database.sqlbase import cursor18from canonical.database.sqlbase import cursor
19from canonical.launchpad.ftests import login, logout19from canonical.launchpad.ftests import login, logout
20from canonical.launchpad.ftests.logger import MockLogger
20from canonical.launchpad.interfaces.lpstorm import IMasterObject21from canonical.launchpad.interfaces.lpstorm import IMasterObject
21from canonical.launchpad.interfaces.account import AccountStatus22from canonical.launchpad.interfaces.account import AccountStatus
22from canonical.launchpad.scripts.garbo import RevisionAuthorEmailLinker23from canonical.launchpad.scripts.garbo import RevisionAuthorEmailLinker
@@ -165,7 +166,7 @@
165 # The person registers with Launchpad.166 # The person registers with Launchpad.
166 author = self.factory.makePerson(email=email)167 author = self.factory.makePerson(email=email)
167 # Garbo runs the RevisionAuthorEmailLinker job.168 # Garbo runs the RevisionAuthorEmailLinker job.
168 RevisionAuthorEmailLinker().run()169 RevisionAuthorEmailLinker(log=MockLogger()).run()
169 # Now the kama needs allocating.170 # Now the kama needs allocating.
170 self.assertEqual(171 self.assertEqual(
171 [rev], list(RevisionSet.getRevisionsNeedingKarmaAllocated()))172 [rev], list(RevisionSet.getRevisionsNeedingKarmaAllocated()))
172173
=== modified file 'lib/lp/code/model/tests/test_revisionauthor.py'
--- lib/lp/code/model/tests/test_revisionauthor.py 2009-05-07 02:02:51 +0000
+++ lib/lp/code/model/tests/test_revisionauthor.py 2009-05-10 03:41:26 +0000
@@ -11,6 +11,7 @@
1111
12from canonical.config import config12from canonical.config import config
13from canonical.launchpad.interfaces.emailaddress import EmailAddressStatus13from canonical.launchpad.interfaces.emailaddress import EmailAddressStatus
14from canonical.launchpad.ftests.logger import MockLogger
14from canonical.launchpad.scripts.garbo import RevisionAuthorEmailLinker15from canonical.launchpad.scripts.garbo import RevisionAuthorEmailLinker
15from canonical.launchpad.testing import LaunchpadObjectFactory, TestCase16from canonical.launchpad.testing import LaunchpadObjectFactory, TestCase
16from canonical.testing import LaunchpadZopelessLayer17from canonical.testing import LaunchpadZopelessLayer
@@ -157,7 +158,7 @@
157158
158 # After the garbo RevisionAuthorEmailLinker job runs, the link159 # After the garbo RevisionAuthorEmailLinker job runs, the link
159 # is made.160 # is made.
160 RevisionAuthorEmailLinker().run()161 RevisionAuthorEmailLinker(log=MockLogger()).run()
161 self.assertEqual(harry, self.author.person,162 self.assertEqual(harry, self.author.person,
162 'Harry should now be the author.')163 'Harry should now be the author.')
163164
164165
=== modified file 'lib/lp/code/scripts/tests/test_revisionkarma.py'
--- lib/lp/code/scripts/tests/test_revisionkarma.py 2009-05-07 02:02:51 +0000
+++ lib/lp/code/scripts/tests/test_revisionkarma.py 2009-05-10 03:41:26 +0000
@@ -10,6 +10,7 @@
1010
11from canonical.config import config11from canonical.config import config
12from canonical.launchpad.database.emailaddress import EmailAddressSet12from canonical.launchpad.database.emailaddress import EmailAddressSet
13from canonical.launchpad.ftests.logger import MockLogger
13from canonical.launchpad.scripts.garbo import RevisionAuthorEmailLinker14from canonical.launchpad.scripts.garbo import RevisionAuthorEmailLinker
14from canonical.launchpad.testing import TestCaseWithFactory15from canonical.launchpad.testing import TestCaseWithFactory
15from canonical.testing import LaunchpadZopelessLayer16from canonical.testing import LaunchpadZopelessLayer
@@ -74,7 +75,7 @@
74 author = self.factory.makePerson(email=email)75 author = self.factory.makePerson(email=email)
75 transaction.commit()76 transaction.commit()
76 # Run the RevisionAuthorEmailLinker garbo job.77 # Run the RevisionAuthorEmailLinker garbo job.
77 RevisionAuthorEmailLinker().run()78 RevisionAuthorEmailLinker(log=MockLogger()).run()
78 LaunchpadZopelessLayer.switchDbUser(config.revisionkarma.dbuser)79 LaunchpadZopelessLayer.switchDbUser(config.revisionkarma.dbuser)
79 script = RevisionKarmaAllocator(80 script = RevisionKarmaAllocator(
80 'test', config.revisionkarma.dbuser, ['-q'])81 'test', config.revisionkarma.dbuser, ['-q'])
@@ -106,7 +107,7 @@
106 EmailAddressSet().new(email, author, account=author.account))107 EmailAddressSet().new(email, author, account=author.account))
107 transaction.commit()108 transaction.commit()
108 # Run the RevisionAuthorEmailLinker garbo job.109 # Run the RevisionAuthorEmailLinker garbo job.
109 RevisionAuthorEmailLinker().run()110 RevisionAuthorEmailLinker(log=MockLogger()).run()
110111
111 # Now that the revision author is linked to the person, the revision112 # Now that the revision author is linked to the person, the revision
112 # needs karma allocated.113 # needs karma allocated.
113114
=== modified file 'scripts/rosetta/message-sharing-merge.py' (properties changed: +x to -x)

Subscribers

People subscribed via source and target branches

to status/vote changes: