Merge lp:~mwhudson/launchpad/no-hosted-area into lp:launchpad

Proposed by Michael Hudson-Doyle
Status: Merged
Approved by: Tim Penhey
Approved revision: no longer in the source branch.
Merged at revision: 10828
Proposed branch: lp:~mwhudson/launchpad/no-hosted-area
Merge into: lp:launchpad
Diff against target: 9694 lines (+3175/-2889)
146 files modified
Makefile (+4/-5)
bzrplugins/lpserve.py (+4/-4)
configs/development/launchpad-lazr.conf (+2/-2)
configs/testrunner/launchpad-lazr.conf (+3/-7)
cronscripts/merge-proposal-jobs.py (+22/-30)
cronscripts/mirror-prober.sh (+2/-2)
cronscripts/nightly.sh (+14/-14)
cronscripts/publishing/cron.germinate (+1/-1)
cronscripts/publishing/maintenance-check.py (+217/-29)
cronscripts/update_preview_diffs.py (+0/-38)
database/replication/Makefile (+15/-36)
database/replication/authdb_create.sql (+0/-885)
database/replication/authdb_drop.sql (+0/-14)
database/replication/authdb_sequences.sql (+0/-22)
database/replication/helpers.py (+13/-30)
database/replication/initialize.py (+7/-54)
database/replication/new-slave.py (+37/-43)
database/replication/populate_auth_replication_set.py (+0/-177)
database/replication/preamble.py (+1/-1)
database/replication/repair-restored-db.py (+1/-1)
database/replication/report.py (+1/-1)
database/replication/slon_ctl.py (+1/-1)
database/replication/sync.py (+1/-1)
database/schema/diagram.py (+1/-1)
database/schema/emptytables.py (+1/-1)
database/schema/fti.py (+1/-1)
database/schema/online_fti_updater.py (+1/-1)
database/schema/patch-2207-47-0.sql (+6/-0)
database/schema/patch-2207-48-0.sql (+27/-0)
database/schema/pending/add-mailing-list-experts.py (+1/-1)
database/schema/pending/create-openid-rp-configs.py (+1/-1)
database/schema/pending/gnu-savannah-celebrity.py (+1/-1)
database/schema/pending/migrate_kde_potemplates.py (+1/-1)
database/schema/pending/new-person-columns.py (+1/-1)
database/schema/pending/patch-2207-49-0.sql (+16/-0)
database/schema/pending/prune-nonce.py (+1/-1)
database/schema/pending/update-shippingrequest-types.py (+1/-1)
database/schema/pending/update-translation-credits.py (+3/-3)
database/schema/reset_sequences.py (+1/-1)
database/schema/security.cfg (+10/-96)
database/schema/security.py (+1/-1)
database/schema/sort_sql.py (+1/-1)
database/schema/trusted.sql (+33/-33)
database/schema/unautovacuumable.py (+1/-1)
database/schema/upgrade.py (+5/-4)
lib/canonical/config/schema-lazr.conf (+28/-0)
lib/canonical/launchpad/daemons/tachandler.py (+4/-4)
lib/canonical/launchpad/doc/product-update-remote-product-script.txt (+1/-1)
lib/canonical/launchpad/scripts/garbo.py (+2/-151)
lib/canonical/launchpad/scripts/tests/test_garbo.py (+0/-56)
lib/contrib/glock.py (+1/-1)
lib/lp/answers/doc/expiration.txt (+1/-1)
lib/lp/archivepublisher/publishing.py (+31/-1)
lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py (+8/-1)
lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py (+42/-0)
lib/lp/archivepublisher/tests/test_publisher.py (+51/-31)
lib/lp/bugs/browser/bugwatch.py (+60/-1)
lib/lp/bugs/browser/configure.zcml (+6/-0)
lib/lp/bugs/browser/tests/bugwatch-views.txt (+83/-1)
lib/lp/bugs/configure.zcml (+4/-1)
lib/lp/bugs/doc/bug-watch-activity.txt (+9/-5)
lib/lp/bugs/doc/bugnotification-sending.txt (+1/-1)
lib/lp/bugs/doc/bugtask-expiration.txt (+1/-1)
lib/lp/bugs/doc/bugtask.txt (+1/-1)
lib/lp/bugs/doc/bugwatch.txt (+84/-0)
lib/lp/bugs/doc/checkwatches.txt (+1/-1)
lib/lp/bugs/doc/cve-update.txt (+2/-2)
lib/lp/bugs/interfaces/bugwatch.py (+52/-0)
lib/lp/bugs/model/bugwatch.py (+63/-3)
lib/lp/bugs/scripts/bugheat.py (+5/-5)
lib/lp/bugs/scripts/checkwatches/scheduler.py (+4/-4)
lib/lp/bugs/scripts/tests/test_bugheat.py (+3/-3)
lib/lp/bugs/stories/bugwatches/xx-bugwatch-errors.txt (+16/-2)
lib/lp/bugs/stories/bugwatches/xx-edit-bugwatch.txt (+123/-0)
lib/lp/bugs/templates/bugwatch-editform.pt (+4/-0)
lib/lp/bugs/templates/bugwatch-portlet-activity.pt (+44/-0)
lib/lp/bugs/tests/test_apportjob.py (+1/-1)
lib/lp/bugs/tests/test_bugheat.py (+1/-1)
lib/lp/code/configure.zcml (+68/-26)
lib/lp/code/doc/branch-merge-proposal-notifications.txt (+11/-5)
lib/lp/code/doc/codereviewcomment.txt (+18/-0)
lib/lp/code/interfaces/branchmergeproposal.py (+71/-13)
lib/lp/code/interfaces/codehosting.py (+12/-0)
lib/lp/code/interfaces/codereviewcomment.py (+4/-0)
lib/lp/code/mail/branch.py (+11/-12)
lib/lp/code/mail/branchmergeproposal.py (+8/-57)
lib/lp/code/mail/codereviewcomment.py (+14/-5)
lib/lp/code/mail/tests/test_branch.py (+33/-5)
lib/lp/code/mail/tests/test_branchmergeproposal.py (+150/-79)
lib/lp/code/mail/tests/test_codehandler.py (+22/-26)
lib/lp/code/mail/tests/test_codereviewcomment.py (+11/-2)
lib/lp/code/model/branchmergeproposal.py (+10/-13)
lib/lp/code/model/branchmergeproposaljob.py (+368/-25)
lib/lp/code/model/branchtarget.py (+0/-17)
lib/lp/code/model/codereviewcomment.py (+8/-0)
lib/lp/code/model/tests/test_branchcloud.py (+4/-3)
lib/lp/code/model/tests/test_branchmergeproposaljobs.py (+349/-0)
lib/lp/code/model/tests/test_branchmergeproposals.py (+30/-248)
lib/lp/code/model/tests/test_diff.py (+4/-0)
lib/lp/code/scripts/tests/test_create_merge_proposals.py (+3/-3)
lib/lp/code/scripts/tests/test_merge_proposal_jobs.py (+9/-47)
lib/lp/code/scripts/tests/test_reclaim_branch_space.py (+3/-4)
lib/lp/code/scripts/tests/test_scan_branches.py (+2/-2)
lib/lp/code/scripts/tests/test_sendbranchmail.py (+13/-8)
lib/lp/code/scripts/tests/test_update_preview_diffs.py (+0/-93)
lib/lp/code/scripts/tests/test_upgrade_branches.py (+4/-4)
lib/lp/code/stories/webservice/xx-code-import.txt (+1/-0)
lib/lp/code/subscribers/branchmergeproposal.py (+55/-0)
lib/lp/code/tests/helpers.py (+16/-0)
lib/lp/code/xmlrpc/codehosting.py (+33/-8)
lib/lp/code/xmlrpc/tests/test_codehosting.py (+86/-1)
lib/lp/codehosting/inmemory.py (+26/-3)
lib/lp/codehosting/scanner/tests/test_bzrsync.py (+6/-5)
lib/lp/codehosting/scanner/tests/test_mergedetection.py (+14/-2)
lib/lp/codehosting/sftp.py (+2/-5)
lib/lp/codehosting/tests/servers.py (+1/-1)
lib/lp/codehosting/tests/test_acceptance.py (+108/-114)
lib/lp/codehosting/vfs/branchfs.py (+106/-72)
lib/lp/codehosting/vfs/branchfsclient.py (+2/-2)
lib/lp/codehosting/vfs/tests/test_branchfs.py (+128/-51)
lib/lp/codehosting/vfs/tests/test_filesystem.py (+1/-1)
lib/lp/hardwaredb/doc/hwdb-submission.txt (+4/-4)
lib/lp/registry/doc/distribution-mirror.txt (+5/-5)
lib/lp/registry/doc/person-karma.txt (+1/-1)
lib/lp/registry/doc/sourceforge-remote-products.txt (+1/-1)
lib/lp/registry/doc/standing.txt (+2/-2)
lib/lp/services/job/runner.py (+29/-10)
lib/lp/services/job/tests/test_runner.py (+6/-2)
lib/lp/services/mail/sendmail.py (+27/-24)
lib/lp/soyuz/doc/buildd-slavescanner.txt (+2/-2)
lib/lp/soyuz/doc/gina.txt (+1/-1)
lib/lp/soyuz/doc/manage-chroot.txt (+1/-1)
lib/lp/soyuz/doc/package-cache-script.txt (+1/-1)
lib/lp/soyuz/scripts/publishdistro.py (+33/-20)
lib/lp/soyuz/scripts/tests/test_processupload.py (+1/-1)
lib/lp/testing/factory.py (+2/-0)
lib/lp/translations/doc/distroseries-translations-copy.txt (+4/-2)
lib/lp/translations/doc/fix_translation_credits.txt (+2/-1)
lib/lp/translations/doc/poexport-language-pack.txt (+2/-1)
lib/lp/translations/doc/poexport-request.txt (+1/-1)
lib/lp/translations/doc/pofile-verify-stats.txt (+2/-2)
lib/lp/translations/doc/rosetta-poimport-script.txt (+1/-1)
lib/lp/translations/doc/sourcepackagerelease-translations.txt (+3/-2)
lib/lp/translations/doc/translations-export-to-branch.txt (+1/-1)
lib/lp/translations/scripts/tests/test_translations_to_branch.py (+1/-1)
scripts/close-account.py (+19/-11)
To merge this branch: bzr merge lp:~mwhudson/launchpad/no-hosted-area
Reviewer Review Type Date Requested Status
Tim Penhey (community) Approve
Review via email: mp+23643@code.launchpad.net

Description of the change

Hi Tim,

This branch makes the ssh codehosting server use only one area (the mirrored area) rather than both the mirrored and hosted areas.
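
Concretely, get_lp_server() loses its separate upload and mirror directories and takes a single branch directory; this pair of calls, taken from the bzrplugins/lpserve.py hunk in the preview diff below, shows the shape of the change:

    # Before: hosted (upload) and mirrored areas are passed separately.
    lp_server = get_lp_server(
        int(user_id), branchfs_endpoint_url,
        upload_directory, mirror_directory, seen_new_branch.seen)

    # After: a single branch_directory backs both reads and writes.
    lp_server = get_lp_server(
        int(user_id), branchfs_endpoint_url, branch_directory,
        seen_new_branch.seen)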

In addition, on branch unlock, rather than requesting that the branch be mirrored, the codehosting server calls a method that directly updates the fields the puller used to update for hosted branches, hopefully reducing latency.
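
In outline, branchChanged ends up looking something like this (a simplified sketch assembled from the interdiff and preview diff below; the lookup helpers here are hypothetical and the fault handling is elided):

    def branchChanged(self, branch_id, stacked_on_location, last_revision_id):
        # Resolve the branch and its stacked-on branch (hypothetical helpers).
        branch = self._getBranch(branch_id)
        stacked_on_branch = self._getStackedOnBranch(stacked_on_location)
        if stacked_on_branch is None:
            branch.mirror_status_message = (
                'Invalid stacked on location: ' + stacked_on_location)
        branch.stacked_on = stacked_on_branch
        # Update the fields the puller used to maintain, with no mirror pass.
        branch.last_mirrored = UTC_NOW
        if branch.last_mirrored_id != last_revision_id:
            branch.last_mirrored_id = last_revision_id
            # New tip revision: queue a scan job straight away.
            getUtility(IBranchScanJobSource).create(branch)
        return True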

In the next pipe I extend this branchChanged endpoint to also record the branch format.

There's obviously no way this branch can land on its own; it would break many, many tests.

Cheers,
mwh

Revision history for this message
Tim Penhey (thumper) wrote :

lib/lp/code/xmlrpc/codehosting.py
in: def branchChanged(self, branch_id, stacked_on_location, last_revision_id):
  + branch.last_mirrored = datetime.datetime.now(pytz.UTC)
should probably be using UTC_NOW

lib/lp/codehosting/inmemory.py branchChanged event should use UTC_NOW too.

then:
test_branchChanged_sets_last_mirrored
can use:
     self.assertSqlAttributeEqualsDate(
        branch, 'last_mirrored', UTC_NOW)
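
For context on why a plain equality assertion won't do here: canonical.database.constants.UTC_NOW is an SQL constant (roughly CURRENT_TIMESTAMP AT TIME ZONE 'UTC') evaluated by the database rather than in Python, so the attribute only holds a real datetime once the store has flushed. A minimal sketch, assuming those semantics of assertSqlAttributeEqualsDate:

    from canonical.database.constants import UTC_NOW

    branch.last_mirrored = UTC_NOW  # an SQL expression, not a datetime
    # Re-read and compare the column on the database side, where
    # UTC_NOW is meaningful:
    self.assertSqlAttributeEqualsDate(branch, 'last_mirrored', UTC_NOW)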

def test_branchChanged_doesnt_create_scan_job_for_noop_change(self):
    # XXX Is this even the right thing to do?
It will be with your next pipe.

lib/lp/codehosting/vfs/branchfs.py
class LaunchpadServer (I think - around line 558)
the __init__ method still refers to the authserver, and there is also an
XXX comment that I'm wondering whether we can remove.

lib/lp/codehosting/vfs/tests/test_branchfs.py - still refers to an
authserver too.

# XXX Maaaybe we could complain on stderr here?
  - Not following our XXX format, and is this something we want to do?

# XXX: JonathanLange 2007-05-29: The 'chroot' line lacks a unit test.
I don't suppose you want to add a unit test for this?

General Note: we probably want to rename config.codehosting.mirrored_branches
at some stage.

Revision history for this message
Michael Hudson-Doyle (mwhudson) wrote :

On 19/04/10 16:20, Tim Penhey wrote:
> lib/lp/code/xmlrpc/codehosting.py
> in: def branchChanged(self, branch_id, stacked_on_location, last_revision_id):
> + branch.last_mirrored = datetime.datetime.now(pytz.UTC)
> should probably be using UTC_NOW
>
> lib/lp/codehosting/inmemory.py branchChanged event should use UTC_NOW too.
>
> then:
> test_branchChanged_sets_last_mirrored
> can use:
> self.assertSqlAttributeEqualsDate(
> branch, 'last_mirrored', UTC_NOW)

Yeah OK.

> def test_branchChanged_doesnt_create_scan_job_for_noop_change(self):
> # XXX Is this even the right thing to do?
> It will be with your next pipe.

Right, I'll delete the comment.

> lib/lp/codehosting/vfs/branchfs.py
> class LaunchpadServer (I think - around line 558)
> the __init__ method still refers to the authserver, and there is also an
> XXX comment that I'm wondering whether we can remove.

Grar, can I fix this in the later pipe that combines the two endpoints?

> lib/lp/codehosting/vfs/tests/test_branchfs.py - still refers to an
> authserver too.

This too.

> # XXX Maaaybe we could complain on stderr here?
> - Not following our XXX format, and is this something we want to do?

Nah, I'll delete the comment.

> # XXX: JonathanLange 2007-05-29: The 'chroot' line lacks a unit test.
> I don't suppose you want to add a unit test for this?

OK. It's a bit terrible though.

> General Note: we probably want to rename config.codehosting.mirrored_branches
> at some stage.

Given recent edge rollout funnies, I don't know how we'd do this :/ But
yeah, it doesn't really make sense.

Interdiff attached.

Cheers,
mwh

=== modified file 'lib/lp/code/xmlrpc/codehosting.py'
--- lib/lp/code/xmlrpc/codehosting.py 2010-04-19 10:45:14 +0000
+++ lib/lp/code/xmlrpc/codehosting.py 2010-04-19 21:57:33 +0000
@@ -23,6 +23,16 @@
 from zope.security.proxy import removeSecurityProxy
 from zope.security.management import endInteraction
 
+from canonical.database.constants import UTC_NOW
+from canonical.launchpad.validators import LaunchpadValidationError
+from canonical.launchpad.webapp import LaunchpadXMLRPCView
+from canonical.launchpad.webapp.authorization import check_permission
+from canonical.launchpad.webapp.interaction import setupInteractionForPerson
+from canonical.launchpad.webapp.interfaces import (
+    NameLookupFailed, NotFoundError)
+from canonical.launchpad.xmlrpc import faults
+from canonical.launchpad.xmlrpc.helpers import return_fault
+
 from lp.code.errors import UnknownBranchTypeError
 from lp.code.enums import BranchType
 from lp.code.interfaces.branch import BranchCreationException
@@ -38,14 +48,6 @@
 from lp.registry.interfaces.product import NoSuchProduct
 from lp.services.scripts.interfaces.scriptactivity import IScriptActivitySet
 from lp.services.utils import iter_split
-from canonical.launchpad.validators import LaunchpadValidationError
-from canonical.launchpad.webapp import LaunchpadXMLRPCView
-from canonical.launchpad.webapp.authorization import check_permission
-from canonical.launchpad.webapp.interaction import setupInteractionForPerson
-from canonical.launchpad.webapp.interfaces import (
-    NameLookupFailed, NotFoundError)
-from canonical.launchpad.xmlrpc import faults
-from canonical.launchpad.xmlrpc.helpers import return_fault
 
 
 UTC = pytz.timezone('UTC')
@@ -262,7 +264,7 @@
             branch.mirror_status_message = (
                 'Invalid stacked on location: ' + stacked_on_location)
         branch.stacked_on = stacked_on_branch
-        branch.last_mirrored = datetime.datetime.now(pytz.UTC)
+        branch.last_mirrored = UTC_NOW
         if branch.last_mirrored_id != last_revision_id:
            branch.last_mirrored_id = last_revision_id
            getUtility(IBranchScanJobSource).create(branch)
=== modified file 'lib/lp/code/xmlrpc/tests/test_codehosting.py'
--- lib/lp/code/xmlrpc/tests/test_codehosting.py 2010-04-19 04:06:23 +0000
+++ lib/lp/code/xmlrpc/tests/test_codehosting.py 2010-04-19 22:09:34 +0000
@@ -15,7 +15,6 @@
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy
 
-from lp.codehosting.inmemory import InMemoryFrontend
 from canonical.database.constants import UTC_NOW
 from canonical.launchpad.ftests import ANONYMOUS, login, logout
 from lp.services.scripts.interfaces.scriptactivity import (
@@ -40,6 +39,8 @@
     BranchFileSystem, BranchPuller, LAUNCHPAD_ANONYMOUS, LAUNCHPAD_SERVICES,
     run_with_login)
 
+from lp.codehosting.inmemory import InMemoryFrontend
+
 
 UTC = pytz.timezone('UTC')
 
@@ -739,9 +740,11 @@
         # current time.
         branch = self.factory.makeAnyBranch()
         self.branchfs.branchChanged(branch.id, '', '')
-        # We can't test "now" precisely, but lets check that last_mirrored was
-        # set to _something_.
-        self.assertIsNot(None, branch.last_mirrored)
+        if self.frontend == LaunchpadDatabaseFrontend:
+            self.assertSqlAttributeEqualsDate(
+                branch, 'last_mirrored', UTC_NOW)
+        else:
+            self.assertIs(UTC_NOW, branch.last_mirrored)
 
     def test_branchChanged_records_bogus_stacked_on_url(self):
         # If a bogus location is passed in as the stacked_on parameter,
@@ -772,7 +775,7 @@
 
     def test_branchChanged_creates_scan_job(self):
         # branchChanged() creates a scan job for the branch.
-        if not isinstance(self.frontend, LaunchpadDatabaseFrontend):
+        if self.frontend != LaunchpadDatabaseFrontend:
             return
         branch = self.factory.makeAnyBranch()
         jobs = list(getUtility(IBranchScanJobSource).iterReady())
@@ -782,8 +785,7 @@
         self.assertEqual(1, len(jobs))
 
     def test_branchChanged_doesnt_create_scan_job_for_noop_change(self):
-        # XXX Is this even the right thing to do?
-        if not isinstance(self.frontend, LaunchpadDatabaseFrontend):
+        if self.frontend != LaunchpadDatabaseFrontend:
             return
         branch = self.factory.makeAnyBranch()
         removeSecurityProxy(branch).last_mirrored_id = 'rev1'
=== modified file 'lib/lp/codehosting/inmemory.py'
--- lib/lp/codehosting/inmemory.py 2010-04-19 04:06:23 +0000
+++ lib/lp/codehosting/inmemory.py 2010-04-19 22:04:00 +0000
@@ -9,18 +9,18 @@
     'XMLRPCWrapper',
     ]
 
-import datetime
 import operator
 from xmlrpclib import Fault
 
 from bzrlib.urlutils import escape, unescape
 
-import pytz
-
 from zope.component import adapter, getSiteManager
 from zope.interface import implementer
 
 from canonical.database.constants import UTC_NOW
+from canonical.launchpad.validators import LaunchpadValidationError
+from canonical.launchpad.xmlrpc import faults
+
 from lp.code.errors import UnknownBranchTypeError
 from lp.code.model.branchnamespace import BranchNamespaceSet
 from lp.code.model.branchtarget import (
@@ -31,12 +31,10 @@
 from lp.code.interfaces.codehosting import (
     BRANCH_TRANSPORT, CONTROL_TRANSPORT, LAUNCHPAD_ANONYMOUS,
     LAUNCHPAD_SERVICES)
+from lp.code.xmlrpc.codehosting import datetime_from_tuple
 from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.services.utils import iter_split
 from lp.testing.factory import ObjectFactory
-from canonical.launchpad.validators import LaunchpadValidationError
-from lp.code.xmlrpc.codehosting import datetime_from_tuple
-from canonical.launchpad.xmlrpc import faults
 
 
 class FakeStore:
@@ -637,7 +635,7 @@
         branch.mirror_status_message = (
             'Invalid stacked on location: ' + stacked_on_location)
         branch.stacked_on = stacked_on_branch
-        branch.last_mirrored = datetime.datetime.now(pytz.UTC)
+        branch.last_mirrored = UTC_NOW
         if branch.last_mirrored_id != last_revision_id:
             branch.last_mirrored_id = last_revision_id
         return True
=== modified file 'lib/lp/codehosting/vfs/branchfs.py'
--- lib/lp/codehosting/vfs/branchfs.py 2010-04-16 00:55:24 +0000
+++ lib/lp/codehosting/vfs/branchfs.py 2010-04-19 22:33:52 +0000
@@ -639,7 +639,6 @@
         # Assume it's a relative path.
         return stacked_on_url
     uri = URI(stacked_on_url)
-    # XXX Maaaybe we could complain on stderr here?
     if uri.scheme not in ['http', 'bzr+ssh', 'sftp']:
         return stacked_on_url
     launchpad_domain = config.vhost.mainsite.hostname
@@ -710,7 +709,6 @@
 
     branch_url = urlutils.local_path_to_url(branch_directory)
     branchfs_client = xmlrpclib.ServerProxy(branchfs_endpoint_url)
-    # XXX: JonathanLange 2007-05-29: The 'chroot' line lacks a unit test.
    branch_transport = get_chrooted_transport(branch_url)
    lp_server = LaunchpadServer(
        BlockingProxy(branchfs_client), user_id, branch_transport,
=== modified file 'lib/lp/codehosting/vfs/tests/test_branchfs.py'
--- lib/lp/codehosting/vfs/tests/test_branchfs.py 2010-04-09 06:28:34 +0000
+++ lib/lp/codehosting/vfs/tests/test_branchfs.py 2010-04-19 22:33:25 +0000
@@ -17,6 +17,7 @@
 from bzrlib.transport import (
     get_transport, _get_protocol_handlers, register_transport, Server,
     unregister_transport)
+from bzrlib.transport.chroot import ChrootTransport
 from bzrlib.transport.memory import MemoryServer, MemoryTransport
 from bzrlib.urlutils import escape, local_path_to_url
 
@@ -26,7 +27,7 @@
 from lp.codehosting.vfs.branchfs import (
     AsyncLaunchpadTransport, BranchTransportDispatch,
     DirectDatabaseLaunchpadServer, LaunchpadInternalServer, LaunchpadServer,
-    TransportDispatch, UnknownTransportType, branch_id_to_path)
+    TransportDispatch, UnknownTransportType, branch_id_to_path, get_lp_server)
 from lp.codehosting.inmemory import InMemoryFrontend, XMLRPCWrapper
 from lp.codehosting.sftp import FatLocalTransport
 from lp.codehosting.vfs.transport import AsyncVirtualTransport
@@ -1005,6 +1006,17 @@
         '/%s/.bzr/goodbye.txt' % self.read_only_branch)
 
 
+class TestGetLPServer(TestCase):
+    """Tests for `get_lp_server`."""
+
+    def test_chrooting(self):
+        # Test that get_lp_server return a server that ultimately backs onto a
+        # ChrootTransport.
+        lp_server = get_lp_server(1, 'http://xmlrpc.example.invalid', '')
+        transport = lp_server._transport_dispatch._rw_dispatch.base_transport
+        self.assertIsInstance(transport, ChrootTransport)
+
+
 def test_suite():
     return unittest.TestLoader().loadTestsFromName(__name__)
 
Revision history for this message
Tim Penhey (thumper) wrote :

 merge approved

On Tue, 20 Apr 2010 10:39:16 you wrote:
> On 19/04/10 16:20, Tim Penhey wrote:
> > lib/lp/codehosting/vfs/branchfs.py
> > class LaunchpadServer (I think - around line 558)
> > the __init__ method still refers to the authserver, and there is also an
> > XXX comment that I'm wondering whether we can remove.
>
> Grar, can I fix this in the later pipe that combines the two endpoints?

Yep, sure.

> > lib/lp/codehosting/vfs/tests/test_branchfs.py - still refers to an
> > authserver too.
>
> This too.

Yes.

Tim

review: Approve

Preview Diff

=== modified file 'Makefile'
--- Makefile 2010-04-20 19:10:35 +0000
+++ Makefile 2010-04-27 02:13:38 +0000
@@ -209,9 +209,9 @@
 ftest_inplace: inplace
 	bin/test -f $(TESTFLAGS) $(TESTOPTS)
 
-mpcreationjobs:
-	# Handle merge proposal creations.
-	$(PY) cronscripts/mpcreationjobs.py
+merge-proposal-jobs:
+	# Handle merge proposal email jobs.
+	$(PY) cronscripts/merge-proposal-jobs.py -v
 
 run: check_schema inplace stop
 	$(RM) thread*.request
@@ -255,8 +255,7 @@
 	# Scan branches from the filesystem into the database.
 	$(PY) cronscripts/scan_branches.py
 
-
-sync_branches: pull_branches scan_branches mpcreationjobs
+sync_branches: pull_branches scan_branches merge-proposal-jobs
 
 $(BZR_VERSION_INFO):
 	scripts/update-bzr-version-info.sh
=== modified file 'bzrplugins/lpserve.py'
--- bzrplugins/lpserve.py 2010-03-24 00:43:45 +0000
+++ bzrplugins/lpserve.py 2010-04-27 02:13:38 +0000
@@ -85,8 +85,8 @@
         finally:
             ui.ui_factory = old_factory
 
-    def run(self, user_id, port=None, upload_directory=None,
-            mirror_directory=None, branchfs_endpoint_url=None, inet=False):
+    def run(self, user_id, port=None, branch_directory=None,
+            branchfs_endpoint_url=None, inet=False):
         from lp.codehosting.bzrutils import install_oops_handler
         from lp.codehosting.vfs import get_lp_server, hooks
         install_oops_handler(user_id)
@@ -94,8 +94,8 @@
         resource.setrlimit(resource.RLIMIT_AS, (four_gig, four_gig))
         seen_new_branch = hooks.SetProcTitleHook()
         lp_server = get_lp_server(
-            int(user_id), branchfs_endpoint_url,
-            upload_directory, mirror_directory, seen_new_branch.seen)
+            int(user_id), branchfs_endpoint_url, branch_directory,
+            seen_new_branch.seen)
         lp_server.start_server()
 
         old_lockdir_timeout = lockdir._DEFAULT_TIMEOUT_SECONDS
=== modified file 'configs/development/launchpad-lazr.conf'
--- configs/development/launchpad-lazr.conf 2010-04-19 03:44:27 +0000
+++ configs/development/launchpad-lazr.conf 2010-04-27 02:13:38 +0000
@@ -201,9 +201,9 @@
 port: 11217
 memory_size: 1
 
-[mpcreationjobs]
+[merge_proposal_jobs]
 error_dir: /var/tmp/codehosting.test
-oops_prefix: DMPCR
+oops_prefix: DMPJ
 
 [personalpackagearchive]
 root: /var/tmp/ppa/
=== modified file 'configs/testrunner/launchpad-lazr.conf'
--- configs/testrunner/launchpad-lazr.conf 2010-04-19 03:44:27 +0000
+++ configs/testrunner/launchpad-lazr.conf 2010-04-27 02:13:38 +0000
@@ -34,7 +34,7 @@
 bzr_lp_prefix: lp://dev/
 hosted_branches_root: /tmp/sftp-test/branches
 host_key_pair_path: lib/lp/codehosting/sshserver/tests/keys
-port: tcp:22222:interface=127.0.0.1
+port: tcp:22222:interface=bazaar.launchpad.dev
 error_dir: /var/tmp/codehosting.test
 oops_prefix: SMPSSH
 access_log: /tmp/test-codehosting-access.log
@@ -171,12 +171,8 @@
 # processes spawned through some other mechanism.
 port: 11242
 
-[mpcreationjobs]
-oops_prefix: TMPCJ
-error_dir: /var/tmp/codehosting.test
-
-[update_preview_diffs]
-oops_prefix: TUPD
+[merge_proposal_jobs]
+oops_prefix: TMPJ
 error_dir: /var/tmp/codehosting.test
 
 [upgrade_branches]
=== renamed file 'cronscripts/mpcreationjobs.py' => 'cronscripts/merge-proposal-jobs.py'
--- cronscripts/mpcreationjobs.py 2010-02-16 15:25:52 +0000
+++ cronscripts/merge-proposal-jobs.py 2010-04-27 02:13:38 +0000
@@ -1,48 +1,40 @@
 #!/usr/bin/python2.5 -S
 #
-# Copyright 2009 Canonical Ltd. This software is licensed under the
+# Copyright 2009, 2010 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 # pylint: disable-msg=W0403
 
-"""Handle new BranchMergeProposals.
+"""Handle jobs for BranchMergeProposals.
 
-This script generates a diff for the merge proposal if needed, then notifies
-all interested parties about the merge proposal.
+This script handles all job types for branch merge proposals.
 """
 
 __metaclass__ = type
 
 import _pythonpath
-from zope.component import getUtility
 
-from canonical.config import config
-from lp.codehosting.vfs import get_scanner_server
-from lp.services.job.runner import JobRunner
+# The following line is a horrible hack, but unfortunately necessary right now
+# to stop import errors from circular imports.
+import canonical.launchpad.interfaces
 from lp.code.interfaces.branchmergeproposal import (
-    IMergeProposalCreatedJobSource,)
-from lp.services.scripts.base import LaunchpadCronScript
-from canonical.launchpad.webapp.errorlog import globalErrorUtility
+    IBranchMergeProposalJobSource,
+    )
+from lp.services.job.runner import JobCronScript, TwistedJobRunner
 
 
-class RunMergeProposalCreatedJobs(LaunchpadCronScript):
-    """Run merge proposal creation jobs."""
+class RunMergeProposalJobs(JobCronScript):
+    """Run all merge proposal jobs."""
 
-    def main(self):
-        globalErrorUtility.configure('mpcreationjobs')
-        job_source = getUtility(IMergeProposalCreatedJobSource)
-        runner = JobRunner.fromReady(job_source, self.logger)
-        server = get_scanner_server()
-        server.start_server()
-        try:
-            runner.runAll()
-        finally:
-            server.stop_server()
-        self.logger.info(
-            'Ran %d MergeProposalCreatedJobs.', len(runner.completed_jobs))
+    config_name = 'merge_proposal_jobs'
+    source_interface = IBranchMergeProposalJobSource
+
+    def __init__(self):
+        super(RunMergeProposalJobs, self).__init__(
+            runner_class=TwistedJobRunner,
+            script_name='merge-proposal-jobs')
 
 
 if __name__ == '__main__':
-    script = RunMergeProposalCreatedJobs(
-        'mpcreationjobs', config.mpcreationjobs.dbuser)
+    script = RunMergeProposalJobs()
     script.lock_and_run()
=== modified file 'cronscripts/mirror-prober.sh'
--- cronscripts/mirror-prober.sh 2009-10-17 14:11:40 +0000
+++ cronscripts/mirror-prober.sh 2010-04-27 02:13:38 +0000
@@ -39,10 +39,10 @@
 cd /srv/launchpad.net/production/launchpad/cronscripts
 
 echo '== Distribution mirror prober (archive)' `date` ==
-python2.5 distributionmirror-prober.py --content-type=archive --max-mirrors=20
+python2.5 -S distributionmirror-prober.py --content-type=archive --max-mirrors=20
 
 echo '== Distribution mirror prober (cdimage)' `date` ==
-python2.5 distributionmirror-prober.py --content-type=cdimage --max-mirrors=30
+python2.5 -S distributionmirror-prober.py --content-type=cdimage --max-mirrors=30
 
 rm -f $LOCK
 
=== modified file 'cronscripts/nightly.sh'
--- cronscripts/nightly.sh 2009-10-17 14:11:40 +0000
+++ cronscripts/nightly.sh 2010-04-27 02:13:38 +0000
@@ -3,11 +3,11 @@
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
-# This script performs nightly chores. It should be run from 
+# This script performs nightly chores. It should be run from
 # cron as the launchpad user once a day. Typically the output
 # will be sent to an email address for inspection.
 
-# Note that http/ftp proxies are needed by the product 
+# Note that http/ftp proxies are needed by the product
 # release finder
 
 # Only run this script on loganberry
@@ -42,41 +42,41 @@
 cd /srv/launchpad.net/production/launchpad/cronscripts
 
 echo == Expiring memberships `date` ==
-python2.5 flag-expired-memberships.py -q
+python2.5 -S flag-expired-memberships.py -q
 
 echo == Allocating revision karma `date` ==
-python2.5 allocate-revision-karma.py -q
+python2.5 -S allocate-revision-karma.py -q
 
 echo == Recalculating karma `date` ==
-python2.5 foaf-update-karma-cache.py -q
+python2.5 -S foaf-update-karma-cache.py -q
 
 echo == Updating cached statistics `date` ==
-python2.5 update-stats.py -q
+python2.5 -S update-stats.py -q
 
 echo == Expiring questions `date` ==
-python2.5 expire-questions.py
+python2.5 -S expire-questions.py
 
 ### echo == Expiring bugs `date` ==
-### python2.5 expire-bugtasks.py
+### python2.5 -S expire-bugtasks.py
 
 # checkwatches.py is scheduled in the /code/pqm/launchpad_crontabs branch.
 ### echo == Updating bug watches `date` ==
-### python2.5 checkwatches.py
+### python2.5 -S checkwatches.py
 
 echo == Updating bugtask target name caches `date` ==
-python2.5 update-bugtask-targetnamecaches.py -q
+python2.5 -S update-bugtask-targetnamecaches.py -q
 
 echo == Updating personal standings `date` ==
-python2.5 update-standing.py -q
+python2.5 -S update-standing.py -q
 
 echo == Updating CVE database `date` ==
-python2.5 update-cve.py -q
+python2.5 -S update-cve.py -q
 
 echo == Updating package cache `date` ==
-python2.5 update-pkgcache.py -q
+python2.5 -S update-pkgcache.py -q
 
 echo == Product Release Finder `date` ==
-python2.5 product-release-finder.py -q
+python2.5 -S product-release-finder.py -q
 
 
 rm -f $LOCK
 
=== modified file 'cronscripts/publishing/cron.germinate'
--- cronscripts/publishing/cron.germinate 2010-03-02 09:58:34 +0000
+++ cronscripts/publishing/cron.germinate 2010-04-27 02:13:38 +0000
@@ -127,7 +127,7 @@
 echo " done."
 
 # now generate the Supported extra overrides
-$MAINTAINCE_CHECK $suite > "$MISCROOT/more-extra.override.$suite.main.supported"
+$MAINTAINCE_CHECK $suite > "$MISCROOT/more-extra.override.$suite.main.supported" 2> _maintenance-check.stderr
 if [ $? -eq 0 ]; then
 	cat "$MISCROOT/more-extra.override.$suite.main.supported" >> "$MISCROOT/more-extra.override.$suite.main.new"
 fi
=== modified file 'cronscripts/publishing/maintenance-check.py'
--- cronscripts/publishing/maintenance-check.py 2010-01-22 13:57:45 +0000
+++ cronscripts/publishing/maintenance-check.py 2010-04-27 02:13:38 +0000
@@ -6,9 +6,19 @@
 # https://code.edge.launchpad.net/~mvo/ubuntu-maintenance-check/python-port
 # (where it will vanish once taken here)
 
+# this warning filter is only needed on older versions of python-apt,
+# once the machine runs lucid it can be removed
+import warnings
+warnings.filterwarnings("ignore","apt API not stable yet")
+import apt
+warnings.resetwarnings()
+
+import apt_pkg
 import logging
+import os
 import sys
 import urllib2
+import urlparse
 
 from optparse import OptionParser
 
@@ -31,8 +41,8 @@
 SUPPORTED_ARCHES = PRIMARY_ARCHES + ["armel"]
 
 # what defines the seeds is documented in wiki.ubuntu.com/SeedManagement
-SERVER_SEEDS = [ "supported-server"]
-DESKTOP_SEEDS = ["ship", "supported-desktop"]
+SERVER_SEEDS = [ "supported-server", "server-ship"]
+DESKTOP_SEEDS = ["ship", "supported-desktop", "supported-desktop-extra"]
 SUPPORTED_SEEDS = [ "all" ]
 
 # normal support timeframe
@@ -51,32 +61,111 @@
 
 # distro names and if they get LTS support (order is important)
 DISTRO_NAMES_AND_LTS_SUPPORT = [ ("ubuntu", True),
-                                 ("kubuntu", False),
-                                 ("edubuntu", False),
+                                 ("kubuntu", True),
                                  ("netbook", False),
                                ]
 
 # germinate output base directory
 BASE_URL = "http://people.canonical.com/~ubuntu-archive/germinate-output/"
 
+# hints dir url, hints file is "$distro.hints" by default
+# (e.g. lucid.hints)
+HINTS_DIR_URL = "http://people.canonical.com/~ubuntu-archive/seeds/platform.%s/SUPPORTED_HINTS"
+
+# we need the archive root to parse the Sources file to support
+# by-source hints
+ARCHIVE_ROOT = "http://archive.ubuntu.com/ubuntu"
+
 # support timeframe tag used in the Packages file
 SUPPORT_TAG = "Supported"
 
-
-def get_structure(name, version):
-    """ 
-    get structure file for named distro and distro version 
-    (e.g. kubuntu, lucid)
-    """
-    f = urllib2.urlopen("%s/%s.%s/structure" % (BASE_URL, name, version))
+def get_binaries_for_source_pkg(srcname):
+    """ Return all binary package names for the given source package name.
+
+    :param srcname: The source package name.
+    :return: A list of binary package names.
+    """
+    pkgnames = set()
+    recs = apt_pkg.GetPkgSrcRecords()
+    while recs.Lookup(srcname):
+        for binary in recs.Binaries:
+            pkgnames.add(binary)
+    return pkgnames
+
+def expand_src_pkgname(pkgname):
+    """ Expand a package name if it is prefixed with src.
+
+    If the package name is prefixed with src it will be expanded
+    to a list of binary package names. Otherwise the original
+    package name will be returned.
+
+    :param pkgname: The package name (that may include src:prefix).
+    :return: A list of binary package names (the list may be one element long).
+    """
+    if not pkgname.startswith("src:"):
+        return [pkgname]
+    return get_binaries_for_source_pkg(pkgname.split("src:")[1])
+
+def create_and_update_deb_src_source_list(distroseries):
+    """ Create sources.list and update cache.
+
+    This creates a sources.list file with deb-src entries for a given
+    distroseries and apt.Cache.update() to make sure the data is up-to-date.
+    :param distro: The code name of the distribution series (e.g. lucid).
+    :return: None
+    :raises: IOError: When cache update fails.
+    """
+    # apt root dir
+    rootdir="./aptroot.%s" % distroseries
+    sources_list_dir = os.path.join(rootdir, "etc","apt")
+    if not os.path.exists(sources_list_dir):
+        os.makedirs(sources_list_dir)
+    sources_list = open(os.path.join(sources_list_dir, "sources.list"),"w")
+    for pocket in [
+        "%s" % distroseries,
+        "%s-updates" % distroseries,
+        "%s-security" % distroseries]:
+        sources_list.write(
+            "deb-src %s %s main restricted\n" % (
+                ARCHIVE_ROOT, pocket))
+        sources_list.write(
+            "deb %s %s main restricted\n" % (
+                ARCHIVE_ROOT, pocket))
+    sources_list.close()
+    # create required dirs/files for apt.Cache(rootdir) to work on older
+    # versions of python-apt. once lucid is used it can be removed
+    for d in ["var/lib/dpkg",
+              "var/cache/apt/archives/partial",
+              "var/lib/apt/lists/partial"]:
+        if not os.path.exists(os.path.join(rootdir,d)):
+            os.makedirs(os.path.join(rootdir,d))
+    if not os.path.exists(os.path.join(rootdir,"var/lib/dpkg/status")):
+        open(os.path.join(rootdir,"var/lib/dpkg/status"),"w")
+    # open cache with our just prepared rootdir
+    cache = apt.Cache(rootdir=rootdir)
+    try:
+        cache.update(apt.progress.FetchProgress())
+    except SystemError:
+        logging.exception("cache.update() failed")
+
+def get_structure(distroname, version):
+    """ Get structure file conent for named distro and distro version.
+
+    :param name: Name of the distribution (e.g. kubuntu, ubuntu, xubuntu).
+    :param version: Code name of the distribution version (e.g. lucid).
+    :return: List of strings with the structure file content
+    """
+    f = urllib2.urlopen("%s/%s.%s/structure" % (BASE_URL, distroname, version))
     structure = f.readlines()
     f.close()
     return structure
 
 def expand_seeds(structure, seedname):
-    """ 
-    expand seed by its dependencies using the strucure file
-    returns a set() for the seed dependencies (excluding the original seedname)
+    """ Expand seed by its dependencies using the strucure file.
+
+    :param structure: The content of the STRUCTURE file as string list.
+    :param seedname: The name of the seed as string that needs to be expanded.
+    :return: a set() for the seed dependencies (excluding the original seedname)
     """
     seeds = []
     for line in structure:
@@ -122,6 +211,28 @@
             in_seeds.add(s)
     return in_seeds
 
+def compare_support_level(x, y):
+    """
+    compare two support level strings of the form 18m, 3y etc
+    :parm x: the first support level
+    :parm y: the second support level
+    :return: negative if x < y, zero if x==y, positive if x > y
+    """
+    def support_to_int(support_time):
+        """
+        helper that takes a support time string and converts it to
+        a integer for cmp()
+        """
+        # allow strings like "5y (kubuntu-common)
+        x = support_time.split()[0]
+        if x.endswith("y"):
+            return 12 * int(x[0:-1])
+        elif x.endswith("m"):
+            return int(x[0:-1])
+        else:
+            raise ValueError("support time '%s' has to end with y or m" % x)
+    return cmp(support_to_int(x), support_to_int(y))
+
 def get_packages_support_time(structure, name, pkg_support_time, support_timeframe_list):
     """
     input a structure file and a list of pair<timeframe, seedlist>
@@ -137,8 +248,15 @@
         for pkg in pkgs_in_seeds[seed]:
             if not pkg in pkg_support_time:
                 pkg_support_time[pkg] = timeframe
-            if options.with_seeds:
-                pkg_support_time[pkg] += " (%s)" % ", ".join(what_seeds(pkg, pkgs_in_seeds))
+            else:
+                old_timeframe = pkg_support_time[pkg]
+                if compare_support_level(old_timeframe, timeframe) < 0:
+                    logging.debug("overwriting %s from %s to %s" % (
+                        pkg, old_timeframe, timeframe))
+                    pkg_support_time[pkg] = timeframe
+            if options.with_seeds:
+                pkg_support_time[pkg] += " (%s)" % ", ".join(what_seeds(pkg, pkgs_in_seeds))
+
 
     return pkg_support_time
 
@@ -150,6 +268,8 @@
     parser.add_option("--source-packages", "", default=False,
                       action="store_true",
                       help="show as source pkgs")
+    parser.add_option("--hints-file", "", default=None,
+                      help="use diffenrt use hints file location")
     (options, args) = parser.parse_args()
 
     # init
@@ -160,6 +280,17 @@
         sys.exit(1)
     else:
        distro = "lucid"
+
+    # make sure our deb-src information is up-to-date
+    create_and_update_deb_src_source_list(distro)
+
+    if options.hints_file:
+        hints_file = options.hints_file
+        (schema, netloc, path, query, fragment) = urlparse.urlsplit(hints_file)
+        if not schema:
+            hints_file = "file:%s" % path
+    else:
+        hints_file = HINTS_DIR_URL % distro
 
     # go over the distros we need to check
     pkg_support_time = {}
@@ -175,20 +306,77 @@
     else:
         support_timeframe = SUPPORT_TIMEFRAME
     get_packages_support_time(structure, name, pkg_support_time, support_timeframe)
+
+    # now go over the bits in main that we have not seen (because
+    # they are not in any seed and got added manually into "main"
+    for arch in PRIMARY_ARCHES:
+        rootdir="./aptroot.%s" % distro
+        apt_pkg.Config.Set("APT::Architecture", arch)
+        cache = apt.Cache(rootdir=rootdir)
+        try:
+            cache.update(apt.progress.FetchProgress())
+        except SystemError:
+            logging.exception("cache.update() failed")
+        cache.open(apt.progress.OpProgress())
+        for pkg in cache:
+            if not pkg.name in pkg_support_time:
+                pkg_support_time[pkg.name] = support_timeframe[-1][0]
+                logging.warn("add package in main but not in seeds %s with %s" %
+                             (pkg.name, pkg_support_time[pkg.name]))
+
+    # now check the hints file that is used to overwrite
+    # the default seeds
+    try:
+        for line in urllib2.urlopen(hints_file):
+            line = line.strip()
+            if not line or line.startswith("#"):
+                continue
+            try:
+                (raw_pkgname, support_time) = line.split()
+                for pkgname in expand_src_pkgname(raw_pkgname):
+                    if support_time == 'unsupported':
+                        try:
+                            del pkg_support_time[pkgname]
+                            sys.stderr.write("hints-file: marking %s unsupported\n" % pkgname)
+                        except KeyError:
+                            pass
+                    else:
+                        if pkg_support_time.get(pkgname) != support_time:
+                            sys.stderr.write(
+                                "hints-file: changing %s from %s to %s\n" % (
+                                    pkgname, pkg_support_time.get(pkgname),
+                                    support_time))
+                            pkg_support_time[pkgname] = support_time
+            except:
+                logging.exception("can not parse line '%s'" % line)
+    except urllib2.HTTPError, e:
+        if e.getcode() != 404:
+            raise
+        sys.stderr.write("hints-file: %s gave 404 error\n" % hints_file)
 
     # output suitable for the extra-override file
     for pkgname in sorted(pkg_support_time.keys()):
-        # go over the supported arches, they are divided in 
-        # first-class (PRIMARY) and second-class with different
-        # support levels
-        for arch in SUPPORTED_ARCHES:
-            # full LTS support
-            if arch in PRIMARY_ARCHES:
-                print "%s/%s %s %s" % (
-                    pkgname, arch, SUPPORT_TAG, pkg_support_time[pkgname])
-            else:
-                # not a LTS supported architecture, gets only regular
-                # support_timeframe
-                print "%s/%s %s %s" % (
-                    pkgname, arch, SUPPORT_TAG, SUPPORT_TIMEFRAME[0][0])
+        # special case, the hints file may contain overrides that
+        # are arch-specific (like zsh-doc/armel)
+        if "/" in pkgname:
+            print "%s %s %s" % (
+                pkgname, SUPPORT_TAG, pkg_support_time[pkgname])
+        else:
+            # go over the supported arches, they are divided in
+            # first-class (PRIMARY) and second-class with different
+            # support levels
+            for arch in SUPPORTED_ARCHES:
+                # ensure we do not overwrite arch-specific overwrites
+                pkgname_and_arch = "%s/%s" % (pkgname, arch)
+                if pkgname_and_arch in pkg_support_time:
+                    break
+                if arch in PRIMARY_ARCHES:
+                    # arch with full LTS support
                    print "%s %s %s" % (
                        pkgname_and_arch, SUPPORT_TAG, pkg_support_time[pkgname])
                else:
                    # not a LTS supported architecture, gets only regular
                    # support_timeframe
                    print "%s %s %s" % (
                        pkgname_and_arch, SUPPORT_TAG, SUPPORT_TIMEFRAME[0][0])
 
=== removed file 'cronscripts/update_preview_diffs.py'
--- cronscripts/update_preview_diffs.py 2010-02-16 15:25:52 +0000
+++ cronscripts/update_preview_diffs.py 1970-01-01 00:00:00 +0000
@@ -1,38 +0,0 @@
-#!/usr/bin/python2.5 -S
-#
-# Copyright 2009 Canonical Ltd. This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-# pylint: disable-msg=W0403
-
-"""Update or create previews diffs for branch merge proposals."""
-
-__metaclass__ = type
-
-import _pythonpath
-
-from lp.services.job.runner import JobCronScript, JobRunner, TwistedJobRunner
-from lp.code.interfaces.branchmergeproposal import (
-    IUpdatePreviewDiffJobSource,)
-
-
-class RunUpdatePreviewDiffJobs(JobCronScript):
-    """Run UpdatePreviewDiff jobs."""
-
-    config_name = 'update_preview_diffs'
-    source_interface = IUpdatePreviewDiffJobSource
-
-    def __init__(self):
-        super(RunUpdatePreviewDiffJobs, self).__init__()
-        if self.options.twisted:
-            self.runner_class = TwistedJobRunner
-        else:
-            self.runner_class = JobRunner
-
-    def add_my_options(self):
-        self.parser.add_option('--twisted', action='store_true')
-
-
-if __name__ == '__main__':
-    script = RunUpdatePreviewDiffJobs()
-    script.lock_and_run()
=== modified file 'database/replication/Makefile'
--- database/replication/Makefile 2010-03-26 08:43:39 +0000
+++ database/replication/Makefile 2010-04-27 02:13:38 +0000
@@ -44,14 +44,16 @@
4444
45PGMASSACRE=../../utilities/pgmassacre.py45PGMASSACRE=../../utilities/pgmassacre.py
4646
47# Turn off silencing for now so we details on staging deployments.47CREATEDB_83=createdb --encoding=UTF8
48CREATEDB_84=createdb --encoding=UTF8 --locale=C --template=template0
49CREATEDB=${CREATEDB_83}
50
51# Turn off output silencing so we can see details of staging deployments.
52# Without the timestamps, we are unable to estimate production deployment
53# times.
48#SHHH=../../utilities/shhh.py54#SHHH=../../utilities/shhh.py
49SHHH=55SHHH=
5056
51AUTHDB_TABLES=\
52 account accountpassword authkoken emailaddress \
53 openidassociation openidauthorization openidnonce openidrpsummary
54
55default:57default:
56 echo Usage: make [start|stop|restart]58 echo Usage: make [start|stop|restart]
5759
@@ -76,7 +78,7 @@
76 78
77 # Replicate it again, so we can test with multiple slaves.79 # Replicate it again, so we can test with multiple slaves.
78 -${PGMASSACRE} launchpad_dev_slave280 -${PGMASSACRE} launchpad_dev_slave2
79 createdb --encoding=UTF8 launchpad_dev_slave281 ${CREATEDB} launchpad_dev_slave2
80 LPCONFIG=${DEV_CONFIG} ./slon_ctl.py start \82 LPCONFIG=${DEV_CONFIG} ./slon_ctl.py start \
81 node3_node 'dbname=launchpad_dev_slave2 user=slony'83 node3_node 'dbname=launchpad_dev_slave2 user=slony'
82 LPCONFIG=${DEV_CONFIG} ./new-slave.py 3 launchpad_dev_slave284 LPCONFIG=${DEV_CONFIG} ./new-slave.py 3 launchpad_dev_slave2
@@ -96,23 +98,12 @@
96 _MASTER=lpmain_staging_new _SLAVE=lpmain_staging_slave_new \98 _MASTER=lpmain_staging_new _SLAVE=lpmain_staging_slave_new \
97 LAG="0 seconds"99 LAG="0 seconds"
98 # Create the DB with the desired default tablespace.100 # Create the DB with the desired default tablespace.
99 createdb --encoding UTF8 --tablespace ${STAGING_TABLESPACE} \101 ${CREATEDB} --tablespace ${STAGING_TABLESPACE} lpmain_staging_new
100 lpmain_staging_new102 # Restore the database. We need to restore permissions, despite
101 # Restore the DB schema. We need to restore permissions, despite
102 # later running security.py, to pull in permissions granted on103 # later running security.py, to pull in permissions granted on
103 # production to users not maintained by security.py.104 # production to users not maintained by security.py.
104 pg_restore --dbname=lpmain_staging_new \105 pg_restore --dbname=lpmain_staging_new \
105 --no-owner --exit-on-error ${STAGING_DUMP}106 --no-owner --exit-on-error ${STAGING_DUMP}
106 psql -q -d lpmain_staging_new -f authdb_drop.sql
107 psql -q -d lpmain_staging_new -f authdb_create.sql \
108 2>&1 | grep -v _sl || true
109 # Restore the authdb data.
110 for table in ${AUTHDB_TABLES}; do \
111 pg_restore --dbname=lpmain_staging_new \
112 --no-acl --no-owner --disable-triggers --data-only \
113 --table=$$table ${STAGING_DUMP}; \
114 done
115 psql -q -d lpmain_staging_new -f authdb_sequences.sql
116 # Uninstall Slony-I if it is installed - a pg_dump of a DB with107 # Uninstall Slony-I if it is installed - a pg_dump of a DB with
117 # Slony-I installed isn't usable without this step.108 # Slony-I installed isn't usable without this step.
118 LPCONFIG=${NEW_STAGING_CONFIG} ./repair-restored-db.py109 LPCONFIG=${NEW_STAGING_CONFIG} ./repair-restored-db.py
@@ -144,17 +135,9 @@
144 LPCONFIG=${STAGING_CONFIG} ./slon_ctl.py --lag="${LAG}" start135 LPCONFIG=${STAGING_CONFIG} ./slon_ctl.py --lag="${LAG}" start
145136
146dogfood:137dogfood:
147 createdb --encoding UTF8 ${DOGFOOD_DBNAME}138 ${CREATEDB} ${DOGFOOD_DBNAME}
148 pg_restore --dbname=${DOGFOOD_DBNAME} --no-acl --no-owner \139 pg_restore --dbname=${DOGFOOD_DBNAME} --no-acl --no-owner \
149 --exit-on-error ${DOGFOOD_DUMP}140 --exit-on-error ${DOGFOOD_DUMP}
150 psql -q -d ${DOGFOOD_DBNAME} -f authdb_drop.sql
151 psql -q -d ${DOGFOOD_DBNAME} -f authdb_create.sql \
152 2>&1 | grep -v _sl || true
153 for table in ${AUTHDB_TABLES}; do \
154 pg_restore --dbname=${DOGFOOD_DBNAME} \
155 --no-acl --no-owner --disable-triggers --data-only \
156 --table=$$table ${DOGFOOD_DUMP}; \
157 done
158 ./repair-restored-db.py -d ${DOGFOOD_DBNAME}141 ./repair-restored-db.py -d ${DOGFOOD_DBNAME}
159 ../schema/upgrade.py -d ${DOGFOOD_DBNAME}142 ../schema/upgrade.py -d ${DOGFOOD_DBNAME}
160 ../schema/fti.py -d ${DOGFOOD_DBNAME}143 ../schema/fti.py -d ${DOGFOOD_DBNAME}
@@ -174,14 +157,15 @@
 _replicate:
 	@echo LPCONFIG currently ${LPCONFIG}
 	# Start the slon daemon for the master.
-	./slon_ctl.py start \
+	./slon_ctl.py --lag="0 seconds" start \
 		node1_node "dbname=${_MASTER} user=slony"
 	# Initialize the cluster and create replication sets.
 	./initialize.py
 	# Create the soon-to-be-slave database, empty at this point.
-	createdb --encoding=UTF8 --tablespace=${_SLAVE_TABLESPACE} ${_SLAVE}
+	${CREATEDB} --tablespace=${_SLAVE_TABLESPACE} ${_SLAVE}
 	# Start the slon daemon for the slave
-	./slon_ctl.py start node2_node "dbname=${_SLAVE} user=slony"
+	./slon_ctl.py --lag="0 seconds" start \
+		node2_node "dbname=${_SLAVE} user=slony"
 	# Setup the slave
 	./new-slave.py 2 "dbname=${_SLAVE}"
 	# Upgrade all databases in the cluster and reset security.
@@ -192,12 +176,7 @@
 	@echo Running security.py `date`
 	./slon_ctl.py stop # security.py can deadlock with slony
 	${SHHH} ../schema/security.py --cluster -U slony
-	./slon_ctl.py --lag="0 seconds" start
-	# Migrate tables to the authdb replication set, creating the set
-	# and subscribing nodes to it as necessary.
-	./populate_auth_replication_set.py -U slony
 	# Restart slon daemons with default lag setting.
-	./slon_ctl.py stop
 	./slon_ctl.py --lag="${LAG}" start
 	# Generate a preamble for manual slonik(1) usage.
 	./preamble.py > preamble.sk
 
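With the authdb replication set gone, the restore targets above reduce to three steps: create the database, run a single pg_restore, and repair the Slony-I leftovers. A minimal Python sketch of that flow follows (not part of this merge; the database and dump names are illustrative stand-ins, and the PostgreSQL client tools are assumed to be on PATH):

#!/usr/bin/python2.5
# Sketch of the simplified dogfood restore flow, mirroring the
# Makefile target above. All names here are illustrative.
import subprocess

DBNAME = 'dogfood'       # stands in for ${DOGFOOD_DBNAME}
DUMP = 'dogfood.dump'    # stands in for ${DOGFOOD_DUMP}

def run(*cmd):
    # Like a make recipe line: stop on the first failing command.
    subprocess.check_call(cmd)

run('createdb', '--encoding=UTF8', DBNAME)
run('pg_restore', '--dbname=' + DBNAME, '--no-acl', '--no-owner',
    '--exit-on-error', DUMP)
run('./repair-restored-db.py', '-d', DBNAME)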
=== removed file 'database/replication/authdb_create.sql'
--- database/replication/authdb_create.sql 2010-03-30 05:51:30 +0000
+++ database/replication/authdb_create.sql 1970-01-01 00:00:00 +0000
@@ -1,885 +0,0 @@
1-- Copyright 2009 Canonical Ltd. This software is licensed under the
2-- GNU Affero General Public License version 3 (see the file LICENSE).
3
4-- Generated by:
5-- pg_dump --format=p --schema-only --no-owner --no-privileges \
6-- --table=Account --table=AccountPassword --table=AuthToken \
7-- --table=EmailAddress --table=OpenIDAssociation \
8-- --table=OpenIDAuthorization --table=OpenIDNonce \
9-- --table=OpenIDRPSummary --table=ValidPersonCache \
10-- --table=ValidPersonOrTeamCache launchpad_prod_4
11
12--
13-- PostgreSQL database dump
14--
15
16SET client_encoding = 'UTF8';
17SET standard_conforming_strings = off;
18SET check_function_bodies = false;
19SET client_min_messages = warning;
20SET escape_string_warning = off;
21
22SET search_path = public, pg_catalog;
23
24SET default_tablespace = '';
25
26SET default_with_oids = false;
27
28--
29-- Name: account; Type: TABLE; Schema: public; Owner: -; Tablespace:
30--
31
32CREATE TABLE account (
33 id integer NOT NULL,
34 date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
35 creation_rationale integer NOT NULL,
36 status integer NOT NULL,
37 date_status_set timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
38 displayname text NOT NULL,
39 openid_identifier text DEFAULT generate_openid_identifier() NOT NULL,
40 status_comment text,
41 old_openid_identifier text
42);
43
44
45--
46-- Name: TABLE account; Type: COMMENT; Schema: public; Owner: -
47--
48
49COMMENT ON TABLE account IS 'An account that may be used for authenticating to Canonical or other systems.';
50
51
52--
53-- Name: COLUMN account.status; Type: COMMENT; Schema: public; Owner: -
54--
55
56COMMENT ON COLUMN account.status IS 'The status of the account.';
57
58
59--
60-- Name: COLUMN account.date_status_set; Type: COMMENT; Schema: public; Owner: -
61--
62
63COMMENT ON COLUMN account.date_status_set IS 'When the status was last changed.';
64
65
66--
67-- Name: COLUMN account.displayname; Type: COMMENT; Schema: public; Owner: -
68--
69
70COMMENT ON COLUMN account.displayname IS 'Name to display when rendering information about this account.';
71
72
73--
74-- Name: COLUMN account.openid_identifier; Type: COMMENT; Schema: public; Owner: -
75--
76
77COMMENT ON COLUMN account.openid_identifier IS 'The key used to construct an OpenID identity URL for this account.';
78
79
80--
81-- Name: COLUMN account.status_comment; Type: COMMENT; Schema: public; Owner: -
82--
83
84COMMENT ON COLUMN account.status_comment IS 'The comment on the status of the account.';
85
86
87--
88-- Name: COLUMN account.old_openid_identifier; Type: COMMENT; Schema: public; Owner: -
89--
90
91COMMENT ON COLUMN account.old_openid_identifier IS 'The previous openid_identifier, used for transitions to the current openid_identifier.';
92
93
94--
95-- Name: account_id_seq; Type: SEQUENCE; Schema: public; Owner: -
96--
97
98CREATE SEQUENCE account_id_seq
99 INCREMENT BY 1
100 NO MAXVALUE
101 NO MINVALUE
102 CACHE 1;
103
104
105--
106-- Name: account_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
107--
108
109ALTER SEQUENCE account_id_seq OWNED BY account.id;
110
111
112--
113-- Name: accountpassword; Type: TABLE; Schema: public; Owner: -; Tablespace:
114--
115
116CREATE TABLE accountpassword (
117 id integer NOT NULL,
118 account integer NOT NULL,
119 password text NOT NULL
120);
121
122
123--
124-- Name: TABLE accountpassword; Type: COMMENT; Schema: public; Owner: -
125--
126
127COMMENT ON TABLE accountpassword IS 'A password used to authenticate an Account.';
128
129
130--
131-- Name: COLUMN accountpassword.password; Type: COMMENT; Schema: public; Owner: -
132--
133
134COMMENT ON COLUMN accountpassword.password IS 'SSHA digest encrypted password.';
135
136
137--
138-- Name: accountpassword_id_seq; Type: SEQUENCE; Schema: public; Owner: -
139--
140
141CREATE SEQUENCE accountpassword_id_seq
142 INCREMENT BY 1
143 NO MAXVALUE
144 NO MINVALUE
145 CACHE 1;
146
147
148--
149-- Name: accountpassword_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
150--
151
152ALTER SEQUENCE accountpassword_id_seq OWNED BY accountpassword.id;
153
154
155--
156-- Name: authtoken; Type: TABLE; Schema: public; Owner: -; Tablespace:
157--
158
159CREATE TABLE authtoken (
160 id integer NOT NULL,
161 date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
162 date_consumed timestamp without time zone,
163 token_type integer NOT NULL,
164 token text NOT NULL,
165 requester integer,
166 requester_email text,
167 email text NOT NULL,
168 redirection_url text
169);
170
171
172--
173-- Name: TABLE authtoken; Type: COMMENT; Schema: public; Owner: -
174--
175
176COMMENT ON TABLE authtoken IS 'AuthToken stores one time tokens used by the authentication service for validating email addresses and other tasks that require verifying an email address is valid such as password recovery and account merging. This table will be cleaned occasionally to remove expired tokens. Expiry time is not yet defined.';
177
178
179--
180-- Name: COLUMN authtoken.date_created; Type: COMMENT; Schema: public; Owner: -
181--
182
183COMMENT ON COLUMN authtoken.date_created IS 'The timestamp that this request was made.';
184
185
186--
187-- Name: COLUMN authtoken.date_consumed; Type: COMMENT; Schema: public; Owner: -
188--
189
190COMMENT ON COLUMN authtoken.date_consumed IS 'The date and time when this token was consumed. It''s NULL if it hasn''t been consumed yet.';
191
192
193--
194-- Name: COLUMN authtoken.token_type; Type: COMMENT; Schema: public; Owner: -
195--
196
197COMMENT ON COLUMN authtoken.token_type IS 'The type of request, as per dbschema.TokenType.';
198
199
200--
201-- Name: COLUMN authtoken.token; Type: COMMENT; Schema: public; Owner: -
202--
203
204COMMENT ON COLUMN authtoken.token IS 'The token (not the URL) emailed used to uniquely identify this request. This token will be used to generate a URL that when clicked on will continue a workflow.';
205
206
207--
208-- Name: COLUMN authtoken.requester; Type: COMMENT; Schema: public; Owner: -
209--
210
211COMMENT ON COLUMN authtoken.requester IS 'The Account that made this request. This will be null for password recovery requests.';
212
213
214--
215-- Name: COLUMN authtoken.requester_email; Type: COMMENT; Schema: public; Owner: -
216--
217
218COMMENT ON COLUMN authtoken.requester_email IS 'The email address that was used to login when making this request. This provides an audit trail to help the end user confirm that this is a valid request. It is not a link to the EmailAddress table as this may be changed after the request is made. This field will be null for password recovery requests.';
219
220
221--
222-- Name: COLUMN authtoken.email; Type: COMMENT; Schema: public; Owner: -
223--
224
225COMMENT ON COLUMN authtoken.email IS 'The email address that this request was sent to.';
226
227
228--
229-- Name: authtoken_id_seq; Type: SEQUENCE; Schema: public; Owner: -
230--
231
232CREATE SEQUENCE authtoken_id_seq
233 INCREMENT BY 1
234 NO MAXVALUE
235 NO MINVALUE
236 CACHE 1;
237
238
239--
240-- Name: authtoken_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
241--
242
243ALTER SEQUENCE authtoken_id_seq OWNED BY authtoken.id;
244
245
246--
247-- Name: emailaddress; Type: TABLE; Schema: public; Owner: -; Tablespace:
248--
249
250CREATE TABLE emailaddress (
251 id integer NOT NULL,
252 email text NOT NULL,
253 person integer,
254 status integer NOT NULL,
255 date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
256 account integer,
257 CONSTRAINT emailaddress__is_linked__chk CHECK (((person IS NOT NULL) OR (account IS NOT NULL)))
258);
259
260
261--
262-- Name: COLUMN emailaddress.email; Type: COMMENT; Schema: public; Owner: -
263--
264
265COMMENT ON COLUMN emailaddress.email IS 'An email address used by a Person. The email address is stored in a casesensitive way, but must be case insensitivly unique.';
266
267
268--
269-- Name: emailaddress_id_seq; Type: SEQUENCE; Schema: public; Owner: -
270--
271
272CREATE SEQUENCE emailaddress_id_seq
273 INCREMENT BY 1
274 NO MAXVALUE
275 NO MINVALUE
276 CACHE 1;
277
278
279--
280-- Name: emailaddress_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
281--
282
283ALTER SEQUENCE emailaddress_id_seq OWNED BY emailaddress.id;
284
285
286--
287-- Name: openidassociation; Type: TABLE; Schema: public; Owner: -; Tablespace:
288--
289
290CREATE TABLE openidassociation (
291 server_url character varying(2047) NOT NULL,
292 handle character varying(255) NOT NULL,
293 secret bytea,
294 issued integer,
295 lifetime integer,
296 assoc_type character varying(64),
297 CONSTRAINT secret_length_constraint CHECK ((length(secret) <= 128))
298);
299
300
301--
302-- Name: openidauthorization; Type: TABLE; Schema: public; Owner: -; Tablespace:
303--
304
305CREATE TABLE openidauthorization (
306 id integer NOT NULL,
307 account integer NOT NULL,
308 client_id text,
309 date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
310 date_expires timestamp without time zone NOT NULL,
311 trust_root text NOT NULL
312);
313
314
315--
316-- Name: openidauthorization_id_seq; Type: SEQUENCE; Schema: public; Owner: -
317--
318
319CREATE SEQUENCE openidauthorization_id_seq
320 INCREMENT BY 1
321 NO MAXVALUE
322 NO MINVALUE
323 CACHE 1;
324
325
326--
327-- Name: openidauthorization_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
328--
329
330ALTER SEQUENCE openidauthorization_id_seq OWNED BY openidauthorization.id;
331
332
333--
334-- Name: openidnonce; Type: TABLE; Schema: public; Owner: -; Tablespace:
335--
336
337CREATE TABLE openidnonce (
338 server_url character varying(2047) NOT NULL,
339 "timestamp" integer NOT NULL,
340 salt character(40) NOT NULL
341);
342
343
344--
345-- Name: TABLE openidnonce; Type: COMMENT; Schema: public; Owner: -
346--
347
348COMMENT ON TABLE openidnonce IS 'Nonces for our OpenID consumer.';
349
350
351--
352-- Name: openidrpsummary; Type: TABLE; Schema: public; Owner: -; Tablespace:
353--
354
355CREATE TABLE openidrpsummary (
356 id integer NOT NULL,
357 account integer NOT NULL,
358 openid_identifier text NOT NULL,
359 trust_root text NOT NULL,
360 date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
361 date_last_used timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
362 total_logins integer DEFAULT 1 NOT NULL
363);
364
365
366--
367-- Name: TABLE openidrpsummary; Type: COMMENT; Schema: public; Owner: -
368--
369
370COMMENT ON TABLE openidrpsummary IS 'The summary of the activity between a person and an RP.';
371
372
373--
374-- Name: COLUMN openidrpsummary.account; Type: COMMENT; Schema: public; Owner: -
375--
376
377COMMENT ON COLUMN openidrpsummary.account IS 'The account who used the RP.';
378
379
380--
381-- Name: COLUMN openidrpsummary.openid_identifier; Type: COMMENT; Schema: public; Owner: -
382--
383
384COMMENT ON COLUMN openidrpsummary.openid_identifier IS 'The OpenID identifier used to login.';
385
386
387--
388-- Name: COLUMN openidrpsummary.trust_root; Type: COMMENT; Schema: public; Owner: -
389--
390
391COMMENT ON COLUMN openidrpsummary.trust_root IS 'The trust root for the RP';
392
393
394--
395-- Name: COLUMN openidrpsummary.date_created; Type: COMMENT; Schema: public; Owner: -
396--
397
398COMMENT ON COLUMN openidrpsummary.date_created IS 'The creation date of this summary; the first time the person used the RP.';
399
400
401--
402-- Name: COLUMN openidrpsummary.date_last_used; Type: COMMENT; Schema: public; Owner: -
403--
404
405COMMENT ON COLUMN openidrpsummary.date_last_used IS 'The date the RP was last used.';
406
407
408--
409-- Name: COLUMN openidrpsummary.total_logins; Type: COMMENT; Schema: public; Owner: -
410--
411
412COMMENT ON COLUMN openidrpsummary.total_logins IS 'The total number of times the RP was used by the person.';
413
414
415--
416-- Name: openidrpsummary_id_seq; Type: SEQUENCE; Schema: public; Owner: -
417--
418
419CREATE SEQUENCE openidrpsummary_id_seq
420 INCREMENT BY 1
421 NO MAXVALUE
422 NO MINVALUE
423 CACHE 1;
424
425
426--
427-- Name: openidrpsummary_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
428--
429
430ALTER SEQUENCE openidrpsummary_id_seq OWNED BY openidrpsummary.id;
431
432
433--
434-- Name: validpersoncache; Type: VIEW; Schema: public; Owner: -
435--
436
437CREATE VIEW validpersoncache AS
438 SELECT emailaddress.person AS id FROM emailaddress, account WHERE ((((emailaddress.account = account.id) AND (emailaddress.person IS NOT NULL)) AND (emailaddress.status = 4)) AND (account.status = 20));
439
440
441--
442-- Name: VIEW validpersoncache; Type: COMMENT; Schema: public; Owner: -
443--
444
445COMMENT ON VIEW validpersoncache IS 'A materialized view listing the Person.ids of all valid people (but not teams).';
446
447
448--
449-- Name: validpersonorteamcache; Type: VIEW; Schema: public; Owner: -
450--
451
452CREATE VIEW validpersonorteamcache AS
453 SELECT person.id FROM ((person LEFT JOIN emailaddress ON ((person.id = emailaddress.person))) LEFT JOIN account ON ((emailaddress.account = account.id))) WHERE ((person.teamowner IS NOT NULL) OR ((account.status = 20) AND (emailaddress.status = 4)));
454
455
456--
457-- Name: id; Type: DEFAULT; Schema: public; Owner: -
458--
459
460ALTER TABLE account ALTER COLUMN id SET DEFAULT nextval('account_id_seq'::regclass);
461
462
463--
464-- Name: id; Type: DEFAULT; Schema: public; Owner: -
465--
466
467ALTER TABLE accountpassword ALTER COLUMN id SET DEFAULT nextval('accountpassword_id_seq'::regclass);
468
469
470--
471-- Name: id; Type: DEFAULT; Schema: public; Owner: -
472--
473
474ALTER TABLE authtoken ALTER COLUMN id SET DEFAULT nextval('authtoken_id_seq'::regclass);
475
476
477--
478-- Name: id; Type: DEFAULT; Schema: public; Owner: -
479--
480
481ALTER TABLE emailaddress ALTER COLUMN id SET DEFAULT nextval('emailaddress_id_seq'::regclass);
482
483
484--
485-- Name: id; Type: DEFAULT; Schema: public; Owner: -
486--
487
488ALTER TABLE openidauthorization ALTER COLUMN id SET DEFAULT nextval('openidauthorization_id_seq'::regclass);
489
490
491--
492-- Name: id; Type: DEFAULT; Schema: public; Owner: -
493--
494
495ALTER TABLE openidrpsummary ALTER COLUMN id SET DEFAULT nextval('openidrpsummary_id_seq'::regclass);
496
497
498--
499-- Name: account_openid_identifier_key; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
500--
501
502ALTER TABLE ONLY account
503 ADD CONSTRAINT account_openid_identifier_key UNIQUE (openid_identifier);
504
505
506--
507-- Name: account_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
508--
509
510ALTER TABLE ONLY account
511 ADD CONSTRAINT account_pkey PRIMARY KEY (id);
512
513
514--
515-- Name: accountpassword_account_key; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
516--
517
518ALTER TABLE ONLY accountpassword
519 ADD CONSTRAINT accountpassword_account_key UNIQUE (account);
520
521
522--
523-- Name: accountpassword_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
524--
525
526ALTER TABLE ONLY accountpassword
527 ADD CONSTRAINT accountpassword_pkey PRIMARY KEY (id);
528
529
530--
531-- Name: authtoken__token__key; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
532--
533
534ALTER TABLE ONLY authtoken
535 ADD CONSTRAINT authtoken__token__key UNIQUE (token);
536
537
538--
539-- Name: authtoken_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
540--
541
542ALTER TABLE ONLY authtoken
543 ADD CONSTRAINT authtoken_pkey PRIMARY KEY (id);
544
545
546--
547-- Name: emailaddress_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
548--
549
550ALTER TABLE ONLY emailaddress
551 ADD CONSTRAINT emailaddress_pkey PRIMARY KEY (id);
552
553
554--
555-- Name: openidassociation_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
556--
557
558ALTER TABLE ONLY openidassociation
559 ADD CONSTRAINT openidassociation_pkey PRIMARY KEY (server_url, handle);
560
561
562--
563-- Name: openidauthorization_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
564--
565
566ALTER TABLE ONLY openidauthorization
567 ADD CONSTRAINT openidauthorization_pkey PRIMARY KEY (id);
568
569
570--
571-- Name: openidnonce_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
572--
573
574ALTER TABLE ONLY openidnonce
575 ADD CONSTRAINT openidnonce_pkey PRIMARY KEY (server_url, "timestamp", salt);
576
577
578--
579-- Name: openidrpsummary__account__trust_root__openid_identifier__key; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
580--
581
582ALTER TABLE ONLY openidrpsummary
583 ADD CONSTRAINT openidrpsummary__account__trust_root__openid_identifier__key UNIQUE (account, trust_root, openid_identifier);
584
585
586--
587-- Name: openidrpsummary_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
588--
589
590ALTER TABLE ONLY openidrpsummary
591 ADD CONSTRAINT openidrpsummary_pkey PRIMARY KEY (id);
592
593
594--
595-- Name: account__old_openid_identifier__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
596--
597
598CREATE INDEX account__old_openid_identifier__idx ON account USING btree (old_openid_identifier);
599
600
601--
602-- Name: authtoken__date_consumed__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
603--
604
605CREATE INDEX authtoken__date_consumed__idx ON authtoken USING btree (date_consumed);
606
607
608--
609-- Name: authtoken__date_created__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
610--
611
612CREATE INDEX authtoken__date_created__idx ON authtoken USING btree (date_created);
613
614
615--
616-- Name: authtoken__requester__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
617--
618
619CREATE INDEX authtoken__requester__idx ON authtoken USING btree (requester);
620
621
622--
623-- Name: emailaddress__account__key; Type: INDEX; Schema: public; Owner: -; Tablespace:
624--
625
626CREATE UNIQUE INDEX emailaddress__account__key ON emailaddress USING btree (account) WHERE ((status = 4) AND (account IS NOT NULL));
627
628
629--
630-- Name: INDEX emailaddress__account__key; Type: COMMENT; Schema: public; Owner: -
631--
632
633COMMENT ON INDEX emailaddress__account__key IS 'Ensures that an Account only has one preferred email address';
634
635
636--
637-- Name: emailaddress__lower_email__key; Type: INDEX; Schema: public; Owner: -; Tablespace:
638--
639
640CREATE INDEX emailaddress__lower_email__key ON emailaddress USING btree (lower(email));
641
642
643--
644-- Name: emailaddress__person__key; Type: INDEX; Schema: public; Owner: -; Tablespace:
645--
646
647CREATE UNIQUE INDEX emailaddress__person__key ON emailaddress USING btree (person) WHERE ((status = 4) AND (person IS NOT NULL));
648
649
650--
651-- Name: INDEX emailaddress__person__key; Type: COMMENT; Schema: public; Owner: -
652--
653
654COMMENT ON INDEX emailaddress__person__key IS 'Ensures that a Person only has one preferred email address';
655
656
657--
658-- Name: emailaddress__person__status__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
659--
660
661CREATE INDEX emailaddress__person__status__idx ON emailaddress USING btree (person, status);
662
663
664--
665-- Name: openidauthorixation__account__troot__expires__client_id__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
666--
667
668CREATE INDEX openidauthorixation__account__troot__expires__client_id__idx ON openidauthorization USING btree (account, trust_root, date_expires, client_id);
669
670
671--
672-- Name: openidauthorixation__account__trust_root__key; Type: INDEX; Schema: public; Owner: -; Tablespace:
673--
674
675CREATE UNIQUE INDEX openidauthorixation__account__trust_root__key ON openidauthorization USING btree (account, trust_root) WHERE (client_id IS NULL);
676
677
678--
679-- Name: openidauthorization__account__client_id__trust_root__key; Type: INDEX; Schema: public; Owner: -; Tablespace:
680--
681
682CREATE UNIQUE INDEX openidauthorization__account__client_id__trust_root__key ON openidauthorization USING btree (account, client_id, trust_root) WHERE (client_id IS NOT NULL);
683
684
685--
686-- Name: openidrpsummary__openid_identifier__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
687--
688
689CREATE INDEX openidrpsummary__openid_identifier__idx ON openidrpsummary USING btree (openid_identifier);
690
691
692--
693-- Name: openidrpsummary__trust_root__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
694--
695
696CREATE INDEX openidrpsummary__trust_root__idx ON openidrpsummary USING btree (trust_root);
697
698
699--
700-- Name: _sl_logtrigger_200; Type: TRIGGER; Schema: public; Owner: -
701--
702
703CREATE TRIGGER _sl_logtrigger_200
704 AFTER INSERT OR DELETE OR UPDATE ON account
705 FOR EACH ROW
706 EXECUTE PROCEDURE _sl.logtrigger('_sl', '200', 'kvvvvvvvv');
707
708
709--
710-- Name: _sl_logtrigger_201; Type: TRIGGER; Schema: public; Owner: -
711--
712
713CREATE TRIGGER _sl_logtrigger_201
714 AFTER INSERT OR DELETE OR UPDATE ON accountpassword
715 FOR EACH ROW
716 EXECUTE PROCEDURE _sl.logtrigger('_sl', '201', 'kvv');
717
718
719--
720-- Name: _sl_logtrigger_274; Type: TRIGGER; Schema: public; Owner: -
721--
722
723CREATE TRIGGER _sl_logtrigger_274
724 AFTER INSERT OR DELETE OR UPDATE ON emailaddress
725 FOR EACH ROW
726 EXECUTE PROCEDURE _sl.logtrigger('_sl', '274', 'kvvvvv');
727
728
729--
730-- Name: _sl_logtrigger_335; Type: TRIGGER; Schema: public; Owner: -
731--
732
733CREATE TRIGGER _sl_logtrigger_335
734 AFTER INSERT OR DELETE OR UPDATE ON openidauthorization
735 FOR EACH ROW
736 EXECUTE PROCEDURE _sl.logtrigger('_sl', '335', 'kvvvvv');
737
738
739--
740-- Name: _sl_logtrigger_337; Type: TRIGGER; Schema: public; Owner: -
741--
742
743CREATE TRIGGER _sl_logtrigger_337
744 AFTER INSERT OR DELETE OR UPDATE ON openidrpsummary
745 FOR EACH ROW
746 EXECUTE PROCEDURE _sl.logtrigger('_sl', '337', 'kvvvvvv');
747
748
749--
750-- Name: _sl_logtrigger_438; Type: TRIGGER; Schema: public; Owner: -
751--
752
753CREATE TRIGGER _sl_logtrigger_438
754 AFTER INSERT OR DELETE OR UPDATE ON authtoken
755 FOR EACH ROW
756 EXECUTE PROCEDURE _sl.logtrigger('_sl', '438', 'kvvvvvvvv');
757
758
759--
760-- Name: _sl_logtrigger_439; Type: TRIGGER; Schema: public; Owner: -
761--
762
763CREATE TRIGGER _sl_logtrigger_439
764 AFTER INSERT OR DELETE OR UPDATE ON openidassociation
765 FOR EACH ROW
766 EXECUTE PROCEDURE _sl.logtrigger('_sl', '439', 'kkvvvv');
767
768
769--
770-- Name: _sl_logtrigger_445; Type: TRIGGER; Schema: public; Owner: -
771--
772
773CREATE TRIGGER _sl_logtrigger_445
774 AFTER INSERT OR DELETE OR UPDATE ON openidnonce
775 FOR EACH ROW
776 EXECUTE PROCEDURE _sl.logtrigger('_sl', '445', 'kkk');
777
778
779--
780-- Name: set_date_status_set_t; Type: TRIGGER; Schema: public; Owner: -
781--
782
783CREATE TRIGGER set_date_status_set_t
784 BEFORE UPDATE ON account
785 FOR EACH ROW
786 EXECUTE PROCEDURE set_date_status_set();
787
788
789--
790-- Name: accountpassword_account_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
791--
792
793ALTER TABLE ONLY accountpassword
794 ADD CONSTRAINT accountpassword_account_fkey FOREIGN KEY (account) REFERENCES account(id) ON DELETE CASCADE;
795
796
797--
798-- Name: authtoken__requester__fk; Type: FK CONSTRAINT; Schema: public; Owner: -
799--
800
801ALTER TABLE ONLY authtoken
802 ADD CONSTRAINT authtoken__requester__fk FOREIGN KEY (requester) REFERENCES account(id);
803
804
805--
806-- Name: emailaddress__account__fk; Type: FK CONSTRAINT; Schema: public; Owner: -
807--
808
809ALTER TABLE ONLY emailaddress
810 ADD CONSTRAINT emailaddress__account__fk FOREIGN KEY (account) REFERENCES account(id) ON DELETE SET NULL;
811
812
813--
814-- Name: openidauthorization__account__fk; Type: FK CONSTRAINT; Schema: public; Owner: -
815--
816
817ALTER TABLE ONLY openidauthorization
818 ADD CONSTRAINT openidauthorization__account__fk FOREIGN KEY (account) REFERENCES account(id);
819
820
821--
822-- Name: openidrpsummary_account_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
823--
824
825ALTER TABLE ONLY openidrpsummary
826 ADD CONSTRAINT openidrpsummary_account_fkey FOREIGN KEY (account) REFERENCES account(id);
827
828
829--
830-- PostgreSQL database dump complete
831--
832
833CREATE INDEX emailaddress__account__status__idx
834 ON EmailAddress(account, status);
835
836
837-- Permissions for Ubuntu SSO server testing on staging.
838
839-- Mirrored from sso_auth user 2010-01-12.
840-- These tables will eventually not be available.
841--
842GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE account TO ubuntu_sso;
843GRANT USAGE ON SEQUENCE account_id_seq TO ubuntu_sso;
844GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE accountpassword TO ubuntu_sso;
845GRANT USAGE ON SEQUENCE accountpassword_id_seq TO ubuntu_sso;
846GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE authtoken TO ubuntu_sso;
847GRANT USAGE ON SEQUENCE authtoken_id_seq TO ubuntu_sso;
848GRANT SELECT ON TABLE person TO ubuntu_sso;
849GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE emailaddress TO ubuntu_sso;
850GRANT USAGE ON SEQUENCE emailaddress_id_seq TO ubuntu_sso;
851GRANT SELECT,INSERT,DELETE ON TABLE openidassociation TO ubuntu_sso;
852GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE openidauthorization TO ubuntu_sso;
853GRANT USAGE ON SEQUENCE openidauthorization_id_seq TO ubuntu_sso;
854GRANT SELECT,INSERT,DELETE ON TABLE openidnonce TO ubuntu_sso;
855GRANT SELECT,INSERT,UPDATE ON TABLE openidrpsummary TO ubuntu_sso;
856GRANT USAGE ON SEQUENCE openidrpsummary_id_seq TO ubuntu_sso;
857GRANT SELECT ON SEQUENCE person_id_seq TO ubuntu_sso;
858GRANT SELECT ON TABLE personlocation TO ubuntu_sso;
859GRANT SELECT ON SEQUENCE personlocation_id_seq TO ubuntu_sso;
860GRANT SELECT ON TABLE teamparticipation TO ubuntu_sso;
861GRANT SELECT ON SEQUENCE teamparticipation_id_seq TO ubuntu_sso;
862
863-- Permissions on the Ubuntu SSO tables.
864--
865GRANT SELECT, INSERT, DELETE, UPDATE ON TABLE auth_permission,
866auth_group_permissions, auth_group, auth_user, auth_user_groups,
867auth_user_user_permissions, auth_message, django_content_type,
868django_session, django_site, django_admin_log,
869ssoopenidrpconfig TO ubuntu_sso;
870
871GRANT USAGE ON SEQUENCE auth_group_id_seq,
872auth_group_permissions_id_seq, auth_message_id_seq,
873auth_permission_id_seq, auth_user_groups_id_seq, auth_user_id_seq,
874auth_user_user_permissions_id_seq, django_admin_log_id_seq,
875django_content_type_id_seq, django_site_id_seq,
876ssoopenidrpconfig_id_seq TO ubuntu_sso;
877
878-- Permissions on the lpmirror tables (mirrors of relevant Launchpad
879-- information, available even when Launchpad database upgrades are in
880-- progress).
881GRANT SELECT
882ON TABLE
883 lp_person, lp_personlocation, lp_teamparticipation, lp_account
884TO ubuntu_sso;
885
=== removed file 'database/replication/authdb_drop.sql'
--- database/replication/authdb_drop.sql 2009-11-11 10:32:35 +0000
+++ database/replication/authdb_drop.sql 1970-01-01 00:00:00 +0000
@@ -1,14 +0,0 @@
-- Copyright 2009 Canonical Ltd. This software is licensed under the
-- GNU Affero General Public License version 3 (see the file LICENSE).

SET client_min_messages=ERROR;

-- Drop everything in the authdb replication set.
DROP TABLE IF EXISTS Account CASCADE;
DROP TABLE IF EXISTS AccountPassword CASCADE;
DROP TABLE IF EXISTS AuthToken CASCADE;
DROP TABLE IF EXISTS EmailAddress CASCADE;
DROP TABLE IF EXISTS OpenIDAssociation CASCADE;
DROP TABLE IF EXISTS OpenIDAuthorization CASCADE;
DROP TABLE IF EXISTS OpenIDNonce CASCADE;
DROP TABLE IF EXISTS OpenIDRPSummary;
=== removed file 'database/replication/authdb_sequences.sql'
--- database/replication/authdb_sequences.sql 2010-01-13 06:54:32 +0000
+++ database/replication/authdb_sequences.sql 1970-01-01 00:00:00 +0000
@@ -1,22 +0,0 @@
-- Repair sequences in the authdb replication set. We need to do this because
-- we cannot restore the sequence values from the dump when restoring the
-- data using pg_restore --data-only.

SELECT setval('account_id_seq', max(id)) AS Account
FROM Account;

SELECT setval('accountpassword_id_seq', max(id)) AS AccountPassword
FROM AccountPassword;

SELECT setval('authtoken_id_seq', max(id)) AS AuthToken
FROM AuthToken;

SELECT setval('emailaddress_id_seq', max(id)) AS EmailAddress
FROM EmailAddress;

SELECT setval('openidauthorization_id_seq', max(id)) AS OpenIDAuthorization
FROM OpenIDAuthorization;

SELECT setval('openidrpsummary_id_seq', max(id)) AS OpenIDRPSummary
FROM OpenIDRPSummary;
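The header of the removed file explains why it existed: pg_restore --data-only cannot restore sequence values, so each sequence had to be reset to its table's current max(id). The same repair can be driven from Python; a sketch (psycopg2 and the connection string are assumptions, not part of this merge):

# Reset each sequence to its table's current max(id), as the removed
# SQL file did. The DSN is illustrative.
import psycopg2

con = psycopg2.connect('dbname=lpmain_staging_new')
cur = con.cursor()
for table, sequence in [
        ('Account', 'account_id_seq'),
        ('AccountPassword', 'accountpassword_id_seq'),
        ('AuthToken', 'authtoken_id_seq'),
        ('EmailAddress', 'emailaddress_id_seq'),
        ('OpenIDAuthorization', 'openidauthorization_id_seq'),
        ('OpenIDRPSummary', 'openidrpsummary_id_seq')]:
    # setval() makes the next nextval() continue after existing rows.
    cur.execute("SELECT setval(%%s, max(id)) FROM %s" % table, (sequence,))
con.commit()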
=== modified file 'database/replication/helpers.py'
--- database/replication/helpers.py 2010-02-26 03:34:49 +0000
+++ database/replication/helpers.py 2010-04-27 02:13:38 +0000
@@ -28,20 +28,15 @@
 
 # Replication set id constants. Don't change these without DBA help.
 LPMAIN_SET_ID = 1
-AUTHDB_SET_ID = 2
 HOLDING_SET_ID = 666
+LPMIRROR_SET_ID = 4
 
-# Seed tables for the authdb replication set to be passed to
+# Seed tables for the lpmain replication set to be passed to
 # calculate_replication_set().
-AUTHDB_SEED = frozenset([
+LPMAIN_SEED = frozenset([
     ('public', 'account'),
+    ('public', 'openidnonce'),
     ('public', 'openidassociation'),
-    ('public', 'openidnonce'),
-    ])
-
-# Seed tables for the lpmain replication set to be passed to
-# calculate_replication_set().
-LPMAIN_SEED = frozenset([
     ('public', 'person'),
     ('public', 'launchpaddatabaserevision'),
     ('public', 'databasereplicationlag'),
@@ -57,7 +52,6 @@
     ('public', 'launchpadstatistic'),
     ('public', 'parsedapachelog'),
     ('public', 'shipitsurvey'),
-    ('public', 'openidassociations'), # Remove this in April 2009 or later.
     ('public', 'databasereplicationlag'),
     ])
 
@@ -70,6 +64,7 @@
     'public.secret', 'public.sessiondata', 'public.sessionpkgdata',
     # Mirror tables, per Bug #489078. These tables have their own private
     # replication set that is setup manually.
+    'public.lp_account',
     'public.lp_person',
     'public.lp_personlocation',
     'public.lp_teamparticipation',
@@ -176,12 +171,13 @@
     script = preamble() + script
 
     if sync is not None:
-        script = script + dedent("""\
+        sync_script = dedent("""\
             sync (id = @master_node);
             wait for event (
-                origin = ALL, confirmed = ALL,
+                origin = @master_node, confirmed = ALL,
                 wait on = @master_node, timeout = %d);
             """ % sync)
+        script = script + sync_script
 
     # Copy the script to a NamedTemporaryFile rather than just pumping it
     # to slonik via stdin. This way it can be examined if slonik appears
@@ -192,7 +188,7 @@
 
     # Run slonik
     log.debug("Executing slonik script %s" % script_on_disk.name)
-    log.log(DEBUG2, script)
+    log.log(DEBUG2, 'Running script:\n%s' % script)
     returncode = subprocess.call(['slonik', script_on_disk.name])
 
     if returncode != 0:
@@ -323,10 +319,10 @@
         cluster name = sl;
 
         # Symbolic ids for replication sets.
         define lpmain_set %d;
-        define authdb_set %d;
-        define holding_set %d;
-        """ % (LPMAIN_SET_ID, AUTHDB_SET_ID, HOLDING_SET_ID))]
+        define holding_set %d;
+        define lpmirror_set %d;
+        """ % (LPMAIN_SET_ID, HOLDING_SET_ID, LPMIRROR_SET_ID))]
 
     if master_node is not None:
         preamble.append(dedent("""\
@@ -503,19 +499,6 @@
         raise ReplicationConfigError(
             "Unreplicated sequences: %s" % repr(unrepl_sequences))
 
-    authdb_tables, authdb_sequences = calculate_replication_set(
-        cur, AUTHDB_SEED)
     lpmain_tables, lpmain_sequences = calculate_replication_set(
         cur, LPMAIN_SEED)
 
-    confused_tables = authdb_tables.intersection(lpmain_tables)
-    if confused_tables:
-        raise ReplicationConfigError(
-            "Tables exist in multiple replication sets: %s"
-            % repr(confused_tables))
-    confused_sequences = authdb_sequences.intersection(lpmain_sequences)
-    if confused_sequences:
-        raise ReplicationConfigError(
-            "Sequences exist in multiple replication sets: %s"
-            % repr(confused_sequences))
-
 
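The context lines above show the shape of execute_slonik(): the script (with a sync/wait-for-event suffix when requested) is written to a NamedTemporaryFile so it can be examined if slonik misbehaves, then slonik(1) is run on it. A stripped-down sketch of that pattern, not the helper itself; it assumes slonik is on PATH and omits the preamble and logging the real function adds:

import subprocess
from tempfile import NamedTemporaryFile

def run_slonik(script):
    # Keep the script on disk rather than piping it via stdin, so a
    # failing run leaves something to inspect.
    script_on_disk = NamedTemporaryFile(prefix='slonik', suffix='.sk')
    script_on_disk.write(script)
    script_on_disk.flush()
    returncode = subprocess.call(['slonik', script_on_disk.name])
    return returncode == 0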
=== modified file 'database/replication/initialize.py'
--- database/replication/initialize.py 2010-01-22 06:25:48 +0000
+++ database/replication/initialize.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -88,54 +88,13 @@
     helpers.sync(120) # Will exit on failure.
 
 
-def create_replication_sets(
-        authdb_tables, authdb_sequences, lpmain_tables, lpmain_sequences):
+def create_replication_sets(lpmain_tables, lpmain_sequences):
     """Create the replication sets."""
     log.info('Creating Slony-I replication sets.')
 
-    # Instead of creating both the authdb and lpmain replication sets,
-    # we just create the lpmain replication set containing everything.
-    # This way, we can then test the populate_auth_replication_set.py
-    # migration script that moves the relevant tables from the lpmain
-    # replication set to the authdb replication set.
-    # We will turn this behavior off once we are running two
-    # replication sets in production and remove the migration script.
-    lpmain_tables = lpmain_tables.union(authdb_tables)
-    lpmain_sequences = lpmain_sequences.union(authdb_sequences)
-
     script = ["try {"]
-    # script.append("""
-    #     echo 'Creating AuthDB replication set (@authdb_set)';
-    #     create set (
-    #         id=@authdb_set, origin=@master_node,
-    #         comment='AuthDB tables and sequences');
-    #     """)
 
-    # entry_id = 1
-    # for table in sorted(authdb_tables):
-    #     script.append("""
-    #         echo 'Adding %(table)s to replication set @authdb_set';
-    #         set add table (
-    #             set id=@authdb_set,
-    #             origin=@master_node,
-    #             id=%(entry_id)d,
-    #             fully qualified name='%(table)s');
-    #         """ % vars())
-    #     entry_id += 1
-    # entry_id = 1
-    # for sequence in sorted(authdb_sequences):
-    #     script.append("""
-    #         echo 'Adding %(sequence)s to replication set @authdb_set';
-    #         set add sequence (
-    #             set id=@authdb_set,
-    #             origin=@master_node,
-    #             id=%(entry_id)d,
-    #             fully qualified name='%(sequence)s');
-    #         """ % vars())
-    #     entry_id += 1
-    #
-    # assert entry_id < 200, 'authdb replication set has > 200 objects???'
-    entry_id = 200
+    entry_id = 1
 
     script.append("""
         echo 'Creating LPMain replication set (@lpmain_set)';
@@ -157,7 +116,7 @@
             """ % vars())
         entry_id += 1
 
-    entry_id = 200
+    entry_id = 1
     script.append(
         "echo 'Adding %d sequences to replication set @lpmain_set';"
         % len(lpmain_sequences))
@@ -199,9 +158,6 @@
     con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
     global cur
     cur = con.cursor()
-    log.debug("Calculating authdb replication set.")
-    authdb_tables, authdb_sequences = helpers.calculate_replication_set(
-        cur, helpers.AUTHDB_SEED)
     log.debug("Calculating lpmain replication set.")
     lpmain_tables, lpmain_sequences = helpers.calculate_replication_set(
         cur, helpers.LPMAIN_SEED)
@@ -212,8 +168,7 @@
     fails = 0
     for table in all_tables_in_schema(cur, 'public'):
         times_seen = 0
-        for table_set in [
-                authdb_tables, lpmain_tables, helpers.IGNORED_TABLES]:
+        for table_set in [lpmain_tables, helpers.IGNORED_TABLES]:
             if table in table_set:
                 times_seen += 1
         if times_seen == 0:
@@ -224,8 +179,7 @@
             fails += 1
     for sequence in all_sequences_in_schema(cur, 'public'):
         times_seen = 0
-        for sequence_set in [
-                authdb_sequences, lpmain_sequences, helpers.IGNORED_SEQUENCES]:
+        for sequence_set in [lpmain_sequences, helpers.IGNORED_SEQUENCES]:
             if sequence in sequence_set:
                 times_seen += 1
         if times_seen == 0:
@@ -242,8 +196,7 @@
 
     ensure_live()
 
-    create_replication_sets(
-        authdb_tables, authdb_sequences, lpmain_tables, lpmain_sequences)
+    create_replication_sets(lpmain_tables, lpmain_sequences)
 
     helpers.sync(0)
 
 
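After this change, initialize.py checks each table and sequence against two collections instead of three: everything in the public schema must appear in exactly one of the lpmain set or the ignored set. A self-contained sketch of that check with toy data (in the real script the sets come from helpers.calculate_replication_set() and helpers.IGNORED_TABLES):

# Toy stand-ins for the real replicated/ignored collections.
lpmain_tables = set(['public.person', 'public.account'])
IGNORED_TABLES = set(['public.secret'])

def coverage_ok(all_tables):
    fails = 0
    for table in all_tables:
        times_seen = 0
        for table_set in [lpmain_tables, IGNORED_TABLES]:
            if table in table_set:
                times_seen += 1
        if times_seen != 1:
            # Unreplicated (0) or doubly-listed (>1) tables both fail.
            print 'Table %s seen %d times' % (table, times_seen)
            fails += 1
    return fails == 0

assert coverage_ok(['public.person', 'public.account', 'public.secret'])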
=== modified file 'database/replication/new-slave.py'
--- database/replication/new-slave.py 2010-01-13 08:40:48 +0000
+++ database/replication/new-slave.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -22,10 +22,9 @@
 from canonical.database.sqlbase import (
     connect_string, ISOLATION_LEVEL_AUTOCOMMIT)
 from canonical.launchpad.scripts import db_options, logger_options, logger
-from canonical.launchpad.webapp.adapter import _auth_store_tables
 
 import replication.helpers
-from replication.helpers import AUTHDB_SET_ID, LPMAIN_SET_ID
+from replication.helpers import LPMAIN_SET_ID
 
 def main():
     parser = OptionParser(
@@ -77,8 +76,6 @@
     # Get the connection string for masters.
     lpmain_connection_string = get_master_connection_string(
         source_connection, parser, LPMAIN_SET_ID) or source_connection_string
-    authdb_connection_string = get_master_connection_string(
-        source_connection, parser, AUTHDB_SET_ID) or source_connection_string
 
     # Sanity check the target connection string.
     target_connection_string = ConnectionString(raw_target_connection_string)
@@ -130,31 +127,6 @@
         log.error("Failed to duplicate database schema.")
         return 1
 
-    # Drop the authdb replication set tables we just restored, as they
-    # will be broken if the authdb master is a separate database to the
-    # lpmain master.
-    log.debug("Dropping (possibly corrupt) authdb tables.")
-    cur = target_con.cursor()
-    for table_name in _auth_store_tables:
-        cur.execute("DROP TABLE IF EXISTS %s CASCADE" % table_name)
-    target_con.commit()
-
-    # Duplicate the authdb schema.
-    log.info("Duplicating authdb schema from '%s' to '%s'" % (
-        authdb_connection_string, target_connection_string))
-    table_args = ["--table=%s" % table for table in _auth_store_tables]
-    # We need to restore the two cross-replication-set views that were
-    # dropped as a side effect of dropping the auth store tables.
-    table_args.append("--table=ValidPersonCache")
-    table_args.append("--table=ValidPersonOrTeamCache")
-    cmd = "pg_dump --schema-only --no-privileges %s %s | psql -1 -q %s" % (
-        ' '.join(table_args),
-        source_connection_string.asPGCommandLineArgs(),
-        target_connection_string.asPGCommandLineArgs())
-    if subprocess.call(cmd, shell=True) != 0:
-        log.error("Failed to duplicate database schema.")
-        return 1
-
     # Trash the broken Slony tables we just duplicated.
     log.debug("Removing slony cruft.")
     cur = target_con.cursor()
@@ -163,21 +135,30 @@
     del target_con
 
     # Get a list of existing set ids that can be subscribed to. This
-    # is all sets where the origin is the master_node, and set 2 if
-    # the master happens to be configured as a forwarding slave. We
+    # is all sets where the origin is the master_node. We
     # don't allow other sets where the master is configured as a
     # forwarding slave as we have to special case rebuilding the database
-    # schema (such as we do for the authdb replication set 2).
+    # schema, and we want to avoid cascading slave configurations anyway
+    # since we are running an antique Slony-I at the moment - keep it
+    # simple!
+    # We order the sets smallest to largest by number of tables.
+    # This should let us subscribe the quickest sets first for more
+    # immediate feedback.
     source_connection.rollback()
     master_node = replication.helpers.get_master_node(source_connection)
     cur = source_connection.cursor()
     cur.execute("""
-        SELECT set_id FROM _sl.sl_set WHERE set_origin=%d
-        UNION
-        SELECT sub_set AS set_id FROM _sl.sl_subscribe
-        WHERE sub_receiver=%d AND sub_forward IS TRUE AND sub_active IS TRUE
-        AND sub_set=2
-        """ % (master_node.node_id, master_node.node_id))
+        SELECT set_id
+        FROM _sl.sl_set, (
+            SELECT tab_set, count(*) AS tab_count
+            FROM _sl.sl_table GROUP BY tab_set
+            ) AS TableCounts
+        WHERE
+            set_origin=%d
+            AND tab_set = set_id
+        ORDER BY tab_count
+        """
+        % (master_node.node_id,))
     set_ids = [set_id for set_id, in cur.fetchall()]
     log.debug("Discovered set ids %s" % repr(list(set_ids)))
 
@@ -209,19 +190,32 @@
         } on error { echo 'Failed.'; exit 1; }
         """)
 
+    full_sync = []
+    sync_nicknames = [node.nickname for node in existing_nodes]
+    sync_nicknames.append('new_node')
+    for nickname in sync_nicknames:
+        full_sync.append(dedent("""\
+            echo 'Waiting for %(nickname)s sync.';
+            sync (id=@%(nickname)s);
+            wait for event (
+                origin = @%(nickname)s, confirmed=ALL,
+                wait on = @%(nickname)s, timeout=0);
+            """ % {'nickname': nickname}))
+    full_sync = '\n'.join(full_sync)
+    script += full_sync
+
     for set_id in set_ids:
-
         script += dedent("""\
             echo 'Subscribing new node to set %d.';
             subscribe set (
                 id=%d, provider=@master_node, receiver=@new_node, forward=yes);
-
-            echo 'Waiting for sync... this might take a while...';
+            echo 'Waiting for subscribe to start processing.';
             sync (id = @master_node);
             wait for event (
-                origin = ALL, confirmed = ALL,
+                origin = @master_node, confirmed = ALL,
                 wait on = @master_node, timeout = 0);
             """ % (set_id, set_id))
+        script += full_sync
 
     replication.helpers.execute_slonik(script)
 
 
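The new discovery query above returns the replication sets originating at the master ordered smallest-first by table count, so the quick subscriptions complete while the big lpmain set is still copying. Run standalone it looks like this (a sketch; psycopg2, the DSN, and the node id are illustrative assumptions):

import psycopg2

con = psycopg2.connect('dbname=launchpad_prod')  # illustrative DSN
cur = con.cursor()
master_node_id = 1  # illustrative
# Same query as the diff: join each set to its table count and order
# by that count so the smallest sets come back first.
cur.execute("""
    SELECT set_id
    FROM _sl.sl_set, (
        SELECT tab_set, count(*) AS tab_count
        FROM _sl.sl_table GROUP BY tab_set
        ) AS TableCounts
    WHERE
        set_origin=%d
        AND tab_set = set_id
    ORDER BY tab_count
    """ % (master_node_id,))
for set_id, in cur.fetchall():
    print 'would subscribe set', set_id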
=== removed file 'database/replication/populate_auth_replication_set.py'
--- database/replication/populate_auth_replication_set.py 2009-10-17 14:06:03 +0000
+++ database/replication/populate_auth_replication_set.py 1970-01-01 00:00:00 +0000
@@ -1,177 +0,0 @@
#!/usr/bin/python2.5
#
# Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).

"""Populate the auth replication set.

This script moves the SSO tables from the main replication set to
the auth replication set.

Once it has been run on production, these tables can no longer be
maintained using the Launchpad database maintenance scripts
(upgrade.py, security.py etc.).

We do this so Launchpad database upgrades do not lock the SSO tables,
allowing the SSO service to continue to operate.

This is a single shot script.
"""

__metaclass__ = type
__all__ = []

import _pythonpath

import sys
from textwrap import dedent
from optparse import OptionParser

from canonical.database.sqlbase import (
    connect, ISOLATION_LEVEL_AUTOCOMMIT, sqlvalues)
from canonical.launchpad.scripts import db_options, logger_options, logger

import replication.helpers

def create_auth_set(cur):
    """Create the auth replication set if it doesn't already exist."""
    cur.execute("SELECT TRUE FROM _sl.sl_set WHERE set_id=2")
    if cur.fetchone() is not None:
        log.info("Auth set already exists.")
        return
    slonik_script = dedent("""\
        create set (
            id=@authdb_set, origin=@master_node,
            comment='SSO service tables');
        """)
    log.info("Creating authdb replication set.")
    replication.helpers.execute_slonik(slonik_script, sync=0)


def subscribe_auth_set(cur):
    """The authdb set subscription must match the lpmain set subscription.

    This is a requirement to move stuff between replication sets. It
    is also what we want (all nodes replicating everything).
    """
    cur.execute("""
        SELECT sub_receiver FROM _sl.sl_subscribe WHERE sub_set = 1
        EXCEPT
        SELECT sub_receiver FROM _sl.sl_subscribe WHERE sub_set = 2
        """)
    for node_id in (node_id for node_id, in cur.fetchall()):
        log.info("Subscribing Node #%d to authdb replication set" % node_id)
        success = replication.helpers.execute_slonik(dedent("""\
            subscribe set (
                id = @authdb_set, provider = @master_node,
                receiver = %d, forward = yes);
            """ % node_id), sync=0)
        if not success:
            log.error("Slonik failed. Exiting.")
            sys.exit(1)


def migrate_tables_and_sequences(cur):
    auth_tables, auth_sequences = (
        replication.helpers.calculate_replication_set(
            cur, replication.helpers.AUTHDB_SEED))

    slonik_script = ["try {"]
    for table_fqn in auth_tables:
        namespace, table_name = table_fqn.split('.')
        cur.execute("""
            SELECT tab_id, tab_set
            FROM _sl.sl_table
            WHERE tab_nspname = %s AND tab_relname = %s
            """ % sqlvalues(namespace, table_name))
        try:
            table_id, set_id = cur.fetchone()
        except IndexError:
            log.error("Table %s not found in _sl.sl_tables" % table_fqn)
            sys.exit(1)
        if set_id == 1:
            slonik_script.append("echo 'Moving table %s';" % table_fqn)
            slonik_script.append(
                "set move table "
                "(origin=@master_node, id=%d, new set=@authdb_set);"
                % table_id)
        elif set_id == 2:
            log.warn(
                "Table %s already in authdb replication set"
                % table_fqn)
        else:
            log.error("Unknown replication set %s" % set_id)
            sys.exit(1)

    for sequence_fqn in auth_sequences:
        namespace, sequence_name = sequence_fqn.split('.')
        cur.execute("""
            SELECT seq_id, seq_set
            FROM _sl.sl_sequence
            WHERE seq_nspname = %s AND seq_relname = %s
            """ % sqlvalues(namespace, sequence_name))
        try:
            sequence_id, set_id = cur.fetchone()
        except IndexError:
            log.error(
                "Sequence %s not found in _sl.sl_sequences" % sequence_fqn)
            sys.exit(1)
        if set_id == 1:
            slonik_script.append("echo 'Moving sequence %s';" % sequence_fqn)
            slonik_script.append(
                "set move sequence "
                "(origin=@master_node, id=%d, new set=@authdb_set);"
                % sequence_id)
        elif set_id == 2:
            log.warn(
                "Sequence %s already in authdb replication set."
                % sequence_fqn)
        else:
            log.error("Unknown replication set %s" % set_id)
            sys.exit(1)

    if len(slonik_script) == 1:
        log.warn("No tables or sequences to migrate.")
        return

    slonik_script.append(dedent("""\
        } on error {
            echo 'Failed to move one or more tables or sequences.';
            exit 1;
        }
        """))

    slonik_script = "\n".join(slonik_script)

    log.info("Running migration script...")
    if not replication.helpers.execute_slonik(slonik_script, sync=0):
        log.error("Slonik failed. Exiting.")
        sys.exit(1)


def main():
    parser = OptionParser()
    db_options(parser)
    logger_options(parser)
    options, args = parser.parse_args()

    global log
    log = logger(options)

    con = connect('slony', isolation=ISOLATION_LEVEL_AUTOCOMMIT)
    cur = con.cursor()

    # Don't start until cluster is synced.
    log.info("Waiting for sync.")
    replication.helpers.sync(0)

    create_auth_set(cur)
    subscribe_auth_set(cur)
    migrate_tables_and_sequences(cur)


log = None # Global log


if __name__ == '__main__':
    main()
=== modified file 'database/replication/preamble.py'
--- database/replication/preamble.py 2009-10-17 14:06:03 +0000
+++ database/replication/preamble.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/replication/repair-restored-db.py'
--- database/replication/repair-restored-db.py 2010-01-22 06:25:48 +0000
+++ database/replication/repair-restored-db.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/replication/report.py'
--- database/replication/report.py 2009-10-17 14:06:03 +0000
+++ database/replication/report.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/replication/slon_ctl.py'
--- database/replication/slon_ctl.py 2009-10-17 14:06:03 +0000
+++ database/replication/slon_ctl.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/replication/sync.py'
--- database/replication/sync.py 2010-01-06 15:52:31 +0000
+++ database/replication/sync.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2010 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/diagram.py'
--- database/schema/diagram.py 2010-02-09 01:31:05 +0000
+++ database/schema/diagram.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/emptytables.py'
--- database/schema/emptytables.py 2009-10-17 14:06:03 +0000
+++ database/schema/emptytables.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/fti.py'
--- database/schema/fti.py 2009-11-06 21:58:50 +0000
+++ database/schema/fti.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/online_fti_updater.py'
--- database/schema/online_fti_updater.py 2009-10-17 14:06:03 +0000
+++ database/schema/online_fti_updater.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== added file 'database/schema/patch-2207-47-0.sql'
--- database/schema/patch-2207-47-0.sql 1970-01-01 00:00:00 +0000
+++ database/schema/patch-2207-47-0.sql 2010-04-27 02:13:38 +0000
@@ -0,0 +1,6 @@
+SET client_min_messages=ERROR;
+
+UPDATE BugWatchActivity SET result = 9 WHERE result IS NULL;
+ALTER TABLE BugWatchActivity ALTER COLUMN result SET NOT NULL;
+
+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 47, 0);
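
This patch backfills the remaining NULL results before tightening the column. A minimal pre-flight sketch, assuming a psycopg2 connection with a placeholder DSN, to confirm the backfill left nothing that would make the SET NOT NULL step fail:

    # Hypothetical verification helper; the DSN is a placeholder.
    import psycopg2

    def assert_no_null_results(dsn='dbname=launchpad_dev'):
        conn = psycopg2.connect(dsn)
        try:
            cur = conn.cursor()
            cur.execute(
                "SELECT COUNT(*) FROM BugWatchActivity WHERE result IS NULL")
            remaining = cur.fetchone()[0]
            # Any remaining NULLs would abort ALTER TABLE ... SET NOT NULL.
            assert remaining == 0, '%d NULL results left' % remaining
        finally:
            conn.close()
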
=== added file 'database/schema/patch-2207-48-0.sql'
--- database/schema/patch-2207-48-0.sql 1970-01-01 00:00:00 +0000
+++ database/schema/patch-2207-48-0.sql 2010-04-27 02:13:38 +0000
@@ -0,0 +1,27 @@
+SET client_min_messages=ERROR;
+
+ALTER TABLE EmailAddress ADD CONSTRAINT emailaddress__person__fk
+    FOREIGN KEY (person) REFERENCES Person;
+
+CREATE TEMPORARY TABLE DudAccountLinks AS
+SELECT Person.id
+FROM Person
+LEFT OUTER JOIN Account ON Person.account = Account.id
+WHERE Person.account IS NOT NULL AND Account.id IS NULL;
+
+UPDATE Person SET account = NULL
+FROM DudAccountLinks
+WHERE Person.id = DudAccountLinks.id;
+
+DROP TABLE DudAccountLinks;
+
+ALTER TABLE Person ADD CONSTRAINT person__account__fk
+    FOREIGN KEY (account) REFERENCES Account;
+
+ALTER TABLE MailingListSubscription
+    ADD CONSTRAINT mailinglistsubscription__email_address_fk
+    FOREIGN KEY (email_address) REFERENCES EmailAddress
+    ON DELETE CASCADE;
+
+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 48, 0);
+
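
The interesting step is the temporary DudAccountLinks table: Person rows pointing at Accounts that no longer exist are detached before person__account__fk is created, since the ALTER TABLE would otherwise fail on the orphans. The same detection query, sketched from Python (psycopg2, placeholder DSN):

    import psycopg2

    def count_dud_account_links(dsn='dbname=launchpad_dev'):
        """Count Person rows whose account points at a missing Account."""
        conn = psycopg2.connect(dsn)
        try:
            cur = conn.cursor()
            cur.execute("""
                SELECT COUNT(*)
                FROM Person
                LEFT OUTER JOIN Account ON Person.account = Account.id
                WHERE Person.account IS NOT NULL AND Account.id IS NULL
                """)
            return cur.fetchone()[0]
        finally:
            conn.close()

Note also the ON DELETE CASCADE on MailingListSubscription.email_address: it is what makes the MailingListSubscriptionPruner removed from garbo.py later in this diff redundant.
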
=== modified file 'database/schema/pending/add-mailing-list-experts.py'
--- database/schema/pending/add-mailing-list-experts.py 2009-10-17 14:06:03 +0000
+++ database/schema/pending/add-mailing-list-experts.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/pending/create-openid-rp-configs.py'
--- database/schema/pending/create-openid-rp-configs.py 2009-10-17 14:06:03 +0000
+++ database/schema/pending/create-openid-rp-configs.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/pending/gnu-savannah-celebrity.py'
--- database/schema/pending/gnu-savannah-celebrity.py 2009-10-17 14:06:03 +0000
+++ database/schema/pending/gnu-savannah-celebrity.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/pending/migrate_kde_potemplates.py'
--- database/schema/pending/migrate_kde_potemplates.py 2009-10-17 14:06:03 +0000
+++ database/schema/pending/migrate_kde_potemplates.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/pending/new-person-columns.py'
--- database/schema/pending/new-person-columns.py 2009-10-17 14:06:03 +0000
+++ database/schema/pending/new-person-columns.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== added file 'database/schema/pending/patch-2207-49-0.sql'
--- database/schema/pending/patch-2207-49-0.sql 1970-01-01 00:00:00 +0000
+++ database/schema/pending/patch-2207-49-0.sql 2010-04-27 02:13:38 +0000
@@ -0,0 +1,16 @@
+SET client_min_messages=ERROR;
+
+DROP VIEW RevisionNumber;
+
+ALTER TABLE BranchRevision DROP COLUMN id;
+ALTER TABLE BranchRevision
+    ADD CONSTRAINT branchrevision_pkey
+    PRIMARY KEY (branch, revision);
+ALTER TABLE BranchRevision
+    DROP CONSTRAINT revision__branch__revision__key,
+    DROP CONSTRAINT revision__revision__branch__key,
+    DROP CONSTRAINT revisionnumber_branch_sequence_unique;
+CREATE UNIQUE INDEX branchrevision__branch__sequence__key
+    ON BranchRevision (branch, sequence) WHERE sequence IS NOT NULL;
+
+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 49, 0);
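
With the surrogate id gone, (branch, revision) becomes the natural primary key, and the partial unique index enforces uniqueness of (branch, sequence) only for rows that actually have a sequence. A toy demonstration of that partial-index behaviour (psycopg2, placeholder DSN, throwaway temp table):

    import psycopg2

    conn = psycopg2.connect('dbname=demo')  # placeholder DSN
    cur = conn.cursor()
    cur.execute("CREATE TEMP TABLE demo (branch int, sequence int)")
    cur.execute(
        "CREATE UNIQUE INDEX demo__branch__sequence__key "
        "ON demo (branch, sequence) WHERE sequence IS NOT NULL")
    cur.execute("INSERT INTO demo VALUES (1, NULL), (1, NULL)")  # allowed
    cur.execute("INSERT INTO demo VALUES (1, 1)")                # allowed
    try:
        cur.execute("INSERT INTO demo VALUES (1, 1)")  # duplicate, rejected
    except psycopg2.IntegrityError:
        pass
    conn.rollback()
    conn.close()
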
=== modified file 'database/schema/pending/prune-nonce.py'
--- database/schema/pending/prune-nonce.py 2009-10-17 14:06:03 +0000
+++ database/schema/pending/prune-nonce.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/pending/update-shippingrequest-types.py'
--- database/schema/pending/update-shippingrequest-types.py 2009-10-17 14:06:03 +0000
+++ database/schema/pending/update-shippingrequest-types.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/pending/update-translation-credits.py'
--- database/schema/pending/update-translation-credits.py 2009-10-17 14:06:03 +0000
+++ database/schema/pending/update-translation-credits.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -54,7 +54,7 @@
         pomsgid
     WHERE
         posubmission.active IS TRUE AND
-        posubmission.pomsgset=pomsgset.id AND 
+        posubmission.pomsgset=pomsgset.id AND
         potmsgset=potmsgset.id AND
         primemsgid=pomsgid.id AND
        published IS NOT TRUE AND
@@ -77,7 +77,7 @@
         pomsgid
     WHERE
         posubmission.active IS FALSE AND
-        posubmission.pomsgset=pomsgset.id AND 
+        posubmission.pomsgset=pomsgset.id AND
         pomsgset.potmsgset=potmsgset.id AND
         potmsgset.primemsgid=pomsgid.id AND
         posubmission.published IS TRUE AND
=== modified file 'database/schema/reset_sequences.py'
--- database/schema/reset_sequences.py 2009-10-17 14:06:03 +0000
+++ database/schema/reset_sequences.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg 2010-04-21 19:41:18 +0000
+++ database/schema/security.cfg 2010-04-27 02:13:38 +0000
@@ -35,20 +35,27 @@
 public.is_printable_ascii(text) = EXECUTE
 public.launchpaddatabaserevision = SELECT
 public.name_blacklist_match(text) = EXECUTE
-public.fticache =
 public.pillarname = SELECT
 public.ulower(text) = EXECUTE
-public._killall_backends(text) =
 public.generate_openid_identifier() = EXECUTE
 public.getlocalnodeid() = EXECUTE
 public.replication_lag() = EXECUTE
 public.replication_lag(integer) = EXECUTE
 public.assert_patch_applied(integer, integer, integer) = EXECUTE
+# Explicitly state 'no permissions on these objects' to silence
+# security.py warnings.
+public.fticache =
+public._killall_backends(text) =
 public.exclusivelocks =
 public.alllocks =
 public.pgstattuple(oid) =
 public.pgstattuple(text) =
 public.bugnotificationarchive =
+public.lp_account =
+public.lp_personlocation =
+public.lp_person =
+public.lp_teamparticipation =
+public.bug_update_latest_patch_uploaded(integer) =
 
 [ro]
 # A user with full readonly access to the database. Generally used for
@@ -61,59 +68,6 @@
 type=user
 groups=admin
 
-[sso_auth]
-# authdb replication set write access from the SSO service.
-type=user
-public.account = SELECT, INSERT, UPDATE, DELETE
-public.accountpassword = SELECT, INSERT, UPDATE, DELETE
-public.authtoken = SELECT, INSERT, UPDATE, DELETE
-public.emailaddress = SELECT, INSERT, UPDATE, DELETE
-public.openidrpsummary = SELECT, INSERT, UPDATE
-public.openidassociation = SELECT, INSERT, DELETE
-public.openidnonce = SELECT, INSERT, DELETE
-public.openidauthorization = SELECT, INSERT, UPDATE, DELETE
-public.person = SELECT
-public.personlocation = SELECT
-public.teamparticipation = SELECT
-
-[sso_main]
-# main replication set access from the SSO service login.launchpad.net
-type=user
-public.language = SELECT
-public.openidrpconfig = SELECT
-public.person = SELECT
-public.personlanguage = SELECT
-public.personlocation = SELECT
-public.shippingrequest = SELECT
-public.teammembership = SELECT
-public.teamparticipation = SELECT
-public.validpersoncache = SELECT
-# Needed for person.preferredemail to work.
-public.emailaddress = SELECT
-# Needed for OpenID login to work - Bug #352727
-public.country = SELECT
-# Needed for display of OpenID consumer logo per Bug #353926
-public.libraryfilealias = SELECT
-public.libraryfilecontent = SELECT
-
-[launchpad_auth]
-# authdb replication set access from the main Z3 application.
-type=user
-public.account = SELECT, INSERT, UPDATE, DELETE
-public.accountpassword = SELECT, INSERT, UPDATE, DELETE
-public.authtoken = SELECT, INSERT, UPDATE
-public.emailaddress = SELECT, INSERT, UPDATE, DELETE
-public.language = SELECT
-public.openidrpconfig = SELECT
-public.openidrpsummary = SELECT
-public.person = SELECT
-public.personlanguage = SELECT
-public.teammembership = SELECT
-public.teamparticipation = SELECT
-# XXX 2009-05-07 stub bug=373252: SELECT and DELETE permissions required
-# for garbo.py. INSERT permission needed for the tests.
-public.openidassociation = SELECT, INSERT, DELETE
-
 [launchpad_main]
 # lpmain replication set access from the main Z3 application.
 type=user
@@ -884,34 +838,6 @@
 public.country = SELECT
 public.parsedapachelog = SELECT, INSERT, UPDATE
 
-[sourcerer]
-type=user
-groups=script
-public.archive = SELECT
-public.archivearch = SELECT
-public.branch = SELECT, INSERT, UPDATE
-public.revision = SELECT, INSERT, UPDATE
-# Karma
-public.karma = SELECT, INSERT
-public.karmaaction = SELECT
-# To get at a source package's manifest
-public.distribution = SELECT
-public.distroseries = SELECT
-public.sourcepackagename = SELECT
-public.sourcepackagepublishinghistory = SELECT
-public.sourcepackagerelease = SELECT, UPDATE
-public.sourcepackagereleasefile = SELECT
-# To get at an upstream product's manifest
-public.product = SELECT
-public.productseries = SELECT
-public.productrelease = SELECT, UPDATE
-public.productreleasefile = SELECT
-# To get from source package to upstream
-public.packaging = SELECT
-# To get stuff from the librarian
-public.libraryfilealias = SELECT
-public.libraryfilecontent = SELECT
-
 [write]
 type=group
 # Full access except for tables that are exclusively updated by
@@ -1688,7 +1614,7 @@
 public.teamparticipation = SELECT
 public.validpersoncache = SELECT
 
-[mp-creation-job]
+[merge-proposal-jobs]
 type=user
 groups=script
 public.account = SELECT
@@ -1725,18 +1651,6 @@
 public.teamparticipation = SELECT
 public.validpersoncache = SELECT
 
-[update-preview-diffs]
-type=user
-groups=script
-public.branch = SELECT
-public.branchmergeproposal = SELECT, UPDATE
-public.branchmergeproposaljob = SELECT
-public.diff = SELECT, INSERT
-public.job = SELECT, UPDATE
-public.libraryfilealias = SELECT, INSERT
-public.libraryfilecontent = SELECT, INSERT
-public.previewdiff = SELECT, INSERT
-
 [upgrade-branches]
 type=user
 groups=script
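
As the new comment in the first hunk says, an empty right-hand side is an explicit statement rather than an omission: security.py warns about database objects that no section mentions, and `object =` with nothing after it records "this user gets no rights here" without triggering the warning. A rough sketch of how such entries read once parsed (standard-library ConfigParser; the section name is invented):

    from ConfigParser import SafeConfigParser
    from StringIO import StringIO

    SAMPLE = "[example_user]\npublic.pillarname = SELECT\npublic.fticache =\n"

    parser = SafeConfigParser()
    parser.readfp(StringIO(SAMPLE))
    for key, value in parser.items('example_user'):
        perms = [p.strip() for p in value.split(',') if p.strip()]
        # An empty list means "explicitly no permissions", not "unmentioned".
        print key, perms
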
=== modified file 'database/schema/security.py'
--- database/schema/security.py 2010-02-09 01:31:05 +0000
+++ database/schema/security.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/sort_sql.py'
--- database/schema/sort_sql.py 2009-10-17 14:06:03 +0000
+++ database/schema/sort_sql.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/trusted.sql'
--- database/schema/trusted.sql 2010-03-26 08:25:24 +0000
+++ database/schema/trusted.sql 2010-04-27 02:13:38 +0000
@@ -1470,36 +1470,36 @@
 
 -- Update the (redundant) column bug.latest_patch_uploaded when a
 -- a bug attachment is added or removed or if its type is changed.
-CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded(integer) RETURNS VOID
-    SECURITY DEFINER LANGUAGE plpgsql AS
-    $$
-    BEGIN
+CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded(integer)
+RETURNS VOID SECURITY DEFINER LANGUAGE plpgsql AS
+$$
+BEGIN
     UPDATE bug SET latest_patch_uploaded =
         (SELECT max(message.datecreated)
          FROM message, bugattachment
         WHERE bugattachment.message=message.id AND
             bugattachment.bug=$1 AND
             bugattachment.type=1)
     WHERE bug.id=$1;
-    END;
-    $$;
+END;
+$$;
 
 
-CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded_on_insert_update() RETURNS trigger
-    SECURITY DEFINER LANGUAGE plpgsql AS
-    $$
-    BEGIN
+CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded_on_insert_update()
+RETURNS trigger SECURITY DEFINER LANGUAGE plpgsql AS
+$$
+BEGIN
     PERFORM bug_update_latest_patch_uploaded(NEW.bug);
     RETURN NULL; -- Ignored - this is an AFTER trigger
-    END;
-    $$;
+END;
+$$;
 
 
-CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded_on_delete() RETURNS trigger
-    SECURITY DEFINER LANGUAGE plpgsql AS
-    $$
-    BEGIN
+CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded_on_delete()
+RETURNS trigger SECURITY DEFINER LANGUAGE plpgsql AS
+$$
+BEGIN
     PERFORM bug_update_latest_patch_uploaded(OLD.bug);
     RETURN NULL; -- Ignored - this is an AFTER trigger
-    END;
-    $$;
+END;
+$$;
=== modified file 'database/schema/unautovacuumable.py'
--- database/schema/unautovacuumable.py 2009-11-06 21:58:50 +0000
+++ database/schema/unautovacuumable.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
=== modified file 'database/schema/upgrade.py'
--- database/schema/upgrade.py 2009-10-17 14:06:03 +0000
+++ database/schema/upgrade.py 2010-04-27 02:13:38 +0000
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.5
+#!/usr/bin/python2.5 -S
 #
 # Copyright 2009 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
@@ -249,9 +249,10 @@
         id=@holding_set,
         provider=@master_node, receiver=@node%d_node, forward=yes);
     echo 'Waiting for sync';
-    sync (id=1);
+    sync (id=@master_node);
     wait for event (
-        origin=ALL, confirmed=ALL, wait on=@master_node, timeout=0
+        origin=@master_node, confirmed=ALL,
+        wait on=@master_node, timeout=0
         );
     """ % (slave_node.node_id, slave_node.node_id))
 
@@ -281,7 +282,7 @@
         (fqn(nspname, relname), tab_id)
         for nspname, relname, tab_id in cur.fetchall())
 
-    # Generate a slonik script to remove tables from the replication set, 
+    # Generate a slonik script to remove tables from the replication set,
     # and a DROP TABLE/DROP SEQUENCE sql script to run after.
     if tabs_to_drop:
        log.info("Dropping tables: %s" % ', '.join(
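
The slonik change follows the usual Slony idiom: generate a SYNC event on the master and then wait for that specific event (origin=@master_node) to be confirmed everywhere, so the script only proceeds once the preceding changes have replicated. For reference, a sketch of how upgrade.py assembles such a fragment (the node id here is invented):

    # Illustrative only; mirrors the string-interpolation pattern above.
    def slonik_wait_fragment(node_id):
        return ("""
            subscribe set (
                id=@holding_set,
                provider=@master_node, receiver=@node%d_node, forward=yes);
            echo 'Waiting for sync';
            sync (id=@master_node);
            wait for event (
                origin=@master_node, confirmed=ALL,
                wait on=@master_node, timeout=0
                );
            """ % node_id)
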
=== modified file 'lib/canonical/config/schema-lazr.conf'
--- lib/canonical/config/schema-lazr.conf 2010-04-19 03:44:27 +0000
+++ lib/canonical/config/schema-lazr.conf 2010-04-27 02:13:38 +0000
@@ -402,6 +402,11 @@
 # mapping done by branch-rewrite.py for.
 branch_rewrite_cache_lifetime: 10
 
+# Update Preview diff ready timeout
+#
+# How long, in minutes, we wait for a branch to be ready in order to
+# generate a diff for a merge proposal (in the UpdatePreviewDiffJob).
+update_preview_diff_ready_timeout: 15
 
 [codeimport]
 # Where the Bazaar imports are stored.
@@ -1419,6 +1424,25 @@
 port: 11217
 
 
+[merge_proposal_jobs]
+# The database user which will be used by this process.
+# datatype: string
+dbuser: merge-proposal-jobs
+storm_cache: generational
+storm_cache_size: 500
+
+# See [error_reports].
+error_dir: none
+
+# See [error_reports].
+oops_prefix: none
+
+# See [error_reports].
+copy_to_zlog: false
+
+##
+## TODO: delete mpcreationjobs section after 10.04 rollout.
+##
 [mpcreationjobs]
 # The database user which will be used by this process.
 # datatype: string
@@ -1435,6 +1459,9 @@
 # See [error_reports].
 copy_to_zlog: false
 
+##
+## TODO: delete update_preview_diffs section after 10.04 rollout.
+##
 [update_preview_diffs]
 dbuser: update-preview-diffs
 
@@ -1447,6 +1474,7 @@
 # See [error_reports].
 copy_to_zlog: false
 
+
 [upgrade_branches]
 dbuser: upgrade-branches
 
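
Code would read the new settings through the usual config object; illustratively (not self-contained — it needs a Launchpad environment, and the section holding the timeout key is assumed from the first hunk's surroundings):

    from canonical.config import config

    dbuser = config.merge_proposal_jobs.dbuser  # 'merge-proposal-jobs'
    timeout_minutes = config.codehosting.update_preview_diff_ready_timeout
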
=== added directory 'lib/canonical/launchpad/apidoc'
=== removed directory 'lib/canonical/launchpad/apidoc'
=== modified file 'lib/canonical/launchpad/daemons/tachandler.py'
--- lib/canonical/launchpad/daemons/tachandler.py 2010-04-05 09:22:54 +0000
+++ lib/canonical/launchpad/daemons/tachandler.py 2010-04-27 02:13:38 +0000
@@ -164,8 +164,8 @@
     def _waitForDaemonStartup(self):
         """ Wait for the daemon to fully start.
 
-        Times out after 20 seconds. If that happens, the log file will
-        not be cleaned up so the user can post-mortem it.
+        Times out after 20 seconds. If that happens, the log file content
+        will be included in the exception message for debugging purpose.
 
         :raises TacException: Timeout.
         """
@@ -178,8 +178,8 @@
             now = time.time()
 
             if now >= deadline:
-                raise TacException('Unable to start %s. Check %s.' % (
-                    self.tacfile, self.logfile))
+                raise TacException('Unable to start %s. Content of %s:\n%s' % (
+                    self.tacfile, self.logfile, open(self.logfile).read()))
 
     def tearDown(self):
         self.killTac()
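
The fixture change trades post-mortem log files for self-describing failures: the log is read into the exception at the moment of timeout. The pattern in isolation (all names here are hypothetical):

    import time

    class TacException(Exception):
        pass

    def wait_for_startup(is_ready, logfile, timeout=20):
        """Poll is_ready(); on timeout, embed the log in the exception."""
        deadline = time.time() + timeout
        while not is_ready():
            if time.time() >= deadline:
                raise TacException(
                    'Unable to start. Content of %s:\n%s'
                    % (logfile, open(logfile).read()))
            time.sleep(0.1)
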
=== modified file 'lib/canonical/launchpad/doc/product-update-remote-product-script.txt'
--- lib/canonical/launchpad/doc/product-update-remote-product-script.txt 2009-03-27 03:29:31 +0000
+++ lib/canonical/launchpad/doc/product-update-remote-product-script.txt 2010-04-27 02:13:38 +0000
@@ -14,7 +14,7 @@
     0
 
     >>> print err
-    INFO creating lockfile
+    INFO Creating lockfile: /var/lock/launchpad-updateremoteproduct.lock
     INFO 0 projects using BUGZILLA needing updating.
     ...
     INFO 0 projects using RT needing updating.
 
=== modified file 'lib/canonical/launchpad/scripts/garbo.py'
--- lib/canonical/launchpad/scripts/garbo.py 2010-04-08 08:55:10 +0000
+++ lib/canonical/launchpad/scripts/garbo.py 2010-04-27 02:13:38 +0000
@@ -26,9 +26,7 @@
 from canonical.launchpad.database.openidconsumer import OpenIDConsumerNonce
 from canonical.launchpad.interfaces import IMasterStore
 from canonical.launchpad.interfaces.emailaddress import EmailAddressStatus
-from canonical.launchpad.interfaces.looptuner import ITunableLoop
-from canonical.launchpad.utilities.looptuner import (
-    DBLoopTuner, TunableLoop)
+from canonical.launchpad.utilities.looptuner import TunableLoop
 from canonical.launchpad.webapp.interfaces import (
     IStoreSelector, MAIN_STORE, MASTER_FLAVOR)
 from lp.bugs.interfaces.bug import IBugSet
@@ -42,7 +40,6 @@
 from lp.code.model.branchjob import BranchJob
 from lp.code.model.codeimportresult import CodeImportResult
 from lp.code.model.revision import RevisionAuthor, RevisionCache
-from lp.registry.model.mailinglist import MailingListSubscription
 from lp.registry.model.person import Person
 from lp.services.job.model.job import Job
 from lp.services.scripts.base import (
@@ -346,150 +343,6 @@
         transaction.commit()
 
 
-class MailingListSubscriptionPruner(TunableLoop):
-    """Prune `MailingListSubscription`s pointing at deleted email addresses.
-
-    Users subscribe to mailing lists with one of their verified email
-    addresses. When they remove an address, the mailing list
-    subscription should go away too.
-    """
-
-    maximum_chunk_size = 1000
-
-    def __init__(self, log, abort_time=None):
-        super(MailingListSubscriptionPruner, self).__init__(log, abort_time)
-        self.subscription_store = IMasterStore(MailingListSubscription)
-        self.email_store = IMasterStore(EmailAddress)
-
-        (self.min_subscription_id,
-         self.max_subscription_id) = self.subscription_store.find(
-            (Min(MailingListSubscription.id),
-             Max(MailingListSubscription.id))).one()
-
-        self.next_subscription_id = self.min_subscription_id
-
-    def isDone(self):
-        return (self.min_subscription_id is None or
-                self.next_subscription_id > self.max_subscription_id)
-
-    def __call__(self, chunk_size):
-        result = self.subscription_store.find(
-            MailingListSubscription,
-            MailingListSubscription.id >= self.next_subscription_id,
-            MailingListSubscription.id < (self.next_subscription_id +
-                chunk_size))
-        used_ids = set(result.values(MailingListSubscription.email_addressID))
-        existing_ids = set(self.email_store.find(
-            EmailAddress.id, EmailAddress.id.is_in(used_ids)))
-        deleted_ids = used_ids - existing_ids
-
-        self.subscription_store.find(
-            MailingListSubscription,
-            MailingListSubscription.id >= self.next_subscription_id,
-            MailingListSubscription.id < (self.next_subscription_id +
-                chunk_size),
-            MailingListSubscription.email_addressID.is_in(deleted_ids)
-            ).remove()
-
-        self.next_subscription_id += chunk_size
-        transaction.commit()
-
-
-class PersonEmailAddressLinkChecker(TunableLoop):
-    """Report invalid references between the authdb and main replication sets.
-
-    We can't use referential integrity to ensure references remain valid,
-    so we have to check regularly for any bugs that creep into our code.
-
-    We don't repair links yet, but could add this feature. I'd
-    rather track down the source of problems and fix problems there
-    and avoid automatic repair, which might be dangerous. In particular,
-    replication lag introduces a number of race conditions that would
-    need to be addressed.
-    """
-    maximum_chunk_size = 1000
-
-    def __init__(self, log, abort_time=None):
-        super(PersonEmailAddressLinkChecker, self).__init__(log, abort_time)
-
-        self.person_store = IMasterStore(Person)
-        self.email_store = IMasterStore(EmailAddress)
-
-        # This query detects invalid links between Person and EmailAddress.
-        # The first part detects difference in opionion about what Account
-        # is linked to. The second part detects EmailAddresses linked to
-        # non existent Person records.
-        query = """
-            SELECT Person.id, EmailAddress.id
-            FROM EmailAddress, Person
-            WHERE EmailAddress.person = Person.id
-                AND Person.account IS DISTINCT FROM EmailAddress.account
-            UNION
-            SELECT NULL, EmailAddress.id
-            FROM EmailAddress LEFT OUTER JOIN Person
-                ON EmailAddress.person = Person.id
-            WHERE EmailAddress.person IS NOT NULL
-                AND Person.id IS NULL
-            """
-        # We need to issue this query twice, waiting between calls
-        # for all pending database changes to replicate. The known
-        # bad set are the entries common in both results.
-        bad_links_1 = set(self.person_store.execute(query))
-        transaction.abort()
-
-        self.blockForReplication()
-
-        bad_links_2 = set(self.person_store.execute(query))
-        transaction.abort()
-
-        self.bad_links = bad_links_1.intersection(bad_links_2)
-
-    def blockForReplication(self):
-        start = time.time()
-        while True:
-            lag = self.person_store.execute(
-                "SELECT COALESCE(EXTRACT(EPOCH FROM replication_lag()), 0);"
-                ).get_one()[0]
-            if lag < (time.time() - start):
-                return
-            # Guestimate on how long we should wait for. We cap
-            # it as several hours of lag can clear in an instant
-            # in some cases.
-            naptime = min(300, lag)
-            self.log.debug(
-                "Waiting for replication. Lagged %s secs. Napping %s secs."
-                % (lag, naptime))
-            time.sleep(naptime)
-
-    def isDone(self):
-        return not self.bad_links
-
-    def __call__(self, chunksize):
-        for counter in range(0, int(chunksize)):
-            if not self.bad_links:
-                return
-            person_id, emailaddress_id = self.bad_links.pop()
-            if person_id is None:
-                person = None
-            else:
-                person = self.person_store.get(Person, person_id)
-            emailaddress = self.email_store.get(EmailAddress, emailaddress_id)
-            self.report(person, emailaddress)
-            # We don't repair... yet.
-            # self.repair(person, emailaddress)
-        transaction.abort()
-
-    def report(self, person, emailaddress):
-        if person is None:
-            self.log.error(
-                "Corruption - '%s' is linked to a non-existant Person."
-                % emailaddress.email)
-        else:
-            self.log.error(
-                "Corruption - '%s' and '%s' reference different Accounts."
-                % (emailaddress.email, person.name))
-
-
 class PersonPruner(TunableLoop):
 
     maximum_chunk_size = 1000
@@ -662,7 +515,7 @@
     def __call__(self, chunk_size):
         chunk_size = int(chunk_size)
         ids_to_remove = list(self._ids_to_remove()[:chunk_size])
-        num_removed = self.job_store.find(
+        self.job_store.find(
             BranchJob,
             In(BranchJob.id, ids_to_remove)).remove()
         transaction.commit()
@@ -918,8 +771,6 @@
     CodeImportResultPruner,
     RevisionAuthorEmailLinker,
     HWSubmissionEmailLinker,
-    MailingListSubscriptionPruner,
-    PersonEmailAddressLinkChecker,
     BugNotificationPruner,
     BranchJobPruner,
     BugWatchActivityPruner,
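
Both removed loops are made redundant elsewhere in this branch: MailingListSubscriptionPruner by the ON DELETE CASCADE foreign key added in patch-2207-48-0.sql, and PersonEmailAddressLinkChecker by the real person__account__fk and emailaddress__person__fk constraints that become possible once these tables share a replication set. A toy cascade demo (psycopg2, placeholder DSN) of the behaviour that replaces the first pruner:

    import psycopg2

    conn = psycopg2.connect('dbname=demo')  # placeholder DSN
    cur = conn.cursor()
    cur.execute("CREATE TEMP TABLE email (id serial PRIMARY KEY)")
    cur.execute(
        "CREATE TEMP TABLE subscription ("
        "id serial PRIMARY KEY, "
        "email_address int REFERENCES email ON DELETE CASCADE)")
    cur.execute("INSERT INTO email DEFAULT VALUES")
    cur.execute("INSERT INTO subscription (email_address) VALUES (1)")
    cur.execute("DELETE FROM email WHERE id = 1")
    cur.execute("SELECT COUNT(*) FROM subscription")
    print cur.fetchone()[0]  # 0: the database now does the pruner's job
    conn.rollback()
    conn.close()
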
=== modified file 'lib/canonical/launchpad/scripts/tests/test_garbo.py'
--- lib/canonical/launchpad/scripts/tests/test_garbo.py 2010-04-13 01:49:42 +0000
+++ lib/canonical/launchpad/scripts/tests/test_garbo.py 2010-04-27 02:13:38 +0000
@@ -351,26 +351,6 @@
         LaunchpadZopelessLayer.switchDbUser('testadmin')
         self.assertEqual(sub3.owner, person3)
 
-    def test_MailingListSubscriptionPruner(self):
-        LaunchpadZopelessLayer.switchDbUser('testadmin')
-        team, mailing_list = self.factory.makeTeamAndMailingList(
-            'mlist-team', 'mlist-owner')
-        person = self.factory.makePerson(email='preferred@example.org')
-        email = self.factory.makeEmail('secondary@example.org', person)
-        transaction.commit()
-        mailing_list.subscribe(person, email)
-
-        # User remains subscribed if we run the garbage collector.
-        self.runDaily()
-        self.assertNotEqual(mailing_list.getSubscription(person), None)
-
-        # If we remove the email address that was subscribed, the
-        # garbage collector removes the subscription.
-        LaunchpadZopelessLayer.switchDbUser('testadmin')
-        Store.of(email).remove(email)
-        self.runDaily()
-        self.assertEqual(mailing_list.getSubscription(person), None)
-
     def test_PersonPruner(self):
         personset = getUtility(IPersonSet)
         # Switch the DB user because the garbo_daily user isn't allowed to
@@ -466,42 +446,6 @@
                 BugNotification.date_emailed < THIRTY_DAYS_AGO).count(),
             0)
 
-    def test_PersonEmailAddressLinkChecker(self):
-        LaunchpadZopelessLayer.switchDbUser('testadmin')
-
-        # Make an EmailAddress record reference a non-existant Person.
-        emailaddress = IMasterStore(EmailAddress).get(EmailAddress, 16)
-        emailaddress.personID = -1
-
-        # Make a Person record reference a different Account to its
-        # EmailAddress records.
-        person = IMasterStore(Person).get(Person, 1)
-        person_email = Store.of(person).find(
-            EmailAddress, person=person).any()
-        person.accountID = -1
-
-        # Run the garbage collector. We should get two ERROR reports
-        # about the corrupt data.
-        collector = self.runDaily()
-
-        # The PersonEmailAddressLinkChecker is not intelligent enough
-        # to repair corruption. It is only there to alert us to the
-        # issue so data can be manually repaired and the cause
-        # tracked down and fixed.
-        self.assertEqual(emailaddress.personID, -1)
-        self.assertNotEqual(person.accountID, person_email.accountID)
-
-        # The corruption has been reported though as a ERROR messages.
-        log_output = collector.logger.output_file.getvalue()
-        error_message_1 = (
-            "ERROR Corruption - "
-            "'test@canonical.com' is linked to a non-existant Person.")
-        self.assertNotEqual(log_output.find(error_message_1), -1)
-        error_message_2 = (
-            "ERROR Corruption - "
-            "'mark@example.com' and 'mark' reference different Accounts")
-        self.assertNotEqual(log_output.find(error_message_2), -1)
-
     def test_BranchJobPruner(self):
         # Garbo should remove jobs completed over 30 days ago.
         self.useBzrBranches()
=== modified file 'lib/contrib/glock.py'
--- lib/contrib/glock.py 2007-01-29 18:48:21 +0000
+++ lib/contrib/glock.py 2010-04-27 02:13:38 +0000
@@ -157,7 +157,7 @@
         the caller decided not to block.
         """
         if self.logger:
-            self.logger.info('creating lockfile')
+            self.logger.info('Creating lockfile: %s', self.fpath)
         if _windows:
             if blocking:
                 timeout = win32event.INFINITE
 
=== modified file 'lib/lp/answers/doc/expiration.txt'
--- lib/lp/answers/doc/expiration.txt 2009-07-23 17:49:31 +0000
+++ lib/lp/answers/doc/expiration.txt 2010-04-27 02:13:38 +0000
@@ -132,7 +132,7 @@
     ... stderr=subprocess.PIPE)
     >>> (out, err) = process.communicate()
     >>> print err
-    INFO creating lockfile
+    INFO Creating lockfile: /var/lock/launchpad-expire-questions.lock
     INFO Expiring OPEN and NEEDSINFO questions without activity for the
     last 15 days.
     INFO Found 5 questions to expire.
 
=== modified file 'lib/lp/archivepublisher/publishing.py'
--- lib/lp/archivepublisher/publishing.py 2010-02-09 00:17:40 +0000
+++ lib/lp/archivepublisher/publishing.py 2010-04-27 02:13:38 +0000
@@ -12,6 +12,7 @@
 import hashlib
 import logging
 import os
+import shutil
 
 from datetime import datetime
 
@@ -29,7 +30,7 @@
 from canonical.database.sqlbase import sqlvalues
 from lp.registry.interfaces.pocket import (
     PackagePublishingPocket, pocketsuffix)
-from lp.soyuz.interfaces.archive import ArchivePurpose
+from lp.soyuz.interfaces.archive import ArchivePurpose, ArchiveStatus
 from lp.soyuz.interfaces.binarypackagerelease import (
     BinaryPackageFormat)
 from lp.soyuz.interfaces.component import IComponentSet
@@ -596,3 +597,32 @@
         in_file.close()
 
         out_file.write(" %s % 16d %s\n" % (checksum, length, file_name))
+
+    def deleteArchive(self):
+        """Delete the archive.
+
+        Physically remove the entire archive from disk and set the archive's
+        status to DELETED.
+
+        Any errors encountered while removing the archive from disk will
+        be caught and an OOPS report generated.
+        """
+
+        root_dir = os.path.join(
+            self._config.distroroot, self.archive.owner.name,
+            self.archive.name)
+
+        self.log.info(
+            "Attempting to delete archive '%s/%s' at '%s'." % (
+                self.archive.owner.name, self.archive.name, root_dir))
+
+        try:
+            shutil.rmtree(root_dir)
+        except (shutil.Error, OSError), e:
+            self.log.warning(
+                "Failed to delete directory '%s' for archive '%s/%s'\n%s" % (
+                root_dir, self.archive.owner.name,
+                self.archive.name, e))
+
+        self.archive.status = ArchiveStatus.DELETED
+        self.archive.publish = False
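
deleteArchive is deliberately forgiving: a failed rmtree is caught and logged, but the status flip to DELETED still happens, which is what makes the repeated-call corner case in testDeletingPPA below safe. In outline, the call site looks like this (a sketch using names from this diff, not self-contained):

    # Sketch only: archive and logger come from the surrounding application.
    publisher = getPublisher(archive, None, logger)
    publisher.deleteArchive()
    # Afterwards the on-disk tree is gone (best effort) and:
    #   archive.status == ArchiveStatus.DELETED
    #   archive.publish == False
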
=== modified file 'lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py'
--- lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py 2009-10-17 14:06:03 +0000
+++ lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py 2010-04-27 02:13:38 +0000
@@ -17,7 +17,7 @@
 from canonical.launchpad.webapp import canonical_url
 
 from lp.archivepublisher.config import getPubConfig
-from lp.soyuz.interfaces.archive import IArchiveSet
+from lp.soyuz.interfaces.archive import IArchiveSet, ArchiveStatus
 from lp.soyuz.interfaces.archiveauthtoken import (
     IArchiveAuthTokenSet)
 from lp.soyuz.interfaces.archivesubscriber import (
@@ -241,6 +241,13 @@
                     ppa.name,
                     ppa.owner.displayname)
                 continue
+            elif ppa.status == ArchiveStatus.DELETED or ppa.enabled is False:
+                self.logger.info(
+                    "Skipping htacess updates for deleted or disabled PPA "
+                    " '%s' owned by %s.",
+                    ppa.name,
+                    ppa.owner.displayname)
+                continue
 
             self.ensureHtaccess(ppa)
             temp_htpasswd = self.generateHtpasswd(ppa, valid_tokens)
=== modified file 'lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py'
--- lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py 2010-03-09 07:29:18 +0000
+++ lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py 2010-04-27 02:13:38 +0000
@@ -24,6 +24,7 @@
 from lp.archivepublisher.config import getPubConfig
 from lp.archivepublisher.scripts.generate_ppa_htaccess import (
     HtaccessTokenGenerator)
+from lp.soyuz.interfaces.archive import ArchiveStatus
 from lp.soyuz.interfaces.archivesubscriber import (
     ArchiveSubscriberStatus)
 from lp.testing import TestCaseWithFactory
@@ -476,6 +477,47 @@
         self.assertFalse(os.path.isfile(htaccess))
         self.assertFalse(os.path.isfile(htpasswd))
 
+    def testSkippingOfDisabledPPAs(self):
+        """Test that the htaccess for disabled PPAs are not touched."""
+        subs, tokens = self.setupDummyTokens()
+        htaccess, htpasswd = self.ensureNoFiles()
+
+        # Setup subscription so that htaccess/htpasswd is pending generation.
+        now = datetime.now(pytz.UTC)
+        subs[0].date_expires = now + timedelta(minutes=3)
+        self.assertEqual(subs[0].status, ArchiveSubscriberStatus.CURRENT)
+
+        # Set the PPA as disabled.
+        self.ppa.disable()
+        self.assertFalse(self.ppa.enabled)
+
+        script = self.getScript()
+        script.main()
+
+        # The htaccess and htpasswd files should not be generated.
+        self.assertFalse(os.path.isfile(htaccess))
+        self.assertFalse(os.path.isfile(htpasswd))
+
+    def testSkippingOfDeletedPPAs(self):
+        """Test that the htaccess for deleted PPAs are not touched."""
+        subs, tokens = self.setupDummyTokens()
+        htaccess, htpasswd = self.ensureNoFiles()
+
+        # Setup subscription so that htaccess/htpasswd is pending generation.
+        now = datetime.now(pytz.UTC)
+        subs[0].date_expires = now + timedelta(minutes=3)
+        self.assertEqual(subs[0].status, ArchiveSubscriberStatus.CURRENT)
+
+        # Set the PPA as deleted.
+        self.ppa.status = ArchiveStatus.DELETED
+
+        script = self.getScript()
+        script.main()
+
+        # The htaccess and htpasswd files should not be generated.
+        self.assertFalse(os.path.isfile(htaccess))
+        self.assertFalse(os.path.isfile(htpasswd))
+
     def testSendingCancellationEmail(self):
         """Test that when a token is deactivated, its user gets an email.
 
=== modified file 'lib/lp/archivepublisher/tests/test_publisher.py'
--- lib/lp/archivepublisher/tests/test_publisher.py 2010-02-10 00:25:55 +0000
+++ lib/lp/archivepublisher/tests/test_publisher.py 2010-04-27 02:13:38 +0000
@@ -8,7 +8,6 @@
 
 import bz2
 import gzip
-import hashlib
 import os
 import shutil
 import stat
@@ -26,7 +25,7 @@
 from canonical.database.constants import UTC_NOW
 from canonical.launchpad.ftests.keys_for_tests import gpgkeysdir
 from lp.soyuz.interfaces.archive import (
-    ArchivePurpose, IArchiveSet)
+    ArchivePurpose, ArchiveStatus, IArchiveSet)
 from lp.soyuz.interfaces.binarypackagerelease import (
     BinaryPackageFormat)
 from lp.registry.interfaces.distribution import IDistributionSet
@@ -37,7 +36,6 @@
 from lp.soyuz.interfaces.publishing import PackagePublishingStatus
 from lp.archivepublisher.interfaces.archivesigningkey import (
     IArchiveSigningKey)
-from lp.testing import get_lsb_information
 from lp.soyuz.tests.test_publishing import TestNativePublishingBase
 from canonical.zeca.ftests.harness import ZecaTestSetup
 
@@ -95,6 +93,32 @@
         foo_path = "%s/main/f/foo/foo_666.dsc" % self.pool_dir
         self.assertEqual(open(foo_path).read().strip(), 'Hello world')
 
+    def testDeletingPPA(self):
+        """Test deleting a PPA"""
+        ubuntu_team = getUtility(IPersonSet).getByName('ubuntu-team')
+        test_archive = getUtility(IArchiveSet).new(
+            distribution=self.ubuntutest, owner=ubuntu_team,
+            purpose=ArchivePurpose.PPA)
+        publisher = getPublisher(test_archive, None, self.logger)
+
+        self.assertTrue(os.path.exists(publisher._config.archiveroot))
+
+        # Create a file inside archiveroot to ensure we're recursive.
+        open(os.path.join(
+            publisher._config.archiveroot, 'test_file'), 'w').close()
+
+        publisher.deleteArchive()
+        root_dir = os.path.join(
+            publisher._config.distroroot, test_archive.owner.name,
+            test_archive.name)
+        self.assertFalse(os.path.exists(root_dir))
+        self.assertEqual(test_archive.status, ArchiveStatus.DELETED)
+        self.assertEqual(test_archive.publish, False)
+
+        # Trying to delete it again won't fail, in the corner case where
+        # some admin manually deleted the repo.
+        publisher.deleteArchive()
+
     def testPublishPartner(self):
         """Test that a partner package is published to the right place."""
         archive = self.ubuntutest.getArchiveByComponent('partner')
@@ -104,8 +128,7 @@
             pub_config.poolroot, pub_config.temproot, self.logger)
         publisher = Publisher(
             self.logger, pub_config, disk_pool, archive)
-        pub_source = self.getPubSource(archive=archive,
-            filecontent="I am partner")
+        self.getPubSource(archive=archive, filecontent="I am partner")
 
         publisher.A_publish(False)
 
@@ -143,7 +166,7 @@
         disk_pool = DiskPool(
             pub_config.poolroot, pub_config.temproot, self.logger)
         publisher = Publisher(self.logger, pub_config, disk_pool, archive)
-        pub_source = self.getPubSource(
+        self.getPubSource(
             archive=archive, filecontent="I am partner",
             status=PackagePublishingStatus.PENDING)
 
@@ -230,8 +253,7 @@
             self.logger, self.config, self.disk_pool,
             self.ubuntutest.main_archive)
 
-        pub_source = self.getPubSource(
-            status=PackagePublishingStatus.PUBLISHED)
+        self.getPubSource(status=PackagePublishingStatus.PUBLISHED)
 
         # a new non-careful publisher won't find anything to publish, thus
         # no pockets will be *dirtied*.
@@ -251,7 +273,7 @@
             self.logger, self.config, self.disk_pool,
             self.ubuntutest.main_archive)
 
-        pub_source = self.getPubSource(
+        self.getPubSource(
             filecontent='Hello world',
             status=PackagePublishingStatus.PUBLISHED)
 
@@ -394,17 +416,17 @@
         ubuntu = getUtility(IDistributionSet)['ubuntu']
 
         spiv = person_set.getByName('spiv')
-        spiv_archive = archive_set.new(
+        archive_set.new(
             owner=spiv, distribution=ubuntu, purpose=ArchivePurpose.PPA)
         name16 = person_set.getByName('name16')
-        name16_archive = archive_set.new(
+        archive_set.new(
             owner=name16, distribution=ubuntu, purpose=ArchivePurpose.PPA)
 
-        pub_source = self.getPubSource(
+        self.getPubSource(
             sourcename="foo", filename="foo_1.dsc", filecontent='Hello world',
             status=PackagePublishingStatus.PENDING, archive=spiv.archive)
 
-        pub_source = self.getPubSource(
+        self.getPubSource(
             sourcename="foo", filename="foo_1.dsc", filecontent='Hello world',
             status=PackagePublishingStatus.PUBLISHED, archive=name16.archive)
 
@@ -467,7 +489,7 @@
         pub_source = self.getPubSource(
             sourcename="foo", filename="foo_1.dsc", filecontent='Hello world',
             status=PackagePublishingStatus.PENDING, archive=cprov.archive)
-        pub_bin = self.getPubBinaries(
+        self.getPubBinaries(
             pub_source=pub_source,
             description=" My leading spaces are normalised to a single "
                         "space but not trailing. \n It does nothing, "
@@ -478,7 +500,7 @@
         ignored_source = self.getPubSource(
             status=PackagePublishingStatus.DELETED,
             archive=cprov.archive)
-        pub_udeb = self.getPubBinaries(
+        self.getPubBinaries(
             pub_source=ignored_source, binaryname='bingo',
             description='nice udeb', format=BinaryPackageFormat.UDEB)[0]
 
@@ -609,27 +631,27 @@
         # waiting to be deleted, each in different pockets. The deleted
         # source in the release pocket should not be processed. We'll
         # also have a binary waiting to be deleted.
-        published_source = self.getPubSource(
+        self.getPubSource(
             pocket=PackagePublishingPocket.RELEASE,
             status=PackagePublishingStatus.PUBLISHED)
 
-        deleted_source_in_release_pocket = self.getPubSource(
+        self.getPubSource(
             pocket=PackagePublishingPocket.RELEASE,
             status=PackagePublishingStatus.DELETED)
 
-        removed_source = self.getPubSource(
+        self.getPubSource(
             scheduleddeletiondate=UTC_NOW,
             dateremoved=UTC_NOW,
             pocket=PackagePublishingPocket.UPDATES,
             status=PackagePublishingStatus.DELETED)
 
-        deleted_source = self.getPubSource(
+        self.getPubSource(
             pocket=PackagePublishingPocket.SECURITY,
             status=PackagePublishingStatus.DELETED)
 
-        deleted_binary = self.getPubBinaries(
+        self.getPubBinaries(
             pocket=PackagePublishingPocket.BACKPORTS,
-            status=PackagePublishingStatus.DELETED)[0]
+            status=PackagePublishingStatus.DELETED)
 
         # Run the deletion detection.
         publisher.A2_markPocketsWithDeletionsDirty()
@@ -681,19 +703,19 @@
 
         # Create pending deletions in RELEASE, BACKPORTS, SECURITY and
        # UPDATES pockets.
-        deleted_source = self.getPubSource(
+        self.getPubSource(
             pocket=PackagePublishingPocket.RELEASE,
             status=PackagePublishingStatus.DELETED)
 
-        deleted_binary = self.getPubBinaries(
+        self.getPubBinaries(
             pocket=PackagePublishingPocket.BACKPORTS,
             status=PackagePublishingStatus.DELETED)[0]
 
-        allowed_source_deletion = self.getPubSource(
+        self.getPubSource(
             pocket=PackagePublishingPocket.SECURITY,
             status=PackagePublishingStatus.DELETED)
 
-        allowed_binary_deletion = self.getPubBinaries(
+        self.getPubBinaries(
             pocket=PackagePublishingPocket.UPDATES,
             status=PackagePublishingStatus.DELETED)[0]
 
@@ -763,7 +785,7 @@
             self.logger, self.config, self.disk_pool,
             self.ubuntutest.main_archive)
 
-        pub_source = self.getPubSource(filecontent='Hello world')
+        self.getPubSource(filecontent='Hello world')
 
         publisher.A_publish(False)
         publisher.C_doFTPArchive(False)
@@ -848,8 +870,7 @@
         archive_publisher = getPublisher(
             cprov.archive, allowed_suites, self.logger)
 
-        pub_source = self.getPubSource(
-            filecontent='Hello world', archive=cprov.archive)
+        self.getPubSource(filecontent='Hello world', archive=cprov.archive)
 
         archive_publisher.A_publish(False)
         self.layer.txn.commit()
@@ -952,8 +973,7 @@
         allowed_suites = []
         archive_publisher = getPublisher(
             named_ppa, allowed_suites, self.logger)
-        pub_source = self.getPubSource(
-            filecontent='Hello world', archive=named_ppa)
+        self.getPubSource(filecontent='Hello world', archive=named_ppa)
 
         archive_publisher.A_publish(False)
         self.layer.txn.commit()
@@ -1062,7 +1082,7 @@
         Publish files in pool, generate archive indexes and release files.
         """
         self.setupPublisher(archive)
-        pub_source = self.getPubSource(archive=archive)
+        self.getPubSource(archive=archive)
 
         self.archive_publisher.A_publish(False)
         transaction.commit()
=== modified file 'lib/lp/bugs/browser/bugwatch.py'
--- lib/lp/bugs/browser/bugwatch.py 2010-01-15 03:32:46 +0000
+++ lib/lp/bugs/browser/bugwatch.py 2010-04-27 02:13:38 +0000
@@ -6,12 +6,15 @@
 __metaclass__ = type
 __all__ = [
     'BugWatchSetNavigation',
+    'BugWatchActivityPortletView',
     'BugWatchEditView',
     'BugWatchView']
 
+
 from zope.component import getUtility
 from zope.interface import Interface
 
+from canonical.database.constants import UTC_NOW
 from canonical.widgets.textwidgets import URIWidget
 
 from canonical.launchpad import _
@@ -21,7 +24,8 @@
 from canonical.launchpad.fields import URIField
 from canonical.launchpad.webapp.interfaces import ILaunchBag
 from lp.bugs.interfaces.bugwatch import (
-    IBugWatch, IBugWatchSet, NoBugTrackerFound, UnrecognizedBugTrackerURL)
+    BUG_WATCH_ACTIVITY_SUCCESS_STATUSES, IBugWatch, IBugWatchSet,
+    NoBugTrackerFound, UnrecognizedBugTrackerURL)
 from canonical.launchpad.webapp import (
     action, canonical_url, custom_widget, GetitemNavigation,
     LaunchpadFormView, LaunchpadView)
@@ -99,6 +103,11 @@
         """See `LaunchpadFormView.`"""
         return {'url' : self.context.url}
 
+    @property
+    def watch_has_activity(self):
+        """Return True if there has been activity on the bug watch."""
+        return not self.context.activity.is_empty()
+
     def validate(self, data):
         """See `LaunchpadFormView.`"""
         if 'url' not in data:
@@ -136,3 +145,53 @@
         return canonical_url(getUtility(ILaunchBag).bug)
 
     cancel_url = next_url
+
+
+class BugWatchActivityPortletView(LaunchpadFormView):
+    """A portlet for displaying the activity of a bug watch."""
+
+    schema = BugWatchEditForm
+
+    def userCanReschedule(self, action=None):
+        """Return True if the current user can reschedule the bug watch."""
+        return self.context.can_be_rescheduled
+
+    @action('Update Now', name='reschedule', condition=userCanReschedule)
+    def reschedule_action(self, action, data):
+        """Schedule the current bug watch for immediate checking."""
+        bugwatch = self.context
+        bugwatch.setNextCheck(UTC_NOW)
+        self.request.response.addInfoNotification(
+            structured(
+                'The <a href="%(url)s">%(bugtracker)s #%(remote_bug)s</a> '
+                'bug watch has been scheduled for immediate checking.',
+                url=bugwatch.url, bugtracker=bugwatch.bugtracker.name,
+                remote_bug=bugwatch.remotebug))
+
+    @property
+    def next_url(self):
+        return canonical_url(getUtility(ILaunchBag).bug)
+
+    cancel_url = next_url
+
+    @property
+    def recent_watch_activity(self):
+        """Return a list of dicts representing recent watch activity."""
+        activity_items = []
+        for activity in self.context.activity:
+            if activity.result in BUG_WATCH_ACTIVITY_SUCCESS_STATUSES:
+                icon = "/@@/yes"
+                completion_message = "completed successfully"
+            else:
+                icon = "/@@/no"
+                completion_message = (
+                    "failed with error '%s'" % activity.result.title)
+
+            activity_items.append({
+                'icon': icon,
+                'date': activity.activity_date,
+                'completion_message': completion_message,
+                'result_text': activity.result.title,
+                })
+
+        return activity_items
 
=== modified file 'lib/lp/bugs/browser/configure.zcml'
--- lib/lp/bugs/browser/configure.zcml 2010-03-11 01:39:25 +0000
+++ lib/lp/bugs/browser/configure.zcml 2010-04-27 02:13:38 +0000
@@ -1103,6 +1103,12 @@
         class="lp.bugs.browser.bugwatch.BugWatchEditView"
         permission="launchpad.AnyPerson"
         template="../templates/bugwatch-editform.pt"/>
+    <browser:page
+        for="lp.bugs.interfaces.bugwatch.IBugWatch"
+        name="+portlet-activity"
+        class="lp.bugs.browser.bugwatch.BugWatchActivityPortletView"
+        permission="launchpad.AnyPerson"
+        template="../templates/bugwatch-portlet-activity.pt"/>
     <browser:pages
         for="lp.bugs.interfaces.bugwatch.IBugWatch"
         permission="launchpad.AnyPerson">
 
=== modified file 'lib/lp/bugs/browser/tests/bugwatch-views.txt'
--- lib/lp/bugs/browser/tests/bugwatch-views.txt 2009-10-22 11:55:51 +0000
+++ lib/lp/bugs/browser/tests/bugwatch-views.txt 2010-04-27 02:13:38 +0000
@@ -1,4 +1,5 @@
-= Bug Watch Edit Page =
+Bug Watch Edit Page
+===================
 
 It's possible to edit a bug watch on +edit, as well as deleting it.
 Deleting a bug watch is only possible when the bug watch isn't linked to
@@ -28,3 +29,84 @@
     >>> [action.label for action in unlinked_bugwatch_view.actions
     ...     if action.available()]
     ['Change', 'Delete Bug Watch']
+
+
+Recent activity
+---------------
+
+The Bug Watch +edit page displays a list of the recent activity for the
+watch. This is provided by the BugWatch activity portlet view and can be
+accessed via the recent_watch_activity property of
+BugWatchActivityPortletView.
+
+We'll create a new watch in order to demonstrate this.
+
+    >>> from canonical.launchpad.ftests import login
+    >>> login('foo.bar@canonical.com')
+    >>> new_watch = factory.makeBugWatch()
+
+The view for the new watch will have an empty recent_watch_activity list
+since it hasn't been updated yet.
+
+    >>> new_watch_view = create_initialized_view(
+    ...     new_watch, '+portlet-activity')
+    >>> len(new_watch_view.recent_watch_activity)
+    0
+
+The BugWatch +edit view has a watch_has_activity property, which is used
+to determine whether the recent activity portlet should be displayed.
+
+    >>> new_watch_edit_view = create_initialized_view(
+    ...     new_watch, '+edit')
+    >>> print new_watch_edit_view.watch_has_activity
+    False
+
+Adding a successful activity entry for the watch will cause it to show
+up in the portlet view's recent_watch_activity property.
+
+    >>> new_watch.addActivity()
+    >>> len(new_watch_view.recent_watch_activity)
+    1
+
+The BugWatch +edit view's watch_has_activity property will also have
+changed.
+
+    >>> new_watch_edit_view = create_initialized_view(
+    ...     new_watch, '+edit')
+    >>> print new_watch_edit_view.watch_has_activity
+    True
+
+Each entry in the recent_watch_activity list is a dict containing data
+about the activity.
+
+    >>> from pprint import pprint
+    >>> for activity_dict in new_watch_view.recent_watch_activity:
+    ...     pprint(activity_dict)
+    {'completion_message': 'completed successfully',
+     'date': datetime.datetime(...tzinfo=<UTC>),
+     'icon': '/@@/yes',
+     'result_text': 'Synchronisation succeeded'}
+
+If an activity entry records a failure, the 'icon' entry in the dict
+will point to the 'no' icon and the completion_message will explain the
+failure.
+
+We'll commit the transaction to make sure that the two activities have
+different dates.
+
+    >>> import transaction
+    >>> transaction.commit()
+
+    >>> from lp.bugs.interfaces.bugwatch import BugWatchActivityStatus
+    >>> new_watch.addActivity(result=BugWatchActivityStatus.BUG_NOT_FOUND)
+    >>> for activity_dict in new_watch_view.recent_watch_activity:
+    ...     pprint(activity_dict)
+    {'completion_message': "failed with error 'Bug Not Found'",
+     'date': datetime.datetime(...tzinfo=<UTC>),
+     'icon': '/@@/no',
+     'result_text': 'Bug Not Found'}
+    {'completion_message': 'completed successfully',
+     'date': datetime.datetime(...tzinfo=<UTC>),
+     'icon': '/@@/yes',
+     'result_text': 'Synchronisation succeeded'}
+
+
 
=== modified file 'lib/lp/bugs/configure.zcml'
--- lib/lp/bugs/configure.zcml 2010-04-14 12:55:44 +0000
+++ lib/lp/bugs/configure.zcml 2010-04-27 02:13:38 +0000
@@ -849,7 +849,9 @@
             bug
             bugtasks
             bugtracker
+            can_be_rescheduled
             datecreated
+            failed_activity
             getLastErrorMessage
             hasComment
             unpushed_comments
@@ -870,7 +872,8 @@
         permission="launchpad.AnyPerson"
         attributes="
             destroySelf
-            addActivity"
+            addActivity
+            setNextCheck"
         set_attributes="bugtracker remotebug"/>
     <require
         permission="launchpad.Admin"
 
=== modified file 'lib/lp/bugs/doc/bug-watch-activity.txt'
--- lib/lp/bugs/doc/bug-watch-activity.txt 2010-04-21 10:30:24 +0000
+++ lib/lp/bugs/doc/bug-watch-activity.txt 2010-04-27 02:13:38 +0000
@@ -50,11 +50,14 @@
     >>> activity.activity_date
     datetime.datetime...
 
+The BugWatchActivity's result will be BugWatchActivityStatus.SYNC_SUCCEEDED.
+
+    >>> print activity.result.title
+    Synchronisation succeeded
+
 The other fields on the BugWatchActivity record, which aren't required,
 will all be None.
 
-    >>> print activity.result
-    None
     >>> print activity.message
     None
     >>> print activity.oops_id
@@ -83,12 +86,13 @@
     >>> print bug_watch.activity.count()
     2
 
-The most recent activity entry will have a result of None since it was
+The most recent activity entry will have a result of
+BugWatchActivityStatus.SYNC_SUCCEEDED since it was
 successful.
 
     >>> most_recent_activity = bug_watch.activity.first()
-    >>> print most_recent_activity.result
-    None
+    >>> print most_recent_activity.result.title
+    Synchronisation succeeded
 
 Its message will also be empty
 
 
=== modified file 'lib/lp/bugs/doc/bugnotification-sending.txt'
--- lib/lp/bugs/doc/bugnotification-sending.txt 2010-04-15 10:58:02 +0000
+++ lib/lp/bugs/doc/bugnotification-sending.txt 2010-04-27 02:13:38 +0000
@@ -984,7 +984,7 @@
     >>> process.returncode
     0
     >>> print err
-    INFO creating lockfile
+    INFO Creating lockfile: /var/lock/launchpad-send-bug-notifications.lock
     INFO Notifying mark@example.com about bug 2.
     ...
     INFO Notifying support@ubuntu.com about bug 2.
 
=== modified file 'lib/lp/bugs/doc/bugtask-expiration.txt'
--- lib/lp/bugs/doc/bugtask-expiration.txt 2010-04-14 13:23:02 +0000
+++ lib/lp/bugs/doc/bugtask-expiration.txt 2010-04-27 02:13:38 +0000
@@ -445,7 +445,7 @@
     ...     stderr=subprocess.PIPE)
     >>> (out, err) = process.communicate()
     >>> print err
-    INFO creating lockfile
+    INFO Creating lockfile: /var/lock/launchpad-expire-bugtasks.lock
     INFO Expiring unattended, INCOMPLETE bugtasks older than
     60 days for projects that use Launchpad Bugs.
     INFO Found 3 bugtasks to expire.
 
=== modified file 'lib/lp/bugs/doc/bugtask.txt'
--- lib/lp/bugs/doc/bugtask.txt 2010-01-21 17:40:23 +0000
+++ lib/lp/bugs/doc/bugtask.txt 2010-04-27 02:13:38 +0000
@@ -1104,7 +1104,7 @@
     >>> (out, err) = process.communicate()
 
     >>> print err
-    INFO creating lockfile
+    INFO Creating lockfile: /var/lock/launchpad-launchpad-targetnamecacheupdater.lock
     INFO Updating targetname cache of bugtasks.
     INFO Updating 1 BugTasks (starting id: 2).
     INFO Updating ...BugTasks...
 
=== modified file 'lib/lp/bugs/doc/bugwatch.txt'
--- lib/lp/bugs/doc/bugwatch.txt 2010-04-21 10:30:24 +0000
+++ lib/lp/bugs/doc/bugwatch.txt 2010-04-27 02:13:38 +0000
@@ -513,3 +513,87 @@
     >>> bug.removeWatch(bug_watch, factory.makePerson())
     >>> [bug_watch.remotebug for bug_watch in bug.watches]
     []
+
+
+Checking if a watch can be rescheduled
+--------------------------------------
+
+IBugWatch provides an attribute, can_be_rescheduled, which indicates
+whether or not the watch can be rescheduled. For a new bug watch this
+will be False.
+
+    >>> schedulable_watch = factory.makeBugWatch()
+    >>> schedulable_watch.next_check = None
+    >>> schedulable_watch.can_be_rescheduled
+    False
+
+If there's been activity on the watch but it's always been successful,
+can_be_rescheduled will be False.
+
+    >>> schedulable_watch.addActivity()
+    >>> schedulable_watch.can_be_rescheduled
+    False
+
+If the watch's updates have failed less than 60% of the time,
+can_be_rescheduled will be True.
+
+    >>> from lp.bugs.interfaces.bugwatch import BugWatchActivityStatus
+    >>> schedulable_watch.addActivity(
+    ...     result=BugWatchActivityStatus.BUG_NOT_FOUND)
+    >>> schedulable_watch.can_be_rescheduled
+    True
+
+If the watch is rescheduled, can_be_rescheduled will be False, since the
+next_check time for the watch will be in the past (or in this case is
+now) and therefore it will be checked with the next checkwatches run.
+
+    >>> from pytz import utc
+    >>> from datetime import datetime
+    >>> schedulable_watch.next_check = datetime.now(utc)
+    >>> schedulable_watch.can_be_rescheduled
+    False
+
+However, if the watch has failed more than 60% of the time,
+can_be_rescheduled will be False, since it's assumed that the watch
+needs attention in order for it to be able to work again.
+
+    >>> schedulable_watch.next_check = None
+    >>> schedulable_watch.addActivity(
+    ...     result=BugWatchActivityStatus.BUG_NOT_FOUND)
+    >>> schedulable_watch.can_be_rescheduled
+    False
+
+
+Rescheduling a watch
+--------------------
+
+The rescheduling of a watch is done via IBugWatch.setNextCheck(). This
+is to ensure that watches are only rescheduled when can_be_rescheduled
+is True (note that the BugWatch Scheduler bypasses setNextCheck() and
+sets next_check directly because it has admin privileges).
+
+The schedulable_watch that we used in the previous test cannot currently
+be rescheduled.
+
+    >>> schedulable_watch.can_be_rescheduled
+    False
+
+Calling setNextCheck() on this watch will cause an Exception,
+BugWatchCannotBeRescheduled, to be raised.
+
+    >>> schedulable_watch.setNextCheck(datetime.now(utc))
+    Traceback (most recent call last):
+    ...
+    BugWatchCannotBeRescheduled...
+
+If we add some activity to the watch, to make its can_be_rescheduled
+property become True, setNextCheck() will succeed.
+
+    >>> schedulable_watch.addActivity()
+    >>> schedulable_watch.can_be_rescheduled
+    True
+
+    >>> next_check = datetime.now(utc)
+    >>> schedulable_watch.setNextCheck(next_check)
+    >>> schedulable_watch.next_check == next_check
+    True
 
=== modified file 'lib/lp/bugs/doc/checkwatches.txt'
--- lib/lp/bugs/doc/checkwatches.txt 2010-04-21 10:30:24 +0000
+++ lib/lp/bugs/doc/checkwatches.txt 2010-04-27 02:13:38 +0000
@@ -44,7 +44,7 @@
     0
 
     >>> print err
-    INFO creating lockfile
+    INFO Creating lockfile: /var/lock/launchpad-checkwatches.lock
     DEBUG No global batch size specified.
     DEBUG Skipping updating Ubuntu Bugzilla watches.
     DEBUG No watches to update on http://bugs.debian.org
 
=== modified file 'lib/lp/bugs/doc/cve-update.txt'
--- lib/lp/bugs/doc/cve-update.txt 2009-06-12 16:36:02 +0000
+++ lib/lp/bugs/doc/cve-update.txt 2010-04-27 02:13:38 +0000
@@ -37,7 +37,7 @@
     ...     )
     >>> (output, empty) = process.communicate()
     >>> print output
-    INFO creating lockfile
+    INFO Creating lockfile: /var/lock/launchpad-updatecve.lock
     ...
     INFO CVE-1999-0002 created
     INFO Creating new SGI reference for 1999-0002
@@ -91,7 +91,7 @@
     ...     )
     >>> (output, empty) = process.communicate()
     >>> print output
-    INFO creating lockfile
+    INFO Creating lockfile: /var/lock/launchpad-updatecve.lock
     ...
     INFO Creating new CERT reference for 1999-0002
     INFO Creating new CIAC reference for 1999-0002
 
=== modified file 'lib/lp/bugs/interfaces/bugwatch.py'
--- lib/lp/bugs/interfaces/bugwatch.py 2010-03-23 12:55:05 +0000
+++ lib/lp/bugs/interfaces/bugwatch.py 2010-04-27 02:13:38 +0000
@@ -8,7 +8,9 @@
 __metaclass__ = type
 
 __all__ = [
+    'BUG_WATCH_ACTIVITY_SUCCESS_STATUSES',
     'BugWatchActivityStatus',
+    'BugWatchCannotBeRescheduled',
     'IBugWatch',
     'IBugWatchActivity',
     'IBugWatchSet',
@@ -93,6 +95,42 @@
         Launchpad cannot import the status of private remote bugs.
         """)
 
+    SYNC_SUCCEEDED = DBItem(9, """
+        Synchronisation succeeded
+
+        The remote bug's status was successfully synchronized to Launchpad.
+        """)
+
+    COMMENT_IMPORT_FAILED = DBItem(10, """
+        Unable to import comments
+
+        The remote bug's status was synchronized successfully but
+        comments could not be imported from the remote bug.
+        """)
+
+    COMMENT_PUSH_FAILED = DBItem(11, """
+        Unable to push comments
+
+        The remote bug's status was synchronized successfully and
+        its comments were successfully imported but Launchpad was unable
+        to push comments back to the remote bug.
+        """)
+
+    BACKLINK_FAILED = DBItem(12, """
+        Unable to link the remote bug to Launchpad
+
+        The remote bug's status and comments were synchronized
+        successfully with Launchpad but Launchpad was unable to set the
+        remote bug's link back to the relevant Launchpad bug.
+        """)
+
+
+# The set of BugWatchActivityStatuses that are considered to indicate
+# success.
+BUG_WATCH_ACTIVITY_SUCCESS_STATUSES = [
+    BugWatchActivityStatus.SYNC_SUCCEEDED,
+    ]
+
 
 class IBugWatch(IHasBug):
     """A bug on a remote system."""
@@ -173,6 +211,10 @@
         Text(title=_('The URL at which to view the remote bug.'),
             readonly=True))
 
+    can_be_rescheduled = Attribute(
+        "A True or False indicator of whether or not this watch can be "
+        "rescheduled.")
+
     def updateImportance(remote_importance, malone_importance):
         """Update the importance of the bug watch and any linked bug task.
 
@@ -213,6 +255,13 @@
     def addActivity(result=None, message=None, oops_id=None):
         """Add an `IBugWatchActivity` record for this BugWatch."""
 
+    def setNextCheck(next_check):
+        """Set the next_check time of the watch.
+
+        :raises: `BugWatchCannotBeRescheduled` if
+            `IBugWatch.can_be_rescheduled` is False.
+        """
+
 
 # Defined here because of circular imports.
 IBugTracker['watches'].value_type.schema = IBugWatch
@@ -326,3 +375,6 @@
         title=_('OOPS ID'), readonly=True,
         description=_("The OOPS ID associated with this activity."))
 
+
+class BugWatchCannotBeRescheduled(Exception):
+    """The current `IBugWatch` can't be rescheduled."""
 
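Taken together, the new statuses split BugWatchActivityStatus into one
success value and a family of failures, with BUG_WATCH_ACTIVITY_SUCCESS_STATUSES
as the single place that records which is which. A minimal sketch of how
a caller can classify results the way the activity portlet does (the
describe() helper is hypothetical, not part of this branch):

    from lp.bugs.interfaces.bugwatch import (
        BUG_WATCH_ACTIVITY_SUCCESS_STATUSES, BugWatchActivityStatus)

    def describe(result):
        # Hypothetical helper: any status outside the success set is
        # treated as a failure, mirroring recent_watch_activity.
        if result in BUG_WATCH_ACTIVITY_SUCCESS_STATUSES:
            return "completed successfully"
        return "failed with error '%s'" % result.title

    print describe(BugWatchActivityStatus.SYNC_SUCCEEDED)
    # completed successfully
    print describe(BugWatchActivityStatus.BUG_NOT_FOUND)
    # failed with error 'Bug Not Found'
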
=== modified file 'lib/lp/bugs/model/bugwatch.py'
--- lib/lp/bugs/model/bugwatch.py 2010-04-09 15:04:19 +0000
+++ lib/lp/bugs/model/bugwatch.py 2010-04-27 02:13:38 +0000
@@ -12,6 +12,9 @@
 
 import re
 import urllib
+
+from datetime import datetime
+from pytz import utc
 from urlparse import urlunsplit
 
 from zope.event import notify
@@ -44,8 +47,9 @@
 
 from lp.bugs.interfaces.bugtracker import BugTrackerType, IBugTrackerSet
 from lp.bugs.interfaces.bugwatch import (
-    BugWatchActivityStatus, IBugWatch, IBugWatchActivity, IBugWatchSet,
-    NoBugTrackerFound, UnrecognizedBugTrackerURL)
+    BUG_WATCH_ACTIVITY_SUCCESS_STATUSES, BugWatchActivityStatus,
+    BugWatchCannotBeRescheduled, IBugWatch, IBugWatchActivity,
+    IBugWatchSet, NoBugTrackerFound, UnrecognizedBugTrackerURL)
 from lp.bugs.model.bugmessage import BugMessage
 from lp.bugs.model.bugset import BugSetBase
 from lp.bugs.model.bugtask import BugTask
@@ -66,6 +70,9 @@
     }
 
 
+WATCH_RESCHEDULE_THRESHOLD = 0.6
+
+
 class BugWatch(SQLBase):
     """See `IBugWatch`."""
     implements(IBugWatch)
@@ -212,6 +219,8 @@
 
         if self.last_error_type in error_message_mapping:
             message = error_message_mapping[self.last_error_type]
+        elif self.last_error_type != BugWatchActivityStatus.UNKNOWN:
+            message = self.last_error_type.description
         else:
             message = ("Launchpad couldn't import bug #%(bug)s from "
                 "%(bugtracker)s.")
@@ -284,7 +293,12 @@
         """See `IBugWatch`."""
         activity = BugWatchActivity()
         activity.bug_watch = self
-        activity.result = result
+        if result is None:
+            # If no result is passed we assume that the activity
+            # succeeded and set the result field accordingly.
+            activity.result = BugWatchActivityStatus.SYNC_SUCCEEDED
+        else:
+            activity.result = result
         if message is not None:
             activity.message = unicode(message)
         if oops_id is not None:
@@ -300,6 +314,52 @@
             BugWatchActivity.bug_watch == self).order_by(
                 Desc('activity_date'))
 
+    @property
+    def can_be_rescheduled(self):
+        """See `IBugWatch`."""
+        if (self.next_check is not None and
+            self.next_check <= datetime.now(utc)):
+            # If the watch is already scheduled for a time in the past
+            # (or for right now) it can't be rescheduled, since it
+            # should be checked by the next checkwatches run anyway.
+            return False
+
+        if self.activity.is_empty():
+            # Don't show the reschedule button if the watch has never
+            # been checked.
+            return False
+
+        if self.failed_activity.is_empty():
+            # Don't show the reschedule button if the watch has never
+            # failed.
+            return False
+
+        # If the ratio is at or below the reschedule threshold, we
+        # can show the button.
+        failure_ratio = (
+            float(self.failed_activity.count()) /
+            self.activity.count())
+        return failure_ratio <= WATCH_RESCHEDULE_THRESHOLD
+
+    @property
+    def failed_activity(self):
+        store = Store.of(self)
+        success_status_ids = [
+            status.value for status in BUG_WATCH_ACTIVITY_SUCCESS_STATUSES]
+
+        return store.find(
+            BugWatchActivity,
+            BugWatchActivity.bug_watch == self,
+            Not(In(BugWatchActivity.result, success_status_ids))).order_by(
+                Desc('activity_date'))
+
+    def setNextCheck(self, next_check):
+        """See `IBugWatch`."""
+        if not self.can_be_rescheduled:
+            raise BugWatchCannotBeRescheduled()
+
+        self.next_check = next_check
+
 
 class BugWatchSet(BugSetBase):
     """A set for BugWatch"""
 
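The reschedule threshold is easiest to see with concrete numbers,
matching the doctests above: one failure in two checks is a ratio of
0.5, which is within the 0.6 threshold, while two failures in three is
roughly 0.67, which is not. A standalone sketch of just the ratio test
(the full can_be_rescheduled property also checks next_check, as the
diff shows):

    WATCH_RESCHEDULE_THRESHOLD = 0.6

    def ratio_allows_reschedule(failed_count, total_count):
        # Mirrors the final test in BugWatch.can_be_rescheduled: there
        # must be some activity and at least one failure, and the
        # failure ratio must not exceed the threshold.
        if total_count == 0 or failed_count == 0:
            return False
        return float(failed_count) / total_count <= WATCH_RESCHEDULE_THRESHOLD

    print ratio_allows_reschedule(1, 2)  # True: 0.5 <= 0.6
    print ratio_allows_reschedule(2, 3)  # False: ~0.67 > 0.6
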
=== modified file 'lib/lp/bugs/scripts/bugheat.py'
--- lib/lp/bugs/scripts/bugheat.py 2010-04-14 12:55:44 +0000
+++ lib/lp/bugs/scripts/bugheat.py 2010-04-27 02:13:38 +0000
@@ -79,12 +79,12 @@
             self._getHeatFromSubscribers(),
             ])
 
-        # Bugs decay over time. Every month the bug isn't touched its heat
-        # decreases by 10%.
-        months = (
+        # Bugs decay over time. Every day the bug isn't touched its heat
+        # decreases by 1%.
+        days = (
             datetime.utcnow() -
-            self.bug.date_last_updated.replace(tzinfo=None)).days / 30
-        total_heat = int(total_heat * (0.9 ** months))
+            self.bug.date_last_updated.replace(tzinfo=None)).days
+        total_heat = int(total_heat * (0.99 ** days))
 
         return total_heat
 
 
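As a sanity check on the new decay rate: 0.99 per day compounds to
about 0.74 over 30 days, slightly stronger than the single 0.9
multiplier the old per-month rule applied, but without the month-sized
steps:

    days = 30
    print 0.99 ** days              # ~0.7397: multiplier after 30 idle days
    print 0.9                       # old multiplier after one 30-day month
    print int(1000 * 0.99 ** days)  # a 1000-heat bug cools to 739 in a month
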
=== modified file 'lib/lp/bugs/scripts/checkwatches/scheduler.py'
--- lib/lp/bugs/scripts/checkwatches/scheduler.py 2010-03-26 14:33:46 +0000
+++ lib/lp/bugs/scripts/checkwatches/scheduler.py 2010-04-27 02:13:38 +0000
@@ -10,12 +10,11 @@
 
 import transaction
 
-from storm.expr import Not
-
 from canonical.database.sqlbase import sqlvalues
 from canonical.launchpad.utilities.looptuner import TunableLoop
 from canonical.launchpad.interfaces import IMasterStore
 
+from lp.bugs.interfaces.bugwatch import BUG_WATCH_ACTIVITY_SUCCESS_STATUSES
 from lp.bugs.model.bugwatch import BugWatch
 
 
@@ -70,7 +69,7 @@
                 FROM (SELECT 1
                     FROM bugwatchactivity
                     WHERE bugwatchactivity.bug_watch = bug_watch.id
-                    AND bugwatchactivity.result IS NOT NULL
+                    AND bugwatchactivity.result NOT IN (%s)
                     ORDER BY bugwatchactivity.id DESC
                     LIMIT %s) AS recent_failures
                 ) AS recent_failure_count
@@ -80,7 +79,8 @@
             ) AS counts
             WHERE BugWatch.id = counts.id
             """ % sqlvalues(
-                self.delay_coefficient, self.max_sample_size, chunk_size)
+                self.delay_coefficient, BUG_WATCH_ACTIVITY_SUCCESS_STATUSES,
+                self.max_sample_size, chunk_size)
         self.transaction.begin()
         result = self.store.execute(query)
         self.log.debug("Scheduled %s watches" % result.rowcount)
 
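With BUG_WATCH_ACTIVITY_SUCCESS_STATUSES holding only SYNC_SUCCEEDED
(DB value 9), the recent-failure subquery now skips successful runs
instead of NULL results. Assuming sqlvalues renders the sequence as a
comma-separated list of values, the substituted fragment would read:

    AND bugwatchactivity.result NOT IN (9)
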
=== modified file 'lib/lp/bugs/scripts/tests/test_bugheat.py'
--- lib/lp/bugs/scripts/tests/test_bugheat.py 2010-04-14 12:55:44 +0000
+++ lib/lp/bugs/scripts/tests/test_bugheat.py 2010-04-27 02:13:38 +0000
@@ -215,12 +215,12 @@
             "Expected %s, got %s" % (0, heat))
 
     def test_getBugHeat_decay(self):
-        # Every month, a bug that wasn't touched has its heat reduced by 10%.
+        # Every day, a bug that wasn't touched has its heat reduced by 1%.
         aging_bug = self.factory.makeBug()
         fresh_heat = BugHeatCalculator(aging_bug).getBugHeat()
         aging_bug.date_last_updated = (
-            aging_bug.date_last_updated - timedelta(days=32))
-        expected = int(fresh_heat * 0.9)
+            aging_bug.date_last_updated - timedelta(days=1))
+        expected = int(fresh_heat * 0.99)
         heat = BugHeatCalculator(aging_bug).getBugHeat()
         self.assertEqual(
             expected, heat,
 
=== modified file 'lib/lp/bugs/stories/bugwatches/xx-bugwatch-errors.txt'
--- lib/lp/bugs/stories/bugwatches/xx-bugwatch-errors.txt 2010-04-16 11:20:33 +0000
+++ lib/lp/bugs/stories/bugwatches/xx-bugwatch-errors.txt 2010-04-27 02:13:38 +0000
@@ -62,11 +62,17 @@
     The Mozilla.org Bug Tracker bug #900 appears not to exist. Check
     that the bug number is correct.
 
-We can observe this for each of the BugWatchActivityStatus values:
+We can observe this for each of the BugWatchActivityStatus failure values:
 
+    >>> from lp.bugs.interfaces.bugwatch import (
+    ...     BUG_WATCH_ACTIVITY_SUCCESS_STATUSES)
     >>> from lp.bugs.tests.externalbugtracker import (
    ...     set_bugwatch_error_type)
-    >>> for item in sorted(BugWatchActivityStatus.items):
+
+    >>> failure_values = [
+    ...     value for value in sorted(BugWatchActivityStatus.items) if
+    ...     value not in BUG_WATCH_ACTIVITY_SUCCESS_STATUSES]
+    >>> for item in failure_values:
     ...     set_bugwatch_error_type(watch, item)
     ...     user_browser.open('http://bugs.launchpad.dev/thunderbird/+bug/12')
     ...     for tag in find_tags_by_class(user_browser.contents,
@@ -86,6 +92,14 @@
     Launchpad doesn't support importing bugs from Bugzilla bug trackers.
     The bug is marked as private on the remote bug tracker. Launchpad
     cannot import the status of private remote bugs.
+    The remote bug's status was synchronized successfully but comments
+    could not be imported from the remote bug.
+    The remote bug's status was synchronized successfully and its
+    comments were successfully imported but Launchpad was unable to push
+    comments back to the remote bug.
+    The remote bug's status and comments were synchronized successfully
+    with Launchpad but Launchpad was unable to set the remote bug's link
+    back to the relevant Launchpad bug.
 
 Finally, if the error gets solved (or no error occurs), the error
 message will go away.
 
=== modified file 'lib/lp/bugs/stories/bugwatches/xx-edit-bugwatch.txt'
--- lib/lp/bugs/stories/bugwatches/xx-edit-bugwatch.txt 2010-04-09 12:00:54 +0000
+++ lib/lp/bugs/stories/bugwatches/xx-edit-bugwatch.txt 2010-04-27 02:13:38 +0000
@@ -77,3 +77,126 @@
     ...     admin_browser.contents, 'bugwatch-next_check')
     >>> print extract_text(data_tag.renderContents())
     Next check: 2010-04-08...
+
+
+Recent activity
+---------------
+
+Recent activity on a bug watch is shown on the page as a list of
+activity entries. When a watch has not been checked, no activity is
+shown.
+
+    >>> admin_browser.open('http://bugs.launchpad.dev/bugs/1/+watch/2')
+    >>> recent_activity_list = find_tag_by_id(
+    ...     admin_browser.contents, 'recent-watch-activity')
+    >>> print recent_activity_list
+    None
+
+Adding some activity to the watch will cause it to show up in the recent
+activity list.
+
+    >>> login('foo.bar@canonical.com')
+    >>> watch = getUtility(IBugWatchSet).get(2)
+    >>> watch.addActivity()
+    >>> logout()
+
+    >>> admin_browser.open('http://bugs.launchpad.dev/bugs/1/+watch/2')
+    >>> recent_activity_list = find_tag_by_id(
+    ...     admin_browser.contents, 'recent-watch-activity')
+    >>> print extract_text(recent_activity_list)
+    Update completed successfully ... ago
+
+If an update fails, that too will be reflected in the list.
+
+    >>> from lp.bugs.interfaces.bugwatch import BugWatchActivityStatus
+    >>> login('foo.bar@canonical.com')
+    >>> watch = getUtility(IBugWatchSet).get(2)
+    >>> watch.addActivity(result=BugWatchActivityStatus.BUG_NOT_FOUND)
+    >>> logout()
+
+    >>> admin_browser.open('http://bugs.launchpad.dev/bugs/1/+watch/2')
+    >>> recent_activity_list = find_tag_by_id(
+    ...     admin_browser.contents, 'recent-watch-activity')
+    >>> print extract_text(recent_activity_list)
+    Update failed with error 'Bug Not Found' ... ago
+    Update completed successfully ... ago
+
+
+Rescheduling a watch
+--------------------
+
+It's possible to reschedule a failing watch via the BugWatch +edit page
+by clicking the "Update Now" button.
+
+For a new watch, the "Update Now" button isn't shown.
+
+    >>> from pytz import utc
+    >>> from datetime import datetime, timedelta
+    >>> login('foo.bar@canonical.com')
+    >>> bug_watch = factory.makeBugWatch()
+    >>> bug_watch.next_check = None
+    >>> watch_url = (
+    ...     'http://bugs.launchpad.dev/bugs/%s/+watch/%s' %
+    ...     (bug_watch.bug.id, bug_watch.id))
+    >>> logout()
+
+    >>> user_browser.open(watch_url)
+    >>> user_browser.getControl('Update Now')
+    Traceback (most recent call last):
+    ...
+    LookupError: label 'Update Now'
+
+If the watch has been checked but has never failed, the button will
+remain hidden.
+
+    >>> login('foo.bar@canonical.com')
+    >>> bug_watch.addActivity()
+    >>> logout()
+
+    >>> user_browser.open(watch_url)
+    >>> user_browser.getControl('Update Now')
+    Traceback (most recent call last):
+    ...
+    LookupError: label 'Update Now'
+
+If the watch has failed less than 60% of its recent checks, the button
+will appear on the page.
+
+    >>> login('foo.bar@canonical.com')
+    >>> bug_watch.addActivity(result=BugWatchActivityStatus.BUG_NOT_FOUND)
+    >>> logout()
+
+    >>> user_browser.open(watch_url)
+    >>> reschedule_button = user_browser.getControl('Update Now')
+
+    >>> data_tag = find_tag_by_id(
+    ...     user_browser.contents, 'bugwatch-next_check')
+    >>> print extract_text(data_tag.renderContents())
+    Next check: Not yet scheduled
+
+Clicking the Update Now button will schedule it to be checked
+immediately.
+
+    >>> reschedule_button.click()
+
+    >>> for message in find_tags_by_class(
+    ...     user_browser.contents, 'informational message'):
+    ...     print extract_text(message)
+    The ... bug watch has been scheduled for immediate checking.
+
+Looking at the watch +edit page again, we can see that the watch has
+been scheduled.
+
+    >>> user_browser.open(watch_url)
+    >>> data_tag = find_tag_by_id(
+    ...     user_browser.contents, 'bugwatch-next_check')
+    >>> print extract_text(data_tag.renderContents())
+    Next check: 2...
+
+The button will no longer be shown on the page.
+
+    >>> reschedule_button = user_browser.getControl('Update Now')
+    Traceback (most recent call last):
+    ...
+    LookupError: label 'Update Now'
+
 
=== modified file 'lib/lp/bugs/templates/bugwatch-editform.pt'
--- lib/lp/bugs/templates/bugwatch-editform.pt 2009-09-03 12:43:53 +0000
+++ lib/lp/bugs/templates/bugwatch-editform.pt 2010-04-27 02:13:38 +0000
@@ -21,6 +21,10 @@
     </div>
   </div>
 
+  <div class="yui-g" tal:condition="view/watch_has_activity">
+    <div tal:replace="structure context/@@+portlet-activity" />
+  </div>
+
   <div class="yui-g">
     <div metal:use-macro="context/@@launchpad_form/form"/>
   </div>
 
=== added file 'lib/lp/bugs/templates/bugwatch-portlet-activity.pt'
--- lib/lp/bugs/templates/bugwatch-portlet-activity.pt 1970-01-01 00:00:00 +0000
+++ lib/lp/bugs/templates/bugwatch-portlet-activity.pt 2010-04-27 02:13:38 +0000
@@ -0,0 +1,44 @@
+<div
+  xmlns:tal="http://xml.zope.org/namespaces/tal"
+  xmlns:metal="http://xml.zope.org/namespaces/metal"
+  xmlns:i18n="http://xml.zope.org/namespaces/i18n"
+  class="portlet" id="portlet-watches">
+  <h2>Recent activity</h2>
+  <div id="recent-watch-activity">
+    <div>
+      <form
+        tal:attributes="action view/action_url;"
+        tal:condition="view/userCanReschedule"
+        name="launchpadform"
+        id="reschedule-form"
+        method="post"
+        enctype="multipart/form-data"
+        accept-charset="UTF-8">
+        <div>
+          This watch has failed to update
+          <tal:fail-count
+            replace="context/failed_activity/count" />
+          out of the last
+          <tal:total-count
+            replace="context/activity/count" />
+          attempts.
+        </div>
+        <div>
+          The next update will occur
+          <tal:next-check
+            replace="view/context/next_check/fmt:approximatedate" />
+          <tal:reschedule-button
+            replace="structure view/reschedule_action/render" />
+        </div>
+      </form>
+    </div>
+    <tal:activity repeat="activity view/recent_watch_activity">
+      <div>
+        <img tal:attributes="src activity/icon; title activity/result_text" />
+        Update
+        <tal:message replace="activity/completion_message" />
+        <tal:time replace="activity/date/fmt:displaydate" />
+      </div>
+    </tal:activity>
+  </div>
+</div>
 
=== modified file 'lib/lp/bugs/tests/test_apportjob.py'
--- lib/lp/bugs/tests/test_apportjob.py 2010-04-08 13:26:26 +0000
+++ lib/lp/bugs/tests/test_apportjob.py 2010-04-27 02:13:38 +0000
@@ -273,7 +273,7 @@
             expect_returncode=0)
         self.assertEqual('', stdout)
         self.assertIn(
-            'INFO Ran 1 IProcessApportBlobJobSource jobs.\n', stderr)
+            'INFO Ran 1 ProcessApportBlobJob jobs.\n', stderr)
 
     def test_getFileBugData(self):
         # The IProcessApportBlobJobSource.getFileBugData() method
 
=== modified file 'lib/lp/bugs/tests/test_bugheat.py'
--- lib/lp/bugs/tests/test_bugheat.py 2010-02-25 21:37:02 +0000
+++ lib/lp/bugs/tests/test_bugheat.py 2010-04-27 02:13:38 +0000
@@ -96,7 +96,7 @@
             expect_returncode=0)
         self.assertEqual('', stdout)
         self.assertIn(
-            'INFO Ran 1 ICalculateBugHeatJobSource jobs.\n', stderr)
+            'INFO Ran 1 CalculateBugHeatJob jobs.\n', stderr)
 
     def test_getOopsVars(self):
         # BugJobDerived.getOopsVars() returns the variables to be used
 
=== modified file 'lib/lp/code/configure.zcml'
--- lib/lp/code/configure.zcml 2010-04-23 02:35:47 +0000
+++ lib/lp/code/configure.zcml 2010-04-27 02:13:38 +0000
@@ -38,7 +38,7 @@
   <subscriber
       for="lp.code.interfaces.codereviewvote.ICodeReviewVoteReference
            lp.code.interfaces.event.IReviewerNominatedEvent"
-      handler="lp.code.mail.branchmergeproposal.send_review_requested_notifications"/>
+      handler="lp.code.subscribers.branchmergeproposal.review_requested"/>
 
   <!-- CodeImportMachine -->
 
@@ -272,19 +272,84 @@
       factory="lp.code.browser.branchmergeproposal.text_xhtml_representation"
       name="description"/>
 
+  <!-- Branch Merge Proposal Jobs -->
 
   <class class="lp.code.model.branchmergeproposaljob.CreateMergeProposalJob">
     <allow interface="canonical.launchpad.interfaces.IMessageJob"/>
     <allow interface="lp.code.interfaces.branchmergeproposal.ICreateMergeProposalJob"/>
   </class>
+  <securedutility
+      component="lp.code.model.branchmergeproposaljob.CreateMergeProposalJob"
+      provides="lp.code.interfaces.branchmergeproposal.ICreateMergeProposalJobSource">
+    <allow interface="lp.code.interfaces.branchmergeproposal.ICreateMergeProposalJobSource"/>
+  </securedutility>
+
+  <class class="lp.code.model.branchmergeproposaljob.MergeProposalCreatedJob">
+    <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob"/>
+    <allow interface="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJob"/>
+  </class>
+  <securedutility
+      component="lp.code.model.branchmergeproposaljob.MergeProposalCreatedJob"
+      provides="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJobSource">
+    <allow interface="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJobSource"/>
+  </securedutility>
+
+  <securedutility
+      component="lp.code.model.branchmergeproposaljob.BranchMergeProposalJobSource"
+      provides="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJobSource">
+    <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJobSource"/>
+  </securedutility>
+
+  <class class="lp.code.model.branchmergeproposaljob.UpdatePreviewDiffJob">
+    <allow interface="lp.code.interfaces.branchmergeproposal.IUpdatePreviewDiffJob" />
+    <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob" />
+  </class>
+  <securedutility
+      component="lp.code.model.branchmergeproposaljob.UpdatePreviewDiffJob"
+      provides="lp.code.interfaces.branchmergeproposal.IUpdatePreviewDiffJobSource">
+    <allow interface="lp.code.interfaces.branchmergeproposal.IUpdatePreviewDiffJobSource"/>
+  </securedutility>
+
+  <class class="lp.code.model.branchmergeproposaljob.CodeReviewCommentEmailJob">
+    <allow interface="lp.code.interfaces.branchmergeproposal.ICodeReviewCommentEmailJob" />
+    <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob" />
+  </class>
+  <securedutility
+      component="lp.code.model.branchmergeproposaljob.CodeReviewCommentEmailJob"
+      provides="lp.code.interfaces.branchmergeproposal.ICodeReviewCommentEmailJobSource">
+    <allow interface="lp.code.interfaces.branchmergeproposal.ICodeReviewCommentEmailJobSource"/>
+  </securedutility>
+
+  <class class="lp.code.model.branchmergeproposaljob.ReviewRequestedEmailJob">
+    <allow interface="lp.code.interfaces.branchmergeproposal.IReviewRequestedEmailJob" />
+    <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob" />
+  </class>
+  <securedutility
+      component="lp.code.model.branchmergeproposaljob.ReviewRequestedEmailJob"
+      provides="lp.code.interfaces.branchmergeproposal.IReviewRequestedEmailJobSource">
+    <allow interface="lp.code.interfaces.branchmergeproposal.IReviewRequestedEmailJobSource"/>
+  </securedutility>
+
+  <class class="lp.code.model.branchmergeproposaljob.MergeProposalUpdatedEmailJob">
+    <allow interface="lp.services.job.interfaces.job.IRunnableJob" />
+    <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob" />
+  </class>
+  <securedutility
+      component="lp.code.model.branchmergeproposaljob.MergeProposalUpdatedEmailJob"
+      provides="lp.code.interfaces.branchmergeproposal.IMergeProposalUpdatedEmailJobSource">
+    <allow interface="lp.code.interfaces.branchmergeproposal.IMergeProposalUpdatedEmailJobSource"/>
+  </securedutility>
+
+  <!-- Branch Merge Proposal Subscribers -->
+
   <subscriber
       for="lp.code.interfaces.branchmergeproposal.IBranchMergeProposal
            lp.code.interfaces.event.INewBranchMergeProposalEvent"
-      handler="lp.code.mail.branchmergeproposal.send_merge_proposal_created_notifications"/>
+      handler="lp.code.subscribers.branchmergeproposal.merge_proposal_created"/>
   <subscriber
       for="lp.code.interfaces.branchmergeproposal.IBranchMergeProposal
            lazr.lifecycle.interfaces.IObjectModifiedEvent"
-      handler="lp.code.mail.branchmergeproposal.send_merge_proposal_modified_notifications"/>
+      handler="lp.code.subscribers.branchmergeproposal.merge_proposal_modified"/>
   <subscriber
       for="lp.code.interfaces.branchmergeproposal.IBranchMergeProposal
            lp.code.interfaces.event.INewBranchMergeProposalEvent"
@@ -297,10 +362,6 @@
       for="lp.code.interfaces.branchmergeproposal.IBranchMergeProposal"
       provides="canonical.launchpad.webapp.interfaces.IPrimaryContext"
      factory="lp.code.browser.branchmergeproposal.BranchMergeProposalPrimaryContext"/>
-  <class class="lp.code.model.branchmergeproposaljob.MergeProposalCreatedJob">
-    <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob"/>
-    <allow interface="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJob"/>
-  </class>
 
   <!-- hierarchy -->
 
@@ -309,16 +370,6 @@
       provides="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalGetter">
     <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalGetter"/>
   </securedutility>
-  <securedutility
-      component="lp.code.model.branchmergeproposaljob.CreateMergeProposalJob"
-      provides="lp.code.interfaces.branchmergeproposal.ICreateMergeProposalJobSource">
-    <allow interface="lp.code.interfaces.branchmergeproposal.ICreateMergeProposalJobSource"/>
-  </securedutility>
-  <securedutility
-      component="lp.code.model.branchmergeproposaljob.MergeProposalCreatedJob"
-      provides="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJobSource">
-    <allow interface="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJobSource"/>
-  </securedutility>
   <class
       class="lp.code.model.seriessourcepackagebranch.SeriesSourcePackageBranch">
     <allow interface="lp.code.interfaces.seriessourcepackagebranch.ISeriesSourcePackageBranch"/>
@@ -907,15 +958,6 @@
     <allow interface="lp.code.interfaces.revision.IRevisionSet" />
   </securedutility>
 
-  <securedutility
-      component="lp.code.model.branchmergeproposaljob.UpdatePreviewDiffJob"
-      provides="lp.code.interfaces.branchmergeproposal.IUpdatePreviewDiffJobSource">
-    <allow interface="lp.code.interfaces.branchmergeproposal.IUpdatePreviewDiffJobSource"/>
-  </securedutility>
-  <class class="lp.code.model.branchmergeproposaljob.UpdatePreviewDiffJob">
-    <allow interface="lp.services.job.interfaces.job.IRunnableJob" />
-    <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob" />
-  </class>
 
   <securedutility
       component="lp.code.model.branchjob.BranchUpgradeJob"
 
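These registrations collapse the per-class job sources into one
IBranchMergeProposalJobSource utility, so a single cron script can drain
every kind of merge proposal job. The diff doesn't show the
BranchMergeProposalJobSource implementation itself; a rough sketch of
the dispatch pattern it implies (illustrative only, with a hypothetical
job_types list):

    class ExampleJobSource:
        # Illustrative only: yield ready jobs of every registered
        # merge-proposal job type so one script can run them all.
        job_types = []  # e.g. UpdatePreviewDiffJob, CodeReviewCommentEmailJob

        @classmethod
        def iterReady(cls):
            for job_type in cls.job_types:
                for job in job_type.iterReady():
                    yield job
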
=== modified file 'lib/lp/code/doc/branch-merge-proposal-notifications.txt'
--- lib/lp/code/doc/branch-merge-proposal-notifications.txt 2010-02-19 02:15:27 +0000
+++ lib/lp/code/doc/branch-merge-proposal-notifications.txt 2010-04-27 02:13:38 +0000
@@ -15,7 +15,7 @@
     ...     BranchSubscriptionDiffSize, BranchSubscriptionNotificationLevel,
     ...     CodeReviewNotificationLevel)
     >>> from lp.code.interfaces.branchmergeproposal import (
-    ...     IMergeProposalCreatedJobSource)
+    ...     IBranchMergeProposalJobSource)
     >>> from lp.code.model.diff import PreviewDiff
     >>> from lp.testing.mail_helpers import pop_notifications
     >>> import transaction
@@ -103,8 +103,11 @@
     >>> bmp = source_branch.addLandingTarget(
     ...     registrant, target_branch)
     >>> removeSecurityProxy(bmp).preview_diff = preview_diff
-    >>> [job,] = list(getUtility(IMergeProposalCreatedJobSource).iterReady())
-    >>> job.run(_create_preview=False)
+    >>> # Fake the update preview diff as done.
+    >>> bmp.next_preview_diff_job.start()
+    >>> bmp.next_preview_diff_job.complete()
+    >>> [job] = list(getUtility(IBranchMergeProposalJobSource).iterReady())
+    >>> job.run()
     >>> notifications = pop_notifications(
     ...     sort_key=lambda n: n.get('X-Envelope-To'))
 
@@ -155,8 +158,11 @@
     ...     registrant, target_branch,
     ...     description=initial_comment, review_requests=reviewers)
     >>> removeSecurityProxy(bmp).preview_diff = preview_diff
-    >>> [job,] = list(getUtility(IMergeProposalCreatedJobSource).iterReady())
-    >>> job.run(_create_preview=False)
+    >>> # Fake the update preview diff as done.
+    >>> bmp.next_preview_diff_job.start()
+    >>> bmp.next_preview_diff_job.complete()
+    >>> [job] = list(getUtility(IBranchMergeProposalJobSource).iterReady())
+    >>> job.run()
     >>> notifications = pop_notifications(
     ...     sort_key=lambda n: n.get('X-Envelope-To'))
     >>> for notification in notifications:
 
=== modified file 'lib/lp/code/doc/codereviewcomment.txt'
--- lib/lp/code/doc/codereviewcomment.txt 2010-02-18 00:35:39 +0000
+++ lib/lp/code/doc/codereviewcomment.txt 2010-04-27 02:13:38 +0000
@@ -45,6 +45,15 @@
     >>> comment3.message.parent == comment2.message
     True
 
+When comments are added, a job is created to send the emails to the
+subscribers of the merge proposal.
+
+    >>> # Needed for now to make the iterReady show the jobs.
+    >>> factory.makeRevisionsForBranch(merge_proposal.source_branch)
+    >>> factory.makeRevisionsForBranch(merge_proposal.target_branch)
+    >>> from lp.code.tests.helpers import mark_all_merge_proposal_jobs_done
+    >>> mark_all_merge_proposal_jobs_done()
+
 If there is a subscriber to any of the branches involved in the merge,
 a notification is produced when the comment is created.
 
@@ -60,9 +69,18 @@
     >>> from lp.testing.mail_helpers import (
     ...     pop_notifications, print_emails)
     >>> _unused = pop_notifications()
+    >>> merge_proposal.root_message_id = '<201003111740.test.root@example.com>'
     >>> comment = merge_proposal.createComment(
     ...     sender, 'Please merge', 'This patch is very nice.',
     ...     vote=CodeReviewVote.APPROVE, review_type='DB')
+
+Now run the pending job to send the email.
+
+    >>> from lp.code.interfaces.branchmergeproposal import (
+    ...     IBranchMergeProposalJobSource)
+    >>> [job] = list(getUtility(IBranchMergeProposalJobSource).iterReady())
+    >>> job.run()
+
     >>> notifications = pop_notifications()
     >>> notifications = [email for email in notifications if
     ...     email['X-Launchpad-Message-Rationale'] == 'Owner']
 
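The pattern in these doctests is the general one for the new job
machinery: perform the action, then run whatever the job source reports
as ready. Outside a doctest, the same drain loop would look roughly like
this (a sketch, mirroring the calls above):

    from zope.component import getUtility
    from lp.code.interfaces.branchmergeproposal import (
        IBranchMergeProposalJobSource)

    # Run every merge-proposal job that is currently ready; each job
    # sends its own notification email when run.
    for job in getUtility(IBranchMergeProposalJobSource).iterReady():
        job.run()
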
=== modified file 'lib/lp/code/interfaces/branchmergeproposal.py'
--- lib/lp/code/interfaces/branchmergeproposal.py 2010-03-05 03:35:10 +0000
+++ lib/lp/code/interfaces/branchmergeproposal.py 2010-04-27 02:13:38 +0000
@@ -11,11 +11,19 @@
11 'IBranchMergeProposal',11 'IBranchMergeProposal',
12 'IBranchMergeProposalGetter',12 'IBranchMergeProposalGetter',
13 'IBranchMergeProposalJob',13 'IBranchMergeProposalJob',
14 'IBranchMergeProposalJobSource',
14 'IBranchMergeProposalListingBatchNavigator',15 'IBranchMergeProposalListingBatchNavigator',
16 'ICodeReviewCommentEmailJob',
17 'ICodeReviewCommentEmailJobSource',
15 'ICreateMergeProposalJob',18 'ICreateMergeProposalJob',
16 'ICreateMergeProposalJobSource',19 'ICreateMergeProposalJobSource',
17 'IMergeProposalCreatedJob',20 'IMergeProposalCreatedJob',
18 'IMergeProposalCreatedJobSource',21 'IMergeProposalCreatedJobSource',
22 'IMergeProposalUpdatedEmailJob',
23 'IMergeProposalUpdatedEmailJobSource',
24 'IReviewRequestedEmailJob',
25 'IReviewRequestedEmailJobSource',
26 'IUpdatePreviewDiffJob',
19 'IUpdatePreviewDiffJobSource',27 'IUpdatePreviewDiffJobSource',
20 'notify_modified',28 'notify_modified',
21 ]29 ]
@@ -29,12 +37,13 @@
 
 from canonical.launchpad import _
 from canonical.launchpad.fields import PublicPersonChoice, Summary, Whiteboard
-from canonical.launchpad.interfaces import IBug, IPrivacy
+from canonical.launchpad.interfaces.launchpad import IPrivacy
+from lp.bugs.interfaces.bug import IBug
 from lp.code.enums import BranchMergeProposalStatus, CodeReviewVote
 from lp.code.interfaces.branch import IBranch
 from lp.registry.interfaces.person import IPerson
 from lp.code.interfaces.diff import IPreviewDiff, IStaticDiff
-from lp.services.job.interfaces.job import IJob, IRunnableJob
+from lp.services.job.interfaces.job import IJob, IJobSource, IRunnableJob
 from canonical.launchpad.webapp.interfaces import ITableBatchNavigator
 from lazr.restful.fields import CollectionField, Reference
 from lazr.restful.declarations import (
@@ -500,6 +509,10 @@
500 """Destroy this object."""509 """Destroy this object."""
501510
502511
512class IBranchMergeProposalJobSource(IJobSource):
513 """A job source that will get all supported merge proposal jobs."""
514
515
503class IBranchMergeProposalListingBatchNavigator(ITableBatchNavigator):516class IBranchMergeProposalListingBatchNavigator(ITableBatchNavigator):
504 """A marker interface for registering the appropriate listings."""517 """A marker interface for registering the appropriate listings."""
505518
@@ -570,15 +583,12 @@
570 """583 """
571584
572585
573class ICreateMergeProposalJobSource(Interface):586class ICreateMergeProposalJobSource(IJobSource):
574 """Acquire MergeProposalJobs."""587 """Acquire MergeProposalJobs."""
575588
576 def create(message_bytes):589 def create(message_bytes):
577 """Return a CreateMergeProposalJob for this message."""590 """Return a CreateMergeProposalJob for this message."""
578591
579 def iterReady():
580 """Iterate through jobs that are ready to run."""
581
582592
583class IMergeProposalCreatedJob(IRunnableJob):593class IMergeProposalCreatedJob(IRunnableJob):
584 """Interface for review diffs."""594 """Interface for review diffs."""
@@ -590,8 +600,12 @@
     def create(bmp):
         """Create a MergeProposalCreatedJob for the specified Job."""
 
-    def iterReady():
-        """Iterate through all ready MergeProposalCreatedJobs."""
+
+class IUpdatePreviewDiffJob(IRunnableJob):
+    """Interface for the job to update the diff for a merge proposal."""
+
+    def checkReady():
+        """Check to see if this job is ready to run."""
 
 
 class IUpdatePreviewDiffJobSource(Interface):
@@ -603,11 +617,55 @@
     def get(id):
         """Return the UpdatePreviewDiffJob with this id."""
 
-    def iterReady():
-        """Iterate through jobs ready to update preview diffs."""
-
-    def contextManager():
-        """Get a context for running this kind of job in."""
+
+class ICodeReviewCommentEmailJob(IRunnableJob):
+    """Interface for the job to send code review comment email."""
+
+    code_review_comment = Attribute('The code review comment.')
+
+
+class ICodeReviewCommentEmailJobSource(Interface):
+    """Create or retrieve jobs that send code review comment email."""
+
+    def create(code_review_comment):
+        """Create a job to email subscribers about the comment."""
+
+
+class IReviewRequestedEmailJob(IRunnableJob):
+    """Interface for the job to send review request emails."""
+
+    reviewer = Attribute('The person or team asked to do the review.')
+    requester = Attribute('The person who has asked for the review.')
+
+
+class IReviewRequestedEmailJobSource(Interface):
+    """Create or retrieve jobs that email review requests."""
+
+    def create(review_request):
+        """Create a job to email a review request.
+
+        :param review_request: A vote reference for the requested review.
+        """
+
+
+class IMergeProposalUpdatedEmailJob(IRunnableJob):
+    """Interface for the job to send email about merge proposal updates."""
+
+    editor = Attribute('The person that did the editing.')
+    delta_text = Attribute(
+        'The textual representation of the changed fields.')
+
+
+class IMergeProposalUpdatedEmailJobSource(Interface):
+    """Create or retrieve jobs that email about merge proposal updates."""
+
+    def create(merge_proposal, delta_text, editor):
+        """Create a job to email merge proposal updates to subscribers.
+
+        :param merge_proposal: The merge proposal that has been edited.
+        :param delta_text: The text representation of the changed fields.
+        :param editor: The person who did the editing.
+        """
 
 
 # XXX: JonathanLange 2010-01-06: This is only used in the scanner, perhaps it
 
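IBranchMergeProposalJobSource gives the job-running script a single source that yields every kind of merge proposal job declared above. The model implementation is not part of this hunk; as a rough sketch of the shape such a composite source could take (the class and attribute names below are hypothetical):

    class CompositeJobSource:
        """Yield ready jobs from each per-type job source in turn."""
        def __init__(self, sources):
            self.sources = sources

        def iterReady(self):
            # Drain each underlying source; callers see one flat stream
            # of ready-to-run jobs regardless of their concrete type.
            for source in self.sources:
                for job in source.iterReady():
                    yield job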
=== modified file 'lib/lp/code/interfaces/codehosting.py'
--- lib/lp/code/interfaces/codehosting.py 2010-02-24 04:25:38 +0000
+++ lib/lp/code/interfaces/codehosting.py 2010-04-27 02:13:38 +0000
@@ -179,6 +179,18 @@
         :param branchID: a branch ID.
         """
 
+    def branchChanged(branch_id, stacked_on_url, last_revision_id):
+        """Record that a branch has been changed.
+
+        This method records the stacked on branch and tip revision id of the
+        branch and creates a scan job if the tip revision id has changed.
+
+        :param branch_id: The database id of the branch to operate on.
+        :param stacked_on_url: The unique name of the branch this branch is
+            stacked on, or '' if this branch is not stacked.
+        :param last_revision_id: The tip revision ID of the branch.
+        """
+
     def translatePath(requester_id, path):
         """Translate 'path' so that the codehosting transport can access it.
 
 
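Since this interface describes the codehosting XML-RPC API, branchChanged() lets the branch-serving side report a push without touching the database directly. A hedged client-side sketch, where the endpoint URL and the literal arguments are made up for illustration; only the method name and signature come from the interface above:

    import xmlrpclib

    # Hypothetical endpoint; a real deployment takes the URL from config.
    codehosting = xmlrpclib.ServerProxy(
        'http://xmlrpc.example.com/codehosting')
    # An empty stacked_on_url means the branch is not stacked.
    codehosting.branchChanged(1234, '', 'example-tip-revision-id')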
=== modified file 'lib/lp/code/interfaces/codereviewcomment.py'
--- lib/lp/code/interfaces/codereviewcomment.py 2010-04-22 04:40:15 +0000
+++ lib/lp/code/interfaces/codereviewcomment.py 2010-04-27 02:13:38 +0000
@@ -74,12 +74,16 @@
         attachments.
         """
 
+    def getOriginalEmail():
+        """An email object of the original raw email if there was one."""
+
     as_quoted_email = exported(
         TextLine(
             title=_('The message as quoted in email.'),
             readonly=True))
 
 
+
 class ICodeReviewCommentDeletion(Interface):
     """This interface provides deletion of CodeReviewComments.
 
 
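A brief usage note on the new accessor: per its docstring it returns an email object when the comment arrived by mail, and presumably nothing otherwise, so callers should guard for the empty case. Illustrative only:

    original = comment.getOriginalEmail()
    if original is not None:
        # Standard email.Message header access.
        print(original['Message-Id'])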
=== modified file 'lib/lp/code/mail/branch.py'
--- lib/lp/code/mail/branch.py 2010-03-09 16:58:30 +0000
+++ lib/lp/code/mail/branch.py 2010-04-27 02:13:38 +0000
@@ -69,23 +69,23 @@
             review_level=subscription.review_level)
 
     @classmethod
-    def forReviewer(cls, vote_reference, recipient,
+    def forReviewer(cls, branch_merge_proposal, pending_review, reviewer,
                     branch_identity_cache=None):
         """Construct RecipientReason for a reviewer.
 
         The reviewer will be the sole recipient.
         """
-        merge_proposal = vote_reference.branch_merge_proposal
-        branch = merge_proposal.source_branch
-        if vote_reference.comment is None:
+        branch = branch_merge_proposal.source_branch
+        if pending_review:
             reason_template = (
                 '%(entity_is)s requested to review %(merge_proposal)s.')
         else:
             reason_template = (
                 '%(entity_is)s reviewing %(merge_proposal)s.')
-        return cls(vote_reference.reviewer, recipient, branch,
-                   'Reviewer', reason_template, merge_proposal,
-                   branch_identity_cache=branch_identity_cache)
+        return cls(reviewer, reviewer, branch,
+                   cls.makeRationale('Reviewer', reviewer),
+                   reason_template, branch_merge_proposal,
+                   branch_identity_cache=branch_identity_cache)
 
     @classmethod
     def forRegistrant(cls, merge_proposal, branch_identity_cache=None):
@@ -93,7 +93,6 @@
 
         The registrant will be the sole recipient.
         """
-        branch = merge_proposal.source_branch
         reason_template = 'You proposed %(branch_name)s for merging.'
         return cls(merge_proposal.registrant, merge_proposal.registrant,
                    merge_proposal.source_branch,
@@ -124,16 +123,16 @@
         The owner will be the sole recipient.
         """
         return cls(branch.owner, recipient, branch,
-                   cls.makeRationale('Owner', branch.owner),
+                   cls.makeRationale('Owner', branch.owner),
                    'You are getting this email as %(lc_entity_is)s the'
                    ' owner of the branch and someone has edited the'
                    ' details.',
                    branch_identity_cache=branch_identity_cache)
 
     @staticmethod
The diff has been truncated for viewing.
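For reference, a hedged sketch of the reworked forReviewer() call site: the email job now passes the proposal, a pending_review flag, and the reviewer directly instead of a vote reference. The objects below are placeholders for real model objects:

    # Build the recipient reason for a reviewer who has not yet voted.
    reason = RecipientReason.forReviewer(
        merge_proposal, pending_review=True, reviewer=reviewer)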