Merge lp:~wgrant/launchpad/observer-db into lp:launchpad/db-devel

Proposed by William Grant
Status: Superseded
Proposed branch: lp:~wgrant/launchpad/observer-db
Merge into: lp:launchpad/db-devel
Diff against target: 1099 lines (+579/-214)
11 files modified
cronscripts/check-teamparticipation.py (+3/-71)
database/schema/comments.sql (+20/-0)
database/schema/patch-2208-93-1.sql (+46/-0)
lib/lp/archivepublisher/domination.py (+104/-75)
lib/lp/archivepublisher/tests/test_dominator.py (+170/-2)
lib/lp/bugs/javascript/buglisting.js (+0/-6)
lib/lp/bugs/javascript/tests/test_buglisting.js (+10/-18)
lib/lp/bugs/templates/buglisting-default.pt (+8/-2)
lib/lp/registry/scripts/teamparticipation.py (+160/-0)
lib/lp/registry/tests/test_teammembership.py (+56/-28)
lib/lp/soyuz/model/publishing.py (+2/-12)
To merge this branch: bzr merge lp:~wgrant/launchpad/observer-db
Reviewer: Robert Collins (review type: db), status: Pending
Review via email: mp+81102@code.launchpad.net

This proposal has been superseded by a proposal from 2011-11-03.

Commit message

Initial DB schema for access policies.

Description of the change

This is the initial DB patch for Disclosure's access policies. The migration process will be long and tortuous, but this is a start.

A private artifact (currently a bug or a branch) will be linked to one of its target's access policies, and attempts to access the artifact will be checked against the policy's permissions. This will shortly supplant the existing subscription-and-other-stuff-based checks.

Permissions can be either policy-global or artifact-specific. In the first case, AccessPolicyPermission.artifact is left unset, letting the permission holder see all artifacts under the policy. In the second case, both policy and artifact are set, restricting access to a specific artifact under a specific policy.
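To illustrate the intended lookup (a sketch only, not code from this branch: the :person_id and :bug_id parameters are placeholders, and expansion of team memberships via TeamParticipation is ignored here), a check against the proposed tables might read:

SELECT EXISTS (
    SELECT 1
    FROM Bug
    JOIN AccessPolicyPermission AS app ON app.policy = Bug.access_policy
    LEFT JOIN AccessPolicyArtifact AS apa ON apa.id = app.artifact
    WHERE Bug.id = :bug_id
        AND app.person = :person_id
        -- Either a policy-wide grant (artifact unset) or a grant on this bug's artifact.
        AND (app.artifact IS NULL OR apa.bug = Bug.id)
) AS may_see;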

The identification of access policies isn't well-defined yet, but this schema will do for now. The intent is that, at least for the initial pass, projects will have preconfigured and immutable "Private" and "Security" policies, with the existing bug privacy/security checkboxes altered to map onto these policies. So we either have to use an enum, or just treat these as well-known names until a later pass.
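Under the well-known-names approach, the preconfigured policies could simply be seeded per project, along the lines of this sketch (the :product_id parameter is a placeholder, and nothing below enforces immutability):

INSERT INTO AccessPolicy (product, display_name) VALUES
    (:product_id, 'Private'),
    (:product_id, 'Security');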


Preview Diff

=== modified file 'cronscripts/check-teamparticipation.py'
--- cronscripts/check-teamparticipation.py 2011-09-18 03:43:46 +0000
+++ cronscripts/check-teamparticipation.py 2011-11-03 02:37:26 +0000
@@ -20,77 +20,8 @@
 
 import _pythonpath
 
-import transaction
-
-from canonical.database.sqlbase import cursor
-from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
-
-
-def check_teamparticipation(log):
-    # Check self-participation.
-    query = """
-        SELECT id, name
-        FROM Person WHERE id NOT IN (
-            SELECT person FROM Teamparticipation WHERE person = team
-            ) AND merged IS NULL
-        """
-    cur = cursor()
-    cur.execute(query)
-    non_self_participants = cur.fetchall()
-    if len(non_self_participants) > 0:
-        log.warn("Some people/teams are not members of themselves: %s"
-                 % non_self_participants)
-
-    # Check if there are any circular references between teams.
-    cur.execute("""
-        SELECT tp.team, tp2.team
-        FROM teamparticipation AS tp, teamparticipation AS tp2
-        WHERE tp.team = tp2.person
-            AND tp.person = tp2.team
-            AND tp.id != tp2.id;
-        """)
-    circular_references = cur.fetchall()
-    if len(circular_references) > 0:
-        raise LaunchpadScriptFailure(
-            "Circular references found: %s" % circular_references)
-
-    # Check if there are any missing/spurious TeamParticipation entries.
-    cur.execute("SELECT id FROM Person WHERE teamowner IS NOT NULL")
-    team_ids = cur.fetchall()
-    transaction.abort()
-
-    def get_participants(team):
-        """Recurse through the team's members to get all its participants."""
-        participants = set()
-        for member in team.activemembers:
-            participants.add(member)
-            if member.is_team:
-                participants.update(get_participants(member))
-        return participants
-
-    from lp.registry.model.person import Person
-    batch = team_ids[:50]
-    team_ids = team_ids[50:]
-    while batch:
-        for [id] in batch:
-            team = Person.get(id)
-            expected = get_participants(team)
-            found = set(team.allmembers)
-            difference = expected.difference(found)
-            if len(difference) > 0:
-                people = ", ".join("%s (%s)" % (person.name, person.id)
-                    for person in difference)
-                log.warn("%s (%s): missing TeamParticipation entries for %s."
-                    % (team.name, team.id, people))
-            reverse_difference = found.difference(expected)
-            if len(reverse_difference) > 0:
-                people = ", ".join("%s (%s)" % (person.name, person.id)
-                    for person in reverse_difference)
-                log.warn("%s (%s): spurious TeamParticipation entries for %s."
-                    % (team.name, team.id, people))
-        transaction.abort()
-        batch = team_ids[:50]
-        team_ids = team_ids[50:]
+from lp.registry.scripts.teamparticipation import check_teamparticipation
+from lp.services.scripts.base import LaunchpadScript
 
 
 class CheckTeamParticipationScript(LaunchpadScript):
@@ -99,5 +30,6 @@
     def main(self):
         check_teamparticipation(self.logger)
 
+
 if __name__ == '__main__':
     CheckTeamParticipationScript("check-teamparticipation").run()
 
=== modified file 'database/schema/comments.sql'
--- database/schema/comments.sql 2011-10-07 11:32:23 +0000
+++ database/schema/comments.sql 2011-11-03 02:37:26 +0000
@@ -6,6 +6,26 @@
  GNU Affero General Public License version 3 (see the file LICENSE).
 */
 
+-- AccessPolicy
+
+COMMENT ON TABLE AccessPolicy IS 'A policy to manage access to a pillar\'s artifacts.';
+COMMENT ON COLUMN AccessPolicy.product IS 'The product that this policy applies to.';
+COMMENT ON COLUMN AccessPolicy.distribution IS 'The distribution that this policy applies to.';
+COMMENT ON COLUMN AccessPolicy.display_name IS 'A human-readable name for this policy.';
+
+-- AccessPolicyArtifact
+
+COMMENT ON TABLE AccessPolicyArtifact IS 'An artifact that an access permission can apply to.';
+COMMENT ON COLUMN AccessPolicyArtifact.bug IS 'The bug that this abstract artifact represents.';
+COMMENT ON COLUMN AccessPolicyArtifact.branch IS 'The branch that this abstract artifact represents.';
+
+-- AccessPolicyPermission
+
+COMMENT ON TABLE AccessPolicyPermission IS 'A permission for a person to access a policy\'s artifacts.';
+COMMENT ON COLUMN AccessPolicyPermission.policy IS 'The access policy on which access is granted.';
+COMMENT ON COLUMN AccessPolicyPermission.person IS 'The person that holds the permission.';
+COMMENT ON COLUMN AccessPolicyPermission.artifact IS 'The optional artifact to which the access is restricted.';
+
 -- Announcement
 
 COMMENT ON TABLE Announcement IS 'A project announcement. This is a single item of news or information that the project is communicating. Announcements can be attached to a Project, a Product or a Distribution.';
 
=== added file 'database/schema/patch-2208-93-1.sql'
--- database/schema/patch-2208-93-1.sql 1970-01-01 00:00:00 +0000
+++ database/schema/patch-2208-93-1.sql 2011-11-03 02:37:26 +0000
@@ -0,0 +1,46 @@
+-- Copyright 2011 Canonical Ltd. This software is licensed under the
+-- GNU Affero General Public License version 3 (see the file LICENSE).
+SET client_min_messages=ERROR;
+
+CREATE TABLE AccessPolicy (
+    id serial PRIMARY KEY,
+    product integer REFERENCES Product,
+    distribution integer REFERENCES Distribution,
+    display_name text NOT NULL,
+    CONSTRAINT accesspolicy__product__display_name__key
+        UNIQUE (product, display_name),
+    CONSTRAINT accesspolicy__distribution__display_name__key
+        UNIQUE (distribution, display_name),
+    CONSTRAINT has_target CHECK (product IS NULL != distribution IS NULL)
+);
+
+CREATE TABLE AccessPolicyArtifact (
+    id serial PRIMARY KEY,
+    bug integer REFERENCES Bug,
+    branch integer REFERENCES Branch,
+    CONSTRAINT accesspolicyartifact__bug__key UNIQUE (bug),
+    CONSTRAINT accesspolicyartifact__branch__key UNIQUE (branch),
+    CONSTRAINT has_artifact CHECK (bug IS NULL != branch IS NULL)
+);
+
+CREATE TABLE AccessPolicyPermission (
+    id serial PRIMARY KEY,
+    policy integer NOT NULL REFERENCES AccessPolicy,
+    person integer NOT NULL REFERENCES Person,
+    artifact integer REFERENCES AccessPolicyArtifact,
+    CONSTRAINT accesspolicypermission__policy__person__artifact__key
+        UNIQUE (policy, person, artifact)
+);
+
+CREATE UNIQUE INDEX accesspolicypermission__policy__person__key
+    ON AccessPolicyPermission(policy, person) WHERE artifact IS NULL;
+
+ALTER TABLE bug
+    ADD COLUMN access_policy integer REFERENCES AccessPolicy;
+CREATE INDEX bug__access_policy__idx ON bug(access_policy);
+
+ALTER TABLE branch
+    ADD COLUMN access_policy integer REFERENCES AccessPolicy;
+CREATE INDEX branch__access_policy__idx ON branch(access_policy);
+
+INSERT INTO LaunchpadDatabaseRevision VALUES (2208, 93, 1);
=== modified file 'lib/lp/archivepublisher/domination.py'
--- lib/lp/archivepublisher/domination.py 2011-10-18 11:56:09 +0000
+++ lib/lp/archivepublisher/domination.py 2011-11-03 02:37:26 +0000
@@ -53,6 +53,7 @@
53__all__ = ['Dominator']53__all__ = ['Dominator']
5454
55from datetime import timedelta55from datetime import timedelta
56from operator import itemgetter
5657
57import apt_pkg58import apt_pkg
58from storm.expr import (59from storm.expr import (
@@ -67,6 +68,9 @@
67 flush_database_updates,68 flush_database_updates,
68 sqlvalues,69 sqlvalues,
69 )70 )
71from canonical.launchpad.components.decoratedresultset import (
72 DecoratedResultSet,
73 )
70from canonical.launchpad.interfaces.lpstorm import IStore74from canonical.launchpad.interfaces.lpstorm import IStore
71from lp.registry.model.sourcepackagename import SourcePackageName75from lp.registry.model.sourcepackagename import SourcePackageName
72from lp.services.database.bulk import load_related76from lp.services.database.bulk import load_related
@@ -258,7 +262,8 @@
258 # Go through publications from latest version to oldest. This262 # Go through publications from latest version to oldest. This
259 # makes it easy to figure out which release superseded which:263 # makes it easy to figure out which release superseded which:
260 # the dominant is always the oldest live release that is newer264 # the dominant is always the oldest live release that is newer
261 # than the one being superseded.265 # than the one being superseded. In this loop, that means the
266 # dominant is always the last live publication we saw.
262 publications = sorted(267 publications = sorted(
263 publications, cmp=generalization.compare, reverse=True)268 publications, cmp=generalization.compare, reverse=True)
264269
@@ -272,25 +277,29 @@
272 for pub in publications:277 for pub in publications:
273 version = generalization.getPackageVersion(pub)278 version = generalization.getPackageVersion(pub)
274 # There should never be two published releases with the same279 # There should never be two published releases with the same
275 # version. So this comparison is really a string280 # version. So it doesn't matter whether this comparison is
276 # comparison, not a version comparison: if the versions are281 # really a string comparison or a version comparison: if the
277 # equal by either measure, they're from the same release.282 # versions are equal by either measure, they're from the same
278 if dominant_version is not None and version == dominant_version:283 # release.
284 if version == dominant_version:
279 # This publication is for a live version, but has been285 # This publication is for a live version, but has been
280 # superseded by a newer publication of the same version.286 # superseded by a newer publication of the same version.
281 # Supersede it.287 # Supersede it.
282 pub.supersede(current_dominant, logger=self.logger)288 pub.supersede(current_dominant, logger=self.logger)
283 self.logger.debug2(289 self.logger.debug2(
284 "Superseding older publication for version %s.", version)290 "Superseding older publication for version %s.", version)
285 elif (version in live_versions or291 elif version in live_versions:
286 (not generalization.is_source and
287 not self._checkArchIndep(pub))):
288 # This publication stays active; if any publications292 # This publication stays active; if any publications
289 # that follow right after this are to be superseded,293 # that follow right after this are to be superseded,
290 # this is the release that they are superseded by.294 # this is the release that they are superseded by.
291 current_dominant = pub295 current_dominant = pub
292 dominant_version = version296 dominant_version = version
293 self.logger.debug2("Keeping version %s.", version)297 self.logger.debug2("Keeping version %s.", version)
298 elif not (generalization.is_source or self._checkArchIndep(pub)):
299 # As a special case, we keep this version live as well.
300 current_dominant = pub
301 dominant_version = version
302 self.logger.debug2("Keeping version %s.", version)
294 elif current_dominant is None:303 elif current_dominant is None:
295 # This publication is no longer live, but there is no304 # This publication is no longer live, but there is no
296 # newer version to supersede it either. Therefore it305 # newer version to supersede it either. Therefore it
@@ -442,72 +451,81 @@
442 # always equals to "scheduleddeletiondate - quarantine".451 # always equals to "scheduleddeletiondate - quarantine".
443 pub_record.datemadepending = UTC_NOW452 pub_record.datemadepending = UTC_NOW
444453
454 def findBinariesForDomination(self, distroarchseries, pocket):
455 """Find binary publications that need dominating.
456
457 This is only for traditional domination, where the latest published
458 publication is always kept published. It will ignore publications
459 that have no other publications competing for the same binary package.
460 """
461 # Avoid circular imports.
462 from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
463
464 bpph_location_clauses = [
465 BinaryPackagePublishingHistory.status ==
466 PackagePublishingStatus.PUBLISHED,
467 BinaryPackagePublishingHistory.distroarchseries ==
468 distroarchseries,
469 BinaryPackagePublishingHistory.archive == self.archive,
470 BinaryPackagePublishingHistory.pocket == pocket,
471 ]
472 candidate_binary_names = Select(
473 BinaryPackageName.id,
474 And(
475 BinaryPackageRelease.binarypackagenameID ==
476 BinaryPackageName.id,
477 BinaryPackagePublishingHistory.binarypackagereleaseID ==
478 BinaryPackageRelease.id,
479 bpph_location_clauses,
480 ),
481 group_by=BinaryPackageName.id,
482 having=Count(BinaryPackagePublishingHistory.id) > 1)
483 main_clauses = [
484 BinaryPackageRelease.id ==
485 BinaryPackagePublishingHistory.binarypackagereleaseID,
486 BinaryPackageRelease.binarypackagenameID.is_in(
487 candidate_binary_names),
488 BinaryPackageRelease.binpackageformat !=
489 BinaryPackageFormat.DDEB,
490 ]
491 main_clauses.extend(bpph_location_clauses)
492
493 store = IStore(BinaryPackagePublishingHistory)
494 return store.find(BinaryPackagePublishingHistory, *main_clauses)
495
445 def dominateBinaries(self, distroseries, pocket):496 def dominateBinaries(self, distroseries, pocket):
446 """Perform domination on binary package publications.497 """Perform domination on binary package publications.
447498
448 Dominates binaries, restricted to `distroseries`, `pocket`, and499 Dominates binaries, restricted to `distroseries`, `pocket`, and
449 `self.archive`.500 `self.archive`.
450 """501 """
451 # Avoid circular imports.
452 from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
453
454 generalization = GeneralizedPublication(is_source=False)502 generalization = GeneralizedPublication(is_source=False)
455503
456 for distroarchseries in distroseries.architectures:504 for distroarchseries in distroseries.architectures:
457 self.logger.info(505 self.logger.info(
458 "Performing domination across %s/%s (%s)",506 "Performing domination across %s/%s (%s)",
459 distroseries.name, pocket.title,507 distroarchseries.distroseries.name, pocket.title,
460 distroarchseries.architecturetag)508 distroarchseries.architecturetag)
461509
462 bpph_location_clauses = [510 self.logger.info("Finding binaries...")
463 BinaryPackagePublishingHistory.status ==511 bins = self.findBinariesForDomination(distroarchseries, pocket)
464 PackagePublishingStatus.PUBLISHED,512 sorted_packages = self._sortPackages(bins, generalization)
465 BinaryPackagePublishingHistory.distroarchseries ==513 self.logger.info("Dominating binaries...")
466 distroarchseries,514 self._dominatePublications(sorted_packages, generalization)
467 BinaryPackagePublishingHistory.archive == self.archive,515
468 BinaryPackagePublishingHistory.pocket == pocket,516 # We need to make a second pass to cover the cases where:
469 ]517 # * arch-specific binaries were not all dominated before arch-all
470 candidate_binary_names = Select(518 # ones that depend on them
471 BinaryPackageName.id,519 # * An arch-all turned into an arch-specific, or vice-versa
472 And(520 # * A package is completely schizophrenic and changes all of
473 BinaryPackageRelease.binarypackagenameID ==521 # its binaries between arch-all and arch-any (apparently
474 BinaryPackageName.id,522 # occurs sometimes!)
475 BinaryPackagePublishingHistory.binarypackagereleaseID ==523 for distroarchseries in distroseries.architectures:
476 BinaryPackageRelease.id,524 self.logger.info("Finding binaries...(2nd pass)")
477 bpph_location_clauses,525 bins = self.findBinariesForDomination(distroarchseries, pocket)
478 ),526 sorted_packages = self._sortPackages(bins, generalization)
479 group_by=BinaryPackageName.id,527 self.logger.info("Dominating binaries...(2nd pass)")
480 having=Count(BinaryPackagePublishingHistory.id) > 1)528 self._dominatePublications(sorted_packages, generalization)
481 main_clauses = [
482 BinaryPackagePublishingHistory,
483 BinaryPackageRelease.id ==
484 BinaryPackagePublishingHistory.binarypackagereleaseID,
485 BinaryPackageRelease.binarypackagenameID.is_in(
486 candidate_binary_names),
487 BinaryPackageRelease.binpackageformat !=
488 BinaryPackageFormat.DDEB,
489 ]
490 main_clauses.extend(bpph_location_clauses)
491
492 def do_domination(pass2_msg=""):
493 msg = "binaries..." + pass2_msg
494 self.logger.info("Finding %s" % msg)
495 bins = IStore(
496 BinaryPackagePublishingHistory).find(*main_clauses)
497 self.logger.info("Dominating %s" % msg)
498 sorted_packages = self._sortPackages(bins, generalization)
499 self._dominatePublications(sorted_packages, generalization)
500
501 do_domination()
502
503 # We need to make a second pass to cover the cases where:
504 # * arch-specific binaries were not all dominated before arch-all
505 # ones that depend on them
506 # * An arch-all turned into an arch-specific, or vice-versa
507 # * A package is completely schizophrenic and changes all of
508 # its binaries between arch-all and arch-any (apparently
509 # occurs sometimes!)
510 do_domination("(2nd pass)")
511529
512 def _composeActiveSourcePubsCondition(self, distroseries, pocket):530 def _composeActiveSourcePubsCondition(self, distroseries, pocket):
513 """Compose ORM condition for restricting relevant source pubs."""531 """Compose ORM condition for restricting relevant source pubs."""
@@ -522,21 +540,11 @@
522 SourcePackagePublishingHistory.pocket == pocket,540 SourcePackagePublishingHistory.pocket == pocket,
523 )541 )
524542
525 def dominateSources(self, distroseries, pocket):543 def findSourcesForDomination(self, distroseries, pocket):
526 """Perform domination on source package publications.544 """Find binary publications that need dominating."""
527
528 Dominates sources, restricted to `distroseries`, `pocket`, and
529 `self.archive`.
530 """
531 # Avoid circular imports.545 # Avoid circular imports.
532 from lp.soyuz.model.publishing import SourcePackagePublishingHistory546 from lp.soyuz.model.publishing import SourcePackagePublishingHistory
533547
534 generalization = GeneralizedPublication(is_source=True)
535
536 self.logger.debug(
537 "Performing domination across %s/%s (Source)",
538 distroseries.name, pocket.title)
539
540 spph_location_clauses = self._composeActiveSourcePubsCondition(548 spph_location_clauses = self._composeActiveSourcePubsCondition(
541 distroseries, pocket)549 distroseries, pocket)
542 having_multiple_active_publications = (550 having_multiple_active_publications = (
@@ -546,12 +554,33 @@
546 And(join_spph_spr(), join_spr_spn(), spph_location_clauses),554 And(join_spph_spr(), join_spr_spn(), spph_location_clauses),
547 group_by=SourcePackageName.id,555 group_by=SourcePackageName.id,
548 having=having_multiple_active_publications)556 having=having_multiple_active_publications)
549 sources = IStore(SourcePackagePublishingHistory).find(557
550 SourcePackagePublishingHistory,558 # We'll also access the SourcePackageReleases associated with
559 # the publications we find. Since they're in the join anyway,
560 # load them alongside the publications.
561 # Actually we'll also want the SourcePackageNames, but adding
562 # those to the (outer) query would complicate it, and
563 # potentially slow it down.
564 query = IStore(SourcePackagePublishingHistory).find(
565 (SourcePackagePublishingHistory, SourcePackageRelease),
551 join_spph_spr(),566 join_spph_spr(),
552 SourcePackageRelease.sourcepackagenameID.is_in(567 SourcePackageRelease.sourcepackagenameID.is_in(
553 candidate_source_names),568 candidate_source_names),
554 spph_location_clauses)569 spph_location_clauses)
570 return DecoratedResultSet(query, itemgetter(0))
571
572 def dominateSources(self, distroseries, pocket):
573 """Perform domination on source package publications.
574
575 Dominates sources, restricted to `distroseries`, `pocket`, and
576 `self.archive`.
577 """
578 self.logger.debug(
579 "Performing domination across %s/%s (Source)",
580 distroseries.name, pocket.title)
581
582 generalization = GeneralizedPublication(is_source=True)
583 sources = self.findSourcesForDomination(distroseries, pocket)
555584
556 self.logger.debug("Dominating sources...")585 self.logger.debug("Dominating sources...")
557 self._dominatePublications(586 self._dominatePublications(
558587
=== modified file 'lib/lp/archivepublisher/tests/test_dominator.py'
--- lib/lp/archivepublisher/tests/test_dominator.py 2011-10-27 11:36:13 +0000
+++ lib/lp/archivepublisher/tests/test_dominator.py 2011-11-03 02:37:26 +0000
@@ -361,7 +361,10 @@
361def make_spphs_for_versions(factory, versions):361def make_spphs_for_versions(factory, versions):
362 """Create publication records for each of `versions`.362 """Create publication records for each of `versions`.
363363
364 They records are created in the same order in which they are specified.364 All these publications will be in the same source package, archive,
365 distroseries, and pocket. They will all be in Published status.
366
367 The records are created in the same order in which they are specified.
365 Make the order irregular to prove that version ordering is not a368 Make the order irregular to prove that version ordering is not a
366 coincidence of object creation order etc.369 coincidence of object creation order etc.
367370
@@ -371,6 +374,7 @@
371 spn = factory.makeSourcePackageName()374 spn = factory.makeSourcePackageName()
372 distroseries = factory.makeDistroSeries()375 distroseries = factory.makeDistroSeries()
373 pocket = factory.getAnyPocket()376 pocket = factory.getAnyPocket()
377 archive = distroseries.main_archive
374 sprs = [378 sprs = [
375 factory.makeSourcePackageRelease(379 factory.makeSourcePackageRelease(
376 sourcepackagename=spn, version=version)380 sourcepackagename=spn, version=version)
@@ -378,11 +382,34 @@
378 return [382 return [
379 factory.makeSourcePackagePublishingHistory(383 factory.makeSourcePackagePublishingHistory(
380 distroseries=distroseries, pocket=pocket,384 distroseries=distroseries, pocket=pocket,
381 sourcepackagerelease=spr,385 sourcepackagerelease=spr, archive=archive,
382 status=PackagePublishingStatus.PUBLISHED)386 status=PackagePublishingStatus.PUBLISHED)
383 for spr in sprs]387 for spr in sprs]
384388
385389
390def make_bpphs_for_versions(factory, versions):
391 """Create publication records for each of `versions`.
392
393 All these publications will be in the same binary package, source
394 package, archive, distroarchseries, and pocket. They will all be in
395 Published status.
396 """
397 bpn = factory.makeBinaryPackageName()
398 spn = factory.makeSourcePackageName()
399 das = factory.makeDistroArchSeries()
400 archive = das.distroseries.main_archive
401 pocket = factory.getAnyPocket()
402 bprs = [
403 factory.makeBinaryPackageRelease(binarypackagename=bpn)
404 for version in versions]
405 return [
406 factory.makeBinaryPackagePublishingHistory(
407 binarypackagerelease=bpr, binarypackagename=bpn,
408 distroarchseries=das, pocket=pocket, archive=archive,
409 sourcepackagename=spn, status=PackagePublishingStatus.PUBLISHED)
410 for bpr in bprs]
411
412
386def list_source_versions(spphs):413def list_source_versions(spphs):
387 """Extract the versions from `spphs` as a list, in the same order."""414 """Extract the versions from `spphs` as a list, in the same order."""
388 return [spph.sourcepackagerelease.version for spph in spphs]415 return [spph.sourcepackagerelease.version for spph in spphs]
@@ -921,3 +948,144 @@
921 [],948 [],
922 dominator.findPublishedSPPHs(949 dominator.findPublishedSPPHs(
923 spph.distroseries, spph.pocket, other_package.name))950 spph.distroseries, spph.pocket, other_package.name))
951
952 def test_findBinariesForDomination_finds_published_publications(self):
953 bpphs = make_bpphs_for_versions(self.factory, ['1.0', '1.1'])
954 dominator = self.makeDominator(bpphs)
955 self.assertContentEqual(
956 bpphs, dominator.findBinariesForDomination(
957 bpphs[0].distroarchseries, bpphs[0].pocket))
958
959 def test_findBinariesForDomination_skips_single_pub_packages(self):
960 # The domination algorithm that uses findBinariesForDomination
961 # always keeps the latest version live. Thus, a single
962 # publication isn't worth dominating. findBinariesForDomination
963 # won't return it.
964 bpphs = make_bpphs_for_versions(self.factory, ['1.0'])
965 dominator = self.makeDominator(bpphs)
966 self.assertContentEqual(
967 [], dominator.findBinariesForDomination(
968 bpphs[0].distroarchseries, bpphs[0].pocket))
969
970 def test_findBinariesForDomination_ignores_other_distroseries(self):
971 bpphs = make_bpphs_for_versions(self.factory, ['1.0', '1.1'])
972 dominator = self.makeDominator(bpphs)
973 das = bpphs[0].distroarchseries
974 other_series = self.factory.makeDistroSeries(
975 distribution=das.distroseries.distribution)
976 other_das = self.factory.makeDistroArchSeries(
977 distroseries=other_series, architecturetag=das.architecturetag,
978 processorfamily=das.processorfamily)
979 self.assertContentEqual(
980 [], dominator.findBinariesForDomination(
981 other_das, bpphs[0].pocket))
982
983 def test_findBinariesForDomination_ignores_other_architectures(self):
984 bpphs = make_bpphs_for_versions(self.factory, ['1.0', '1.1'])
985 dominator = self.makeDominator(bpphs)
986 other_das = self.factory.makeDistroArchSeries(
987 distroseries=bpphs[0].distroseries)
988 self.assertContentEqual(
989 [], dominator.findBinariesForDomination(
990 other_das, bpphs[0].pocket))
991
992 def test_findBinariesForDomination_ignores_other_archive(self):
993 bpphs = make_bpphs_for_versions(self.factory, ['1.0', '1.1'])
994 dominator = self.makeDominator(bpphs)
995 dominator.archive = self.factory.makeArchive()
996 self.assertContentEqual(
997 [], dominator.findBinariesForDomination(
998 bpphs[0].distroarchseries, bpphs[0].pocket))
999
1000 def test_findBinariesForDomination_ignores_other_pocket(self):
1001 bpphs = make_bpphs_for_versions(self.factory, ['1.0', '1.1'])
1002 dominator = self.makeDominator(bpphs)
1003 for bpph in bpphs:
1004 removeSecurityProxy(bpph).pocket = PackagePublishingPocket.UPDATES
1005 self.assertContentEqual(
1006 [], dominator.findBinariesForDomination(
1007 bpphs[0].distroarchseries, PackagePublishingPocket.SECURITY))
1008
1009 def test_findBinariesForDomination_ignores_other_status(self):
1010 # If we have one BPPH for each possible status, plus one
1011 # Published one to stop findBinariesForDomination from skipping
1012 # the package, findBinariesForDomination returns only the
1013 # Published ones.
1014 versions = [
1015 '1.%d' % self.factory.getUniqueInteger()
1016 for status in PackagePublishingStatus.items] + ['0.9']
1017 bpphs = make_bpphs_for_versions(self.factory, versions)
1018 dominator = self.makeDominator(bpphs)
1019
1020 for bpph, status in zip(bpphs, PackagePublishingStatus.items):
1021 bpph.status = status
1022
1023 # These are the Published publications. The other ones will all
1024 # be ignored.
1025 published_bpphs = [
1026 bpph
1027 for bpph in bpphs
1028 if bpph.status == PackagePublishingStatus.PUBLISHED]
1029
1030 self.assertContentEqual(
1031 published_bpphs,
1032 dominator.findBinariesForDomination(
1033 bpphs[0].distroarchseries, bpphs[0].pocket))
1034
1035 def test_findSourcesForDomination_finds_published_publications(self):
1036 spphs = make_spphs_for_versions(self.factory, ['2.0', '2.1'])
1037 dominator = self.makeDominator(spphs)
1038 self.assertContentEqual(
1039 spphs, dominator.findSourcesForDomination(
1040 spphs[0].distroseries, spphs[0].pocket))
1041
1042 def test_findSourcesForDomination_skips_single_pub_packages(self):
1043 # The domination algorithm that uses findSourcesForDomination
1044 # always keeps the latest version live. Thus, a single
1045 # publication isn't worth dominating. findSourcesForDomination
1046 # won't return it.
1047 spphs = make_spphs_for_versions(self.factory, ['2.0'])
1048 dominator = self.makeDominator(spphs)
1049 self.assertContentEqual(
1050 [], dominator.findSourcesForDomination(
1051 spphs[0].distroseries, spphs[0].pocket))
1052
1053 def test_findSourcesForDomination_ignores_other_distroseries(self):
1054 spphs = make_spphs_for_versions(self.factory, ['2.0', '2.1'])
1055 dominator = self.makeDominator(spphs)
1056 other_series = self.factory.makeDistroSeries(
1057 distribution=spphs[0].distroseries.distribution)
1058 self.assertContentEqual(
1059 [], dominator.findSourcesForDomination(
1060 other_series, spphs[0].pocket))
1061
1062 def test_findSourcesForDomination_ignores_other_pocket(self):
1063 spphs = make_spphs_for_versions(self.factory, ['2.0', '2.1'])
1064 dominator = self.makeDominator(spphs)
1065 for spph in spphs:
1066 removeSecurityProxy(spph).pocket = PackagePublishingPocket.UPDATES
1067 self.assertContentEqual(
1068 [], dominator.findSourcesForDomination(
1069 spphs[0].distroseries, PackagePublishingPocket.SECURITY))
1070
1071 def test_findSourcesForDomination_ignores_other_status(self):
1072 versions = [
1073 '1.%d' % self.factory.getUniqueInteger()
1074 for status in PackagePublishingStatus.items] + ['0.9']
1075 spphs = make_spphs_for_versions(self.factory, versions)
1076 dominator = self.makeDominator(spphs)
1077
1078 for spph, status in zip(spphs, PackagePublishingStatus.items):
1079 spph.status = status
1080
1081 # These are the Published publications. The other ones will all
1082 # be ignored.
1083 published_spphs = [
1084 spph
1085 for spph in spphs
1086 if spph.status == PackagePublishingStatus.PUBLISHED]
1087
1088 self.assertContentEqual(
1089 published_spphs,
1090 dominator.findSourcesForDomination(
1091 spphs[0].distroseries, spphs[0].pocket))
9241092
=== modified file 'lib/lp/bugs/javascript/buglisting.js'
--- lib/lp/bugs/javascript/buglisting.js 2011-11-01 17:55:21 +0000
+++ lib/lp/bugs/javascript/buglisting.js 2011-11-03 02:37:26 +0000
@@ -87,9 +87,6 @@
     var navigator = new namespace.ListingNavigator(
         window.location, LP.cache, LP.mustache_listings, target,
         navigation_indices);
-    if (target === null){
-        return;
-    }
     namespace.linkify_navigation();
     navigator.backwards_navigation = Y.all('.first,.previous');
     navigator.forwards_navigation = Y.all('.last,.next');
@@ -111,9 +108,6 @@
  * The template is always LP.mustache_listings.
  */
 namespace.ListingNavigator.prototype.render = function(){
-    if (! Y.Lang.isValue(this.target)){
-        return;
-    }
     var model = this.current_batch.mustache_model;
     var batch_info = Mustache.to_html(this.batch_info_template, {
         start: this.current_batch.start + 1,
 
=== modified file 'lib/lp/bugs/javascript/tests/test_buglisting.js'
--- lib/lp/bugs/javascript/tests/test_buglisting.js 2011-11-01 17:55:21 +0000
+++ lib/lp/bugs/javascript/tests/test_buglisting.js 2011-11-03 02:37:26 +0000
@@ -27,11 +27,6 @@
         Y.one('#fixture').setContent('');
     },
 
-    test_render_no_client_listing: function() {
-        // Rendering should not error with no #client-listing.
-        var navigator = new module.ListingNavigator();
-        navigator.render();
-    },
     get_render_navigator: function() {
         var target = Y.Node.create('<div id="client-listing"></div>');
         var lp_cache = {
@@ -230,8 +225,9 @@
         };
         var template = "<ol>" +
             "{{#item}}<li>{{name}}</li>{{/item}}</ol>";
-        var navigator = new module.ListingNavigator(window.location, lp_cache,
-            template);
+        var target = Y.Node.create('<div id="client-listing"></div>');
+        var navigator = new module.ListingNavigator(
+            window.location, lp_cache, template, target);
         var key = module.ListingNavigator.get_batch_key({
             order_by: "intensity",
             memo: 'memo1',
@@ -243,10 +239,13 @@
             memo: 'memo1',
             forwards: true,
             start: 5,
-            mustache_model: {item: [
-                {name: 'first'},
-                {name: 'second'}
-            ]}};
+            mustache_model: {
+                item: [
+                    {name: 'first'},
+                    {name: 'second'}
+                ],
+                bugtasks: ['a', 'b', 'c']
+            }};
         navigator.update_from_model(batch);
         Y.lp.testing.assert.assert_equal_structure(
             batch, navigator.batches[key]);
@@ -422,13 +421,6 @@
     getPreviousLink: function(){
         return Y.one('.previous').get('href');
     },
-    test_from_page_no_client: function(){
-        Y.one('#fixture').setContent(
-            '<a class="previous" href="http://example.org/">PreVious</span>');
-        Y.Assert.areSame('http://example.org/', this.getPreviousLink());
-        module.ListingNavigator.from_page();
-        Y.Assert.areSame('http://example.org/', this.getPreviousLink());
-    },
     test_from_page_with_client: function(){
         Y.one('#fixture').setContent(
             '<a class="previous" href="http://example.org/">PreVious</span>' +
 
=== modified file 'lib/lp/bugs/templates/buglisting-default.pt'
--- lib/lp/bugs/templates/buglisting-default.pt 2011-10-28 19:17:29 +0000
+++ lib/lp/bugs/templates/buglisting-default.pt 2011-11-03 02:37:26 +0000
@@ -18,11 +18,17 @@
   </tal:comment>
   <script type="text/javascript"
       tal:condition="not: view/shouldShowAdvancedForm">
-    LPS.use('lp.registry.structural_subscription', 'lp.bugs.buglisting',
-            'lp.ordering', function(Y) {
+    LPS.use('lp.registry.structural_subscription', function(Y) {
         Y.on('domready', function() {
             Y.lp.registry.structural_subscription.setup(
                 {content_box: "#structural-subscription-content-box"});
+        });
+    });
+  </script>
+  <script type="text/javascript"
+      tal:condition="request/features/bugs.dynamic_bug_listings.enabled">
+    LPS.use('lp.bugs.buglisting', 'lp.ordering', function(Y) {
+        Y.on('domready', function() {
             var navigator = Y.lp.bugs.buglisting.ListingNavigator.from_page();
             var orderby = new Y.lp.ordering.OrderByBar({
                 srcNode: Y.one('#bugs-orderby'),
 
=== added file 'lib/lp/registry/scripts/teamparticipation.py'
--- lib/lp/registry/scripts/teamparticipation.py 1970-01-01 00:00:00 +0000
+++ lib/lp/registry/scripts/teamparticipation.py 2011-11-03 02:37:26 +0000
@@ -0,0 +1,160 @@
+# Copyright 2011 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Script code relating to team participations."""
+
+__metaclass__ = type
+__all__ = [
+    "check_teamparticipation",
+    ]
+
+from collections import (
+    defaultdict,
+    namedtuple,
+    )
+from itertools import (
+    chain,
+    imap,
+    )
+
+import transaction
+from zope.component import getUtility
+
+from canonical.database.sqlbase import quote
+from canonical.launchpad.webapp.interfaces import (
+    IStoreSelector,
+    MAIN_STORE,
+    SLAVE_FLAVOR,
+    )
+from lp.registry.interfaces.teammembership import ACTIVE_STATES
+from lp.services.scripts.base import LaunchpadScriptFailure
+
+
+def get_store():
+    """Return a slave store.
+
+    Errors in `TeamPartipation` can be detected using a replicated copy.
+    """
+    return getUtility(IStoreSelector).get(MAIN_STORE, SLAVE_FLAVOR)
+
+
+def check_teamparticipation_self(log):
+    """Check self-participation.
+
+    All people and teams should participate in themselves.
+    """
+    query = """
+        SELECT id, name
+        FROM Person
+        WHERE id NOT IN (
+            SELECT person FROM TeamParticipation
+            WHERE person = team)
+        AND merged IS NULL
+        """
+    non_self_participants = list(get_store().execute(query))
+    if len(non_self_participants) > 0:
+        log.warn(
+            "Some people/teams are not members of themselves: %s",
+            non_self_participants)
+
+
+def check_teamparticipation_circular(log):
+    """Check circular references.
+
+    There can be no mutual participation between teams.
+    """
+    query = """
+        SELECT tp.team, tp2.team
+        FROM TeamParticipation AS tp,
+             TeamParticipation AS tp2
+        WHERE tp.team = tp2.person
+          AND tp.person = tp2.team
+          AND tp.id != tp2.id;
+        """
+    circular_references = list(get_store().execute(query))
+    if len(circular_references) > 0:
+        raise LaunchpadScriptFailure(
+            "Circular references found: %s" % circular_references)
+
+
+ConsistencyError = namedtuple(
+    "ConsistencyError", ("type", "team", "people"))
+
+
+def check_teamparticipation_consistency(log):
+    """Check for missing or spurious participations.
+
+    For example, participations for people who are not members, or missing
+    participations for people who are members.
+    """
+    store = get_store()
+
+    # Slurp everything in.
+    people = dict(
+        store.execute(
+            "SELECT id, name FROM Person"
+            " WHERE teamowner IS NULL"
+            " AND merged IS NULL"))
+    teams = dict(
+        store.execute(
+            "SELECT id, name FROM Person"
+            " WHERE teamowner IS NOT NULL"
+            " AND merged IS NULL"))
+    team_memberships = defaultdict(set)
+    results = store.execute(
+        "SELECT team, person FROM TeamMembership"
+        " WHERE status in %s" % quote(ACTIVE_STATES))
+    for (team, person) in results:
+        team_memberships[team].add(person)
+    team_participations = defaultdict(set)
+    results = store.execute(
+        "SELECT team, person FROM TeamParticipation")
+    for (team, person) in results:
+        team_participations[team].add(person)
+
+    # Don't hold any locks.
+    transaction.commit()
+
+    # Check team memberships.
+    def get_participants(team):
+        """Recurse through membership records to get participants."""
+        member_people = team_memberships[team].intersection(people)
+        member_people.add(team)  # Teams always participate in themselves.
+        member_teams = team_memberships[team].intersection(teams)
+        return member_people.union(
+            chain.from_iterable(imap(get_participants, member_teams)))
+
+    errors = []
+    for team in teams:
+        participants_observed = team_participations[team]
+        participants_expected = get_participants(team)
+        participants_spurious = participants_expected - participants_observed
+        participants_missing = participants_observed - participants_expected
+        if len(participants_spurious) > 0:
+            error = ConsistencyError("spurious", team, participants_spurious)
+            errors.append(error)
+        if len(participants_missing) > 0:
+            error = ConsistencyError("missing", team, participants_missing)
+            errors.append(error)
+
+    # TODO:
+    # - Check that the only participant of a *person* is the person.
+    # - Check that merged people and teams do not appear in TeamParticipation.
+
+    def get_repr(id):
+        return "%s (%d)" % (people[id] if id in people else teams[id], id)
+
+    for error in errors:
+        people_repr = ", ".join(imap(get_repr, error.people))
+        log.warn(
+            "%s: %s TeamParticipation entries for %s.",
+            get_repr(error.team), error.type, people_repr)
+
+    return errors
+
+
+def check_teamparticipation(log):
+    """Perform various checks on the `TeamParticipation` table."""
+    check_teamparticipation_self(log)
+    check_teamparticipation_circular(log)
+    check_teamparticipation_consistency(log)
=== modified file 'lib/lp/registry/tests/test_teammembership.py'
--- lib/lp/registry/tests/test_teammembership.py 2011-09-01 06:18:57 +0000
+++ lib/lp/registry/tests/test_teammembership.py 2011-11-03 02:37:26 +0000
@@ -9,13 +9,11 @@
9 )9 )
10import re10import re
11import subprocess11import subprocess
12from unittest import TestLoader
13
14import pytz
15from testtools.content import text_content
12from testtools.matchers import Equals16from testtools.matchers import Equals
13from unittest import (
14 TestCase,
15 TestLoader,
16 )
17
18import pytz
19import transaction17import transaction
20from zope.component import getUtility18from zope.component import getUtility
21from zope.security.proxy import removeSecurityProxy19from zope.security.proxy import removeSecurityProxy
@@ -27,10 +25,6 @@
27 flush_database_updates,25 flush_database_updates,
28 sqlvalues,26 sqlvalues,
29 )27 )
30from canonical.launchpad.ftests import (
31 login,
32 login_person,
33 )
34from canonical.launchpad.interfaces.lpstorm import IStore28from canonical.launchpad.interfaces.lpstorm import IStore
35from canonical.launchpad.testing.systemdocs import (29from canonical.launchpad.testing.systemdocs import (
36 default_optionflags,30 default_optionflags,
@@ -53,16 +47,22 @@
53 ITeamMembershipSet,47 ITeamMembershipSet,
54 TeamMembershipStatus,48 TeamMembershipStatus,
55 )49 )
56from lp.registry.model.teammembership import (\50from lp.registry.model.teammembership import (
57 find_team_participations,51 find_team_participations,
58 TeamMembership,52 TeamMembership,
59 TeamParticipation,53 TeamParticipation,
60 )54 )
55from lp.registry.scripts.teamparticipation import check_teamparticipation
56from lp.services.log.logger import BufferLogger
61from lp.testing import (57from lp.testing import (
58 login,
62 login_celebrity,59 login_celebrity,
60 login_person,
63 person_logged_in,61 person_logged_in,
62 StormStatementRecorder,
63 TestCase,
64 TestCaseWithFactory,64 TestCaseWithFactory,
65 StormStatementRecorder)65 )
66from lp.testing.mail_helpers import pop_notifications66from lp.testing.mail_helpers import pop_notifications
67from lp.testing.matchers import HasQueryCount67from lp.testing.matchers import HasQueryCount
68from lp.testing.storm import reload_object68from lp.testing.storm import reload_object
@@ -1047,6 +1047,7 @@
10471047
10481048
1049class TestCheckTeamParticipationScript(TestCase):1049class TestCheckTeamParticipationScript(TestCase):
1050
1050 layer = DatabaseFunctionalLayer1051 layer = DatabaseFunctionalLayer
10511052
1052 def _runScript(self, expected_returncode=0):1053 def _runScript(self, expected_returncode=0):
@@ -1054,14 +1055,19 @@
1054 'cronscripts/check-teamparticipation.py', shell=True,1055 'cronscripts/check-teamparticipation.py', shell=True,
1055 stdin=subprocess.PIPE, stdout=subprocess.PIPE,1056 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
1056 stderr=subprocess.PIPE)1057 stderr=subprocess.PIPE)
1057 (out, err) = process.communicate()1058 out, err = process.communicate()
1058 self.assertEqual(process.returncode, expected_returncode, (out, err))1059 if out != "":
1060 self.addDetail("stdout", text_content(out))
1061 if err != "":
1062 self.addDetail("stderr", text_content(err))
1063 self.assertEqual(process.returncode, expected_returncode)
1059 return out, err1064 return out, err
10601065
1061 def test_no_output_if_no_invalid_entries(self):1066 def test_no_output_if_no_invalid_entries(self):
1062 """No output if there's no invalid teamparticipation entries."""1067 """No output if there's no invalid teamparticipation entries."""
1063 out, err = self._runScript()1068 out, err = self._runScript()
1064 self.assertEqual((out, err), ('', ''))1069 self.assertEqual(0, len(out))
1070 self.assertEqual(0, len(err))
10651071
1066 def test_report_invalid_teamparticipation_entries(self):1072 def test_report_invalid_teamparticipation_entries(self):
1067 """The script reports missing/spurious TeamParticipation entries.1073 """The script reports missing/spurious TeamParticipation entries.
@@ -1103,16 +1109,13 @@
1103 transaction.commit()1109 transaction.commit()
11041110
1105 out, err = self._runScript()1111 out, err = self._runScript()
1106 self.assertEqual(out, '', (out, err))1112 self.assertEqual(0, len(out))
1107 self.failUnless(1113 self.failUnless(
1108 re.search('missing TeamParticipation entries for zzzzz', err),1114 re.search('missing TeamParticipation entries for zzzzz', err))
1109 (out, err))1115 self.failUnless(
1110 self.failUnless(1116 re.search('spurious TeamParticipation entries for zzzzz', err))
1111 re.search('spurious TeamParticipation entries for zzzzz', err),1117 self.failUnless(
1112 (out, err))1118 re.search('not members of themselves:.*zzzzz.*', err))
1113 self.failUnless(
1114 re.search('not members of themselves:.*zzzzz.*', err),
1115 (out, err))
11161119
1117 def test_report_circular_team_references(self):1120 def test_report_circular_team_references(self):
1118 """The script reports circular references between teams.1121 """The script reports circular references between teams.
@@ -1145,9 +1148,34 @@
1145 import transaction1148 import transaction
1146 transaction.commit()1149 transaction.commit()
1147 out, err = self._runScript(expected_returncode=1)1150 out, err = self._runScript(expected_returncode=1)
1148 self.assertEqual(out, '', (out, err))1151 self.assertEqual(0, len(out))
1149 self.failUnless(1152 self.failUnless(re.search('Circular references found', err))
1150 re.search('Circular references found', err), (out, err))1153
1154
1155class TestCheckTeamParticipationScriptPerformance(TestCaseWithFactory):
1156
1157 layer = DatabaseFunctionalLayer
1158
1159 def test_queries(self):
1160 """The script does not overly tax the database.
1161
1162 The whole check_teamparticipation() run executes a constant low number
1163 of queries.
1164 """
1165 # Create a deeply nested team and member structure.
1166 team = self.factory.makeTeam()
1167 for num in xrange(10):
1168 another_team = self.factory.makeTeam()
1169 another_person = self.factory.makePerson()
1170 with person_logged_in(team.teamowner):
1171 team.addMember(another_team, team.teamowner)
1172 team.addMember(another_person, team.teamowner)
1173 team = another_team
1174 transaction.commit()
1175 with StormStatementRecorder() as recorder:
1176 logger = BufferLogger()
1177 check_teamparticipation(logger)
1178 self.assertThat(recorder, HasQueryCount(Equals(6)))
11511179
11521180
1153def test_suite():1181def test_suite():
11541182
=== modified file 'lib/lp/soyuz/model/publishing.py'
--- lib/lp/soyuz/model/publishing.py 2011-10-23 02:58:56 +0000
+++ lib/lp/soyuz/model/publishing.py 2011-11-03 02:37:26 +0000
@@ -33,7 +33,6 @@
     Desc,
     LeftJoin,
     Or,
-    Select,
     Sum,
     )
 from storm.store import Store
@@ -1154,19 +1153,10 @@
         # Avoid circular wotsits.
         from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
         from lp.soyuz.model.distroarchseries import DistroArchSeries
-        from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
-        source_select = Select(
-            SourcePackageRelease.id,
-            And(
-                BinaryPackageBuild.source_package_release_id ==
-                SourcePackageRelease.id,
-                BinaryPackageRelease.build == BinaryPackageBuild.id,
-                self.binarypackagereleaseID == BinaryPackageRelease.id,
-                ))
+
         pubs = [
             BinaryPackageBuild.source_package_release_id ==
-            SourcePackageRelease.id,
-            SourcePackageRelease.id.is_in(source_select),
+            self.binarypackagerelease.build.source_package_release_id,
             BinaryPackageRelease.build == BinaryPackageBuild.id,
             BinaryPackagePublishingHistory.binarypackagereleaseID ==
             BinaryPackageRelease.id,
