Merge lp:~wgrant/launchpad/observer-db into lp:launchpad/db-devel

Proposed by William Grant
Status: Superseded
Proposed branch: lp:~wgrant/launchpad/observer-db
Merge into: lp:launchpad/db-devel
Diff against target: 1099 lines (+579/-214)
11 files modified
cronscripts/check-teamparticipation.py (+3/-71)
database/schema/comments.sql (+20/-0)
database/schema/patch-2208-93-1.sql (+46/-0)
lib/lp/archivepublisher/domination.py (+104/-75)
lib/lp/archivepublisher/tests/test_dominator.py (+170/-2)
lib/lp/bugs/javascript/buglisting.js (+0/-6)
lib/lp/bugs/javascript/tests/test_buglisting.js (+10/-18)
lib/lp/bugs/templates/buglisting-default.pt (+8/-2)
lib/lp/registry/scripts/teamparticipation.py (+160/-0)
lib/lp/registry/tests/test_teammembership.py (+56/-28)
lib/lp/soyuz/model/publishing.py (+2/-12)
To merge this branch: bzr merge lp:~wgrant/launchpad/observer-db
Reviewer: Robert Collins (review type: db), status: Pending
Review via email: mp+81102@code.launchpad.net

This proposal has been superseded by a proposal from 2011-11-03.

Commit message

Initial DB schema for access policies.

Description of the change

This is the initial DB patch for Disclosure's access policies. The migration process will be long and tortuous, but this is a start.

A private artifact (currently a bug or a branch) will be linked to one of its target's access policies, and attempts to access the artifact will be checked against the policy's permissions. This will shortly supplant the existing subscription-and-other-stuff-based checks.
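
As a rough illustration (not part of this patch), the permission check for a private bug might boil down to something like the query below. It ignores team participation, which the real check would have to handle, and :bug and :person stand for bound parameters:

    SELECT EXISTS (
        SELECT 1
        FROM Bug
        JOIN AccessPolicyPermission AS app ON app.policy = Bug.access_policy
        LEFT JOIN AccessPolicyArtifact AS apa ON apa.id = app.artifact
        WHERE Bug.id = :bug
            AND app.person = :person
            -- Either a policy-wide grant (no artifact), or a grant scoped
            -- to an artifact that wraps this very bug.
            AND (app.artifact IS NULL OR apa.bug = Bug.id)
    ) AS may_see;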

Permissions can be either policy-global or artifact-specific. In the first case, AccessPolicyPermission.artifact is left unset, letting the permission holder see all artifacts under the policy. In the second case, both policy and artifact are set, restricting access to a specific artifact under a specific policy.
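
To make the two cases concrete, here is a sketch of the rows involved; the ids (policy 1, persons 42 and 43, bug 1234) are invented for illustration. The partial unique index on (policy, person) WHERE artifact IS NULL keeps a person from holding more than one policy-wide grant per policy:

    -- Policy-global: person 42 may see every artifact under policy 1.
    INSERT INTO AccessPolicyPermission (policy, person, artifact)
        VALUES (1, 42, NULL);

    -- Artifact-specific: wrap bug 1234 in an abstract artifact, then grant
    -- person 43 access to that artifact only.
    INSERT INTO AccessPolicyArtifact (bug) VALUES (1234);
    INSERT INTO AccessPolicyPermission (policy, person, artifact)
        VALUES (1, 43,
                (SELECT id FROM AccessPolicyArtifact WHERE bug = 1234));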

How access policies are identified isn't well-defined yet, but this schema will do for now. The intent is that, at least for the initial pass, projects will have preconfigured and immutable "Private" and "Security" policies, with the existing bug privacy/security checkboxes altered to map onto these policies. So we can either use an enum, or just treat these as well-known names until a later pass.
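
Assuming the well-known-names route for now, seeding a project's preconfigured policies and pointing a private bug at one of them could look roughly like this (the product id 1 and bug id 1234 are invented for illustration):

    -- One "Private" and one "Security" policy per product.
    INSERT INTO AccessPolicy (product, display_name) VALUES
        (1, 'Private'),
        (1, 'Security');

    -- Marking a bug private would then link it to the product's policy.
    UPDATE Bug
        SET access_policy = (
            SELECT id FROM AccessPolicy
            WHERE product = 1 AND display_name = 'Private')
        WHERE id = 1234;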

Preview Diff

1=== modified file 'cronscripts/check-teamparticipation.py'
2--- cronscripts/check-teamparticipation.py 2011-09-18 03:43:46 +0000
3+++ cronscripts/check-teamparticipation.py 2011-11-03 02:37:26 +0000
4@@ -20,77 +20,8 @@
5
6 import _pythonpath
7
8-import transaction
9-
10-from canonical.database.sqlbase import cursor
11-from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
12-
13-
14-def check_teamparticipation(log):
15- # Check self-participation.
16- query = """
17- SELECT id, name
18- FROM Person WHERE id NOT IN (
19- SELECT person FROM Teamparticipation WHERE person = team
20- ) AND merged IS NULL
21- """
22- cur = cursor()
23- cur.execute(query)
24- non_self_participants = cur.fetchall()
25- if len(non_self_participants) > 0:
26- log.warn("Some people/teams are not members of themselves: %s"
27- % non_self_participants)
28-
29- # Check if there are any circular references between teams.
30- cur.execute("""
31- SELECT tp.team, tp2.team
32- FROM teamparticipation AS tp, teamparticipation AS tp2
33- WHERE tp.team = tp2.person
34- AND tp.person = tp2.team
35- AND tp.id != tp2.id;
36- """)
37- circular_references = cur.fetchall()
38- if len(circular_references) > 0:
39- raise LaunchpadScriptFailure(
40- "Circular references found: %s" % circular_references)
41-
42- # Check if there are any missing/spurious TeamParticipation entries.
43- cur.execute("SELECT id FROM Person WHERE teamowner IS NOT NULL")
44- team_ids = cur.fetchall()
45- transaction.abort()
46-
47- def get_participants(team):
48- """Recurse through the team's members to get all its participants."""
49- participants = set()
50- for member in team.activemembers:
51- participants.add(member)
52- if member.is_team:
53- participants.update(get_participants(member))
54- return participants
55-
56- from lp.registry.model.person import Person
57- batch = team_ids[:50]
58- team_ids = team_ids[50:]
59- while batch:
60- for [id] in batch:
61- team = Person.get(id)
62- expected = get_participants(team)
63- found = set(team.allmembers)
64- difference = expected.difference(found)
65- if len(difference) > 0:
66- people = ", ".join("%s (%s)" % (person.name, person.id)
67- for person in difference)
68- log.warn("%s (%s): missing TeamParticipation entries for %s."
69- % (team.name, team.id, people))
70- reverse_difference = found.difference(expected)
71- if len(reverse_difference) > 0:
72- people = ", ".join("%s (%s)" % (person.name, person.id)
73- for person in reverse_difference)
74- log.warn("%s (%s): spurious TeamParticipation entries for %s."
75- % (team.name, team.id, people))
76- transaction.abort()
77- batch = team_ids[:50]
78- team_ids = team_ids[50:]
79+from lp.registry.scripts.teamparticipation import check_teamparticipation
80+from lp.services.scripts.base import LaunchpadScript
81
82
83 class CheckTeamParticipationScript(LaunchpadScript):
84@@ -99,5 +30,6 @@
85 def main(self):
86 check_teamparticipation(self.logger)
87
88+
89 if __name__ == '__main__':
90 CheckTeamParticipationScript("check-teamparticipation").run()
91
92=== modified file 'database/schema/comments.sql'
93--- database/schema/comments.sql 2011-10-07 11:32:23 +0000
94+++ database/schema/comments.sql 2011-11-03 02:37:26 +0000
95@@ -6,6 +6,26 @@
96 GNU Affero General Public License version 3 (see the file LICENSE).
97 */
98
99+-- AccessPolicy
100+
101+COMMENT ON TABLE AccessPolicy IS 'A policy to manage access to a pillar''s artifacts.';
102+COMMENT ON COLUMN AccessPolicy.product IS 'The product that this policy applies to.';
103+COMMENT ON COLUMN AccessPolicy.distribution IS 'The distribution that this policy applies to.';
104+COMMENT ON COLUMN AccessPolicy.display_name IS 'A human-readable name for this policy.';
105+
106+-- AccessPolicyArtifact
107+
108+COMMENT ON TABLE AccessPolicyArtifact IS 'An artifact that an access permission can apply to.';
109+COMMENT ON COLUMN AccessPolicyArtifact.bug IS 'The bug that this abstract artifact represents.';
110+COMMENT ON COLUMN AccessPolicyArtifact.branch IS 'The branch that this abstract artifact represents.';
111+
112+-- AccessPolicyPermission
113+
114+COMMENT ON TABLE AccessPolicyPermission IS 'A permission for a person to access a policy''s artifacts.';
115+COMMENT ON COLUMN AccessPolicyPermission.policy IS 'The access policy on which access is granted.';
116+COMMENT ON COLUMN AccessPolicyPermission.person IS 'The person that holds the permission.';
117+COMMENT ON COLUMN AccessPolicyPermission.artifact IS 'The optional artifact to which the access is restricted.';
118+
119 -- Announcement
120
121 COMMENT ON TABLE Announcement IS 'A project announcement. This is a single item of news or information that the project is communicating. Announcements can be attached to a Project, a Product or a Distribution.';
122
123=== added file 'database/schema/patch-2208-93-1.sql'
124--- database/schema/patch-2208-93-1.sql 1970-01-01 00:00:00 +0000
125+++ database/schema/patch-2208-93-1.sql 2011-11-03 02:37:26 +0000
126@@ -0,0 +1,46 @@
127+-- Copyright 2011 Canonical Ltd. This software is licensed under the
128+-- GNU Affero General Public License version 3 (see the file LICENSE).
129+SET client_min_messages=ERROR;
130+
131+CREATE TABLE AccessPolicy (
132+ id serial PRIMARY KEY,
133+ product integer REFERENCES Product,
134+ distribution integer REFERENCES Distribution,
135+ display_name text NOT NULL,
136+ CONSTRAINT accesspolicy__product__display_name__key
137+ UNIQUE (product, display_name),
138+ CONSTRAINT accesspolicy__distribution__display_name__key
139+ UNIQUE (distribution, display_name),
140+ CONSTRAINT has_target CHECK (product IS NULL != distribution IS NULL)
141+);
142+
143+CREATE TABLE AccessPolicyArtifact (
144+ id serial PRIMARY KEY,
145+ bug integer REFERENCES Bug,
146+ branch integer REFERENCES Branch,
147+ CONSTRAINT accesspolicyartifact__bug__key UNIQUE (bug),
148+ CONSTRAINT accesspolicyartifact__branch__key UNIQUE (branch),
149+ CONSTRAINT has_artifact CHECK (bug IS NULL != branch IS NULL)
150+);
151+
152+CREATE TABLE AccessPolicyPermission (
153+ id serial PRIMARY KEY,
154+ policy integer NOT NULL REFERENCES AccessPolicy,
155+ person integer NOT NULL REFERENCES Person,
156+ artifact integer REFERENCES AccessPolicyArtifact,
157+ CONSTRAINT accesspolicypermission__policy__person__artifact__key
158+ UNIQUE (policy, person, artifact)
159+);
160+
161+CREATE UNIQUE INDEX accesspolicypermission__policy__person__key
162+ ON AccessPolicyPermission(policy, person) WHERE artifact IS NULL;
163+
164+ALTER TABLE bug
165+ ADD COLUMN access_policy integer REFERENCES AccessPolicy;
166+CREATE INDEX bug__access_policy__idx ON bug(access_policy);
167+
168+ALTER TABLE branch
169+ ADD COLUMN access_policy integer REFERENCES AccessPolicy;
170+CREATE INDEX branch__access_policy__idx ON branch(access_policy);
171+
172+INSERT INTO LaunchpadDatabaseRevision VALUES (2208, 93, 1);
173
174=== modified file 'lib/lp/archivepublisher/domination.py'
175--- lib/lp/archivepublisher/domination.py 2011-10-18 11:56:09 +0000
176+++ lib/lp/archivepublisher/domination.py 2011-11-03 02:37:26 +0000
177@@ -53,6 +53,7 @@
178 __all__ = ['Dominator']
179
180 from datetime import timedelta
181+from operator import itemgetter
182
183 import apt_pkg
184 from storm.expr import (
185@@ -67,6 +68,9 @@
186 flush_database_updates,
187 sqlvalues,
188 )
189+from canonical.launchpad.components.decoratedresultset import (
190+ DecoratedResultSet,
191+ )
192 from canonical.launchpad.interfaces.lpstorm import IStore
193 from lp.registry.model.sourcepackagename import SourcePackageName
194 from lp.services.database.bulk import load_related
195@@ -258,7 +262,8 @@
196 # Go through publications from latest version to oldest. This
197 # makes it easy to figure out which release superseded which:
198 # the dominant is always the oldest live release that is newer
199- # than the one being superseded.
200+ # than the one being superseded. In this loop, that means the
201+ # dominant is always the last live publication we saw.
202 publications = sorted(
203 publications, cmp=generalization.compare, reverse=True)
204
205@@ -272,25 +277,29 @@
206 for pub in publications:
207 version = generalization.getPackageVersion(pub)
208 # There should never be two published releases with the same
209- # version. So this comparison is really a string
210- # comparison, not a version comparison: if the versions are
211- # equal by either measure, they're from the same release.
212- if dominant_version is not None and version == dominant_version:
213+ # version. So it doesn't matter whether this comparison is
214+ # really a string comparison or a version comparison: if the
215+ # versions are equal by either measure, they're from the same
216+ # release.
217+ if version == dominant_version:
218 # This publication is for a live version, but has been
219 # superseded by a newer publication of the same version.
220 # Supersede it.
221 pub.supersede(current_dominant, logger=self.logger)
222 self.logger.debug2(
223 "Superseding older publication for version %s.", version)
224- elif (version in live_versions or
225- (not generalization.is_source and
226- not self._checkArchIndep(pub))):
227+ elif version in live_versions:
228 # This publication stays active; if any publications
229 # that follow right after this are to be superseded,
230 # this is the release that they are superseded by.
231 current_dominant = pub
232 dominant_version = version
233 self.logger.debug2("Keeping version %s.", version)
234+ elif not (generalization.is_source or self._checkArchIndep(pub)):
235+ # As a special case, we keep this version live as well.
236+ current_dominant = pub
237+ dominant_version = version
238+ self.logger.debug2("Keeping version %s.", version)
239 elif current_dominant is None:
240 # This publication is no longer live, but there is no
241 # newer version to supersede it either. Therefore it
242@@ -442,72 +451,81 @@
243 # always equals to "scheduleddeletiondate - quarantine".
244 pub_record.datemadepending = UTC_NOW
245
246+ def findBinariesForDomination(self, distroarchseries, pocket):
247+ """Find binary publications that need dominating.
248+
249+ This is only for traditional domination, where the latest published
250+ publication is always kept published. It will ignore publications
251+ that have no other publications competing for the same binary package.
252+ """
253+ # Avoid circular imports.
254+ from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
255+
256+ bpph_location_clauses = [
257+ BinaryPackagePublishingHistory.status ==
258+ PackagePublishingStatus.PUBLISHED,
259+ BinaryPackagePublishingHistory.distroarchseries ==
260+ distroarchseries,
261+ BinaryPackagePublishingHistory.archive == self.archive,
262+ BinaryPackagePublishingHistory.pocket == pocket,
263+ ]
264+ candidate_binary_names = Select(
265+ BinaryPackageName.id,
266+ And(
267+ BinaryPackageRelease.binarypackagenameID ==
268+ BinaryPackageName.id,
269+ BinaryPackagePublishingHistory.binarypackagereleaseID ==
270+ BinaryPackageRelease.id,
271+ bpph_location_clauses,
272+ ),
273+ group_by=BinaryPackageName.id,
274+ having=Count(BinaryPackagePublishingHistory.id) > 1)
275+ main_clauses = [
276+ BinaryPackageRelease.id ==
277+ BinaryPackagePublishingHistory.binarypackagereleaseID,
278+ BinaryPackageRelease.binarypackagenameID.is_in(
279+ candidate_binary_names),
280+ BinaryPackageRelease.binpackageformat !=
281+ BinaryPackageFormat.DDEB,
282+ ]
283+ main_clauses.extend(bpph_location_clauses)
284+
285+ store = IStore(BinaryPackagePublishingHistory)
286+ return store.find(BinaryPackagePublishingHistory, *main_clauses)
287+
288 def dominateBinaries(self, distroseries, pocket):
289 """Perform domination on binary package publications.
290
291 Dominates binaries, restricted to `distroseries`, `pocket`, and
292 `self.archive`.
293 """
294- # Avoid circular imports.
295- from lp.soyuz.model.publishing import BinaryPackagePublishingHistory
296-
297 generalization = GeneralizedPublication(is_source=False)
298
299 for distroarchseries in distroseries.architectures:
300 self.logger.info(
301 "Performing domination across %s/%s (%s)",
302- distroseries.name, pocket.title,
303+ distroarchseries.distroseries.name, pocket.title,
304 distroarchseries.architecturetag)
305
306- bpph_location_clauses = [
307- BinaryPackagePublishingHistory.status ==
308- PackagePublishingStatus.PUBLISHED,
309- BinaryPackagePublishingHistory.distroarchseries ==
310- distroarchseries,
311- BinaryPackagePublishingHistory.archive == self.archive,
312- BinaryPackagePublishingHistory.pocket == pocket,
313- ]
314- candidate_binary_names = Select(
315- BinaryPackageName.id,
316- And(
317- BinaryPackageRelease.binarypackagenameID ==
318- BinaryPackageName.id,
319- BinaryPackagePublishingHistory.binarypackagereleaseID ==
320- BinaryPackageRelease.id,
321- bpph_location_clauses,
322- ),
323- group_by=BinaryPackageName.id,
324- having=Count(BinaryPackagePublishingHistory.id) > 1)
325- main_clauses = [
326- BinaryPackagePublishingHistory,
327- BinaryPackageRelease.id ==
328- BinaryPackagePublishingHistory.binarypackagereleaseID,
329- BinaryPackageRelease.binarypackagenameID.is_in(
330- candidate_binary_names),
331- BinaryPackageRelease.binpackageformat !=
332- BinaryPackageFormat.DDEB,
333- ]
334- main_clauses.extend(bpph_location_clauses)
335-
336- def do_domination(pass2_msg=""):
337- msg = "binaries..." + pass2_msg
338- self.logger.info("Finding %s" % msg)
339- bins = IStore(
340- BinaryPackagePublishingHistory).find(*main_clauses)
341- self.logger.info("Dominating %s" % msg)
342- sorted_packages = self._sortPackages(bins, generalization)
343- self._dominatePublications(sorted_packages, generalization)
344-
345- do_domination()
346-
347- # We need to make a second pass to cover the cases where:
348- # * arch-specific binaries were not all dominated before arch-all
349- # ones that depend on them
350- # * An arch-all turned into an arch-specific, or vice-versa
351- # * A package is completely schizophrenic and changes all of
352- # its binaries between arch-all and arch-any (apparently
353- # occurs sometimes!)
354- do_domination("(2nd pass)")
355+ self.logger.info("Finding binaries...")
356+ bins = self.findBinariesForDomination(distroarchseries, pocket)
357+ sorted_packages = self._sortPackages(bins, generalization)
358+ self.logger.info("Dominating binaries...")
359+ self._dominatePublications(sorted_packages, generalization)
360+
361+ # We need to make a second pass to cover the cases where:
362+ # * arch-specific binaries were not all dominated before arch-all
363+ # ones that depend on them
364+ # * An arch-all turned into an arch-specific, or vice-versa
365+ # * A package is completely schizophrenic and changes all of
366+ # its binaries between arch-all and arch-any (apparently
367+ # occurs sometimes!)
368+ for distroarchseries in distroseries.architectures:
369+ self.logger.info("Finding binaries...(2nd pass)")
370+ bins = self.findBinariesForDomination(distroarchseries, pocket)
371+ sorted_packages = self._sortPackages(bins, generalization)
372+ self.logger.info("Dominating binaries...(2nd pass)")
373+ self._dominatePublications(sorted_packages, generalization)
374
375 def _composeActiveSourcePubsCondition(self, distroseries, pocket):
376 """Compose ORM condition for restricting relevant source pubs."""
377@@ -522,21 +540,11 @@
378 SourcePackagePublishingHistory.pocket == pocket,
379 )
380
381- def dominateSources(self, distroseries, pocket):
382- """Perform domination on source package publications.
383-
384- Dominates sources, restricted to `distroseries`, `pocket`, and
385- `self.archive`.
386- """
387+ def findSourcesForDomination(self, distroseries, pocket):
388+ """Find binary publications that need dominating."""
389 # Avoid circular imports.
390 from lp.soyuz.model.publishing import SourcePackagePublishingHistory
391
392- generalization = GeneralizedPublication(is_source=True)
393-
394- self.logger.debug(
395- "Performing domination across %s/%s (Source)",
396- distroseries.name, pocket.title)
397-
398 spph_location_clauses = self._composeActiveSourcePubsCondition(
399 distroseries, pocket)
400 having_multiple_active_publications = (
401@@ -546,12 +554,33 @@
402 And(join_spph_spr(), join_spr_spn(), spph_location_clauses),
403 group_by=SourcePackageName.id,
404 having=having_multiple_active_publications)
405- sources = IStore(SourcePackagePublishingHistory).find(
406- SourcePackagePublishingHistory,
407+
408+ # We'll also access the SourcePackageReleases associated with
409+ # the publications we find. Since they're in the join anyway,
410+ # load them alongside the publications.
411+ # Actually we'll also want the SourcePackageNames, but adding
412+ # those to the (outer) query would complicate it, and
413+ # potentially slow it down.
414+ query = IStore(SourcePackagePublishingHistory).find(
415+ (SourcePackagePublishingHistory, SourcePackageRelease),
416 join_spph_spr(),
417 SourcePackageRelease.sourcepackagenameID.is_in(
418 candidate_source_names),
419 spph_location_clauses)
420+ return DecoratedResultSet(query, itemgetter(0))
421+
422+ def dominateSources(self, distroseries, pocket):
423+ """Perform domination on source package publications.
424+
425+ Dominates sources, restricted to `distroseries`, `pocket`, and
426+ `self.archive`.
427+ """
428+ self.logger.debug(
429+ "Performing domination across %s/%s (Source)",
430+ distroseries.name, pocket.title)
431+
432+ generalization = GeneralizedPublication(is_source=True)
433+ sources = self.findSourcesForDomination(distroseries, pocket)
434
435 self.logger.debug("Dominating sources...")
436 self._dominatePublications(
437
438=== modified file 'lib/lp/archivepublisher/tests/test_dominator.py'
439--- lib/lp/archivepublisher/tests/test_dominator.py 2011-10-27 11:36:13 +0000
440+++ lib/lp/archivepublisher/tests/test_dominator.py 2011-11-03 02:37:26 +0000
441@@ -361,7 +361,10 @@
442 def make_spphs_for_versions(factory, versions):
443 """Create publication records for each of `versions`.
444
445- They records are created in the same order in which they are specified.
446+ All these publications will be in the same source package, archive,
447+ distroseries, and pocket. They will all be in Published status.
448+
449+ The records are created in the same order in which they are specified.
450 Make the order irregular to prove that version ordering is not a
451 coincidence of object creation order etc.
452
453@@ -371,6 +374,7 @@
454 spn = factory.makeSourcePackageName()
455 distroseries = factory.makeDistroSeries()
456 pocket = factory.getAnyPocket()
457+ archive = distroseries.main_archive
458 sprs = [
459 factory.makeSourcePackageRelease(
460 sourcepackagename=spn, version=version)
461@@ -378,11 +382,34 @@
462 return [
463 factory.makeSourcePackagePublishingHistory(
464 distroseries=distroseries, pocket=pocket,
465- sourcepackagerelease=spr,
466+ sourcepackagerelease=spr, archive=archive,
467 status=PackagePublishingStatus.PUBLISHED)
468 for spr in sprs]
469
470
471+def make_bpphs_for_versions(factory, versions):
472+ """Create publication records for each of `versions`.
473+
474+ All these publications will be in the same binary package, source
475+ package, archive, distroarchseries, and pocket. They will all be in
476+ Published status.
477+ """
478+ bpn = factory.makeBinaryPackageName()
479+ spn = factory.makeSourcePackageName()
480+ das = factory.makeDistroArchSeries()
481+ archive = das.distroseries.main_archive
482+ pocket = factory.getAnyPocket()
483+ bprs = [
484+ factory.makeBinaryPackageRelease(binarypackagename=bpn, version=version)
485+ for version in versions]
486+ return [
487+ factory.makeBinaryPackagePublishingHistory(
488+ binarypackagerelease=bpr, binarypackagename=bpn,
489+ distroarchseries=das, pocket=pocket, archive=archive,
490+ sourcepackagename=spn, status=PackagePublishingStatus.PUBLISHED)
491+ for bpr in bprs]
492+
493+
494 def list_source_versions(spphs):
495 """Extract the versions from `spphs` as a list, in the same order."""
496 return [spph.sourcepackagerelease.version for spph in spphs]
497@@ -921,3 +948,144 @@
498 [],
499 dominator.findPublishedSPPHs(
500 spph.distroseries, spph.pocket, other_package.name))
501+
502+ def test_findBinariesForDomination_finds_published_publications(self):
503+ bpphs = make_bpphs_for_versions(self.factory, ['1.0', '1.1'])
504+ dominator = self.makeDominator(bpphs)
505+ self.assertContentEqual(
506+ bpphs, dominator.findBinariesForDomination(
507+ bpphs[0].distroarchseries, bpphs[0].pocket))
508+
509+ def test_findBinariesForDomination_skips_single_pub_packages(self):
510+ # The domination algorithm that uses findBinariesForDomination
511+ # always keeps the latest version live. Thus, a single
512+ # publication isn't worth dominating. findBinariesForDomination
513+ # won't return it.
514+ bpphs = make_bpphs_for_versions(self.factory, ['1.0'])
515+ dominator = self.makeDominator(bpphs)
516+ self.assertContentEqual(
517+ [], dominator.findBinariesForDomination(
518+ bpphs[0].distroarchseries, bpphs[0].pocket))
519+
520+ def test_findBinariesForDomination_ignores_other_distroseries(self):
521+ bpphs = make_bpphs_for_versions(self.factory, ['1.0', '1.1'])
522+ dominator = self.makeDominator(bpphs)
523+ das = bpphs[0].distroarchseries
524+ other_series = self.factory.makeDistroSeries(
525+ distribution=das.distroseries.distribution)
526+ other_das = self.factory.makeDistroArchSeries(
527+ distroseries=other_series, architecturetag=das.architecturetag,
528+ processorfamily=das.processorfamily)
529+ self.assertContentEqual(
530+ [], dominator.findBinariesForDomination(
531+ other_das, bpphs[0].pocket))
532+
533+ def test_findBinariesForDomination_ignores_other_architectures(self):
534+ bpphs = make_bpphs_for_versions(self.factory, ['1.0', '1.1'])
535+ dominator = self.makeDominator(bpphs)
536+ other_das = self.factory.makeDistroArchSeries(
537+ distroseries=bpphs[0].distroseries)
538+ self.assertContentEqual(
539+ [], dominator.findBinariesForDomination(
540+ other_das, bpphs[0].pocket))
541+
542+ def test_findBinariesForDomination_ignores_other_archive(self):
543+ bpphs = make_bpphs_for_versions(self.factory, ['1.0', '1.1'])
544+ dominator = self.makeDominator(bpphs)
545+ dominator.archive = self.factory.makeArchive()
546+ self.assertContentEqual(
547+ [], dominator.findBinariesForDomination(
548+ bpphs[0].distroarchseries, bpphs[0].pocket))
549+
550+ def test_findBinariesForDomination_ignores_other_pocket(self):
551+ bpphs = make_bpphs_for_versions(self.factory, ['1.0', '1.1'])
552+ dominator = self.makeDominator(bpphs)
553+ for bpph in bpphs:
554+ removeSecurityProxy(bpph).pocket = PackagePublishingPocket.UPDATES
555+ self.assertContentEqual(
556+ [], dominator.findBinariesForDomination(
557+ bpphs[0].distroarchseries, PackagePublishingPocket.SECURITY))
558+
559+ def test_findBinariesForDomination_ignores_other_status(self):
560+ # If we have one BPPH for each possible status, plus one
561+ # Published one to stop findBinariesForDomination from skipping
562+ # the package, findBinariesForDomination returns only the
563+ # Published ones.
564+ versions = [
565+ '1.%d' % self.factory.getUniqueInteger()
566+ for status in PackagePublishingStatus.items] + ['0.9']
567+ bpphs = make_bpphs_for_versions(self.factory, versions)
568+ dominator = self.makeDominator(bpphs)
569+
570+ for bpph, status in zip(bpphs, PackagePublishingStatus.items):
571+ bpph.status = status
572+
573+ # These are the Published publications. The other ones will all
574+ # be ignored.
575+ published_bpphs = [
576+ bpph
577+ for bpph in bpphs
578+ if bpph.status == PackagePublishingStatus.PUBLISHED]
579+
580+ self.assertContentEqual(
581+ published_bpphs,
582+ dominator.findBinariesForDomination(
583+ bpphs[0].distroarchseries, bpphs[0].pocket))
584+
585+ def test_findSourcesForDomination_finds_published_publications(self):
586+ spphs = make_spphs_for_versions(self.factory, ['2.0', '2.1'])
587+ dominator = self.makeDominator(spphs)
588+ self.assertContentEqual(
589+ spphs, dominator.findSourcesForDomination(
590+ spphs[0].distroseries, spphs[0].pocket))
591+
592+ def test_findSourcesForDomination_skips_single_pub_packages(self):
593+ # The domination algorithm that uses findSourcesForDomination
594+ # always keeps the latest version live. Thus, a single
595+ # publication isn't worth dominating. findSourcesForDomination
596+ # won't return it.
597+ spphs = make_spphs_for_versions(self.factory, ['2.0'])
598+ dominator = self.makeDominator(spphs)
599+ self.assertContentEqual(
600+ [], dominator.findSourcesForDomination(
601+ spphs[0].distroseries, spphs[0].pocket))
602+
603+ def test_findSourcesForDomination_ignores_other_distroseries(self):
604+ spphs = make_spphs_for_versions(self.factory, ['2.0', '2.1'])
605+ dominator = self.makeDominator(spphs)
606+ other_series = self.factory.makeDistroSeries(
607+ distribution=spphs[0].distroseries.distribution)
608+ self.assertContentEqual(
609+ [], dominator.findSourcesForDomination(
610+ other_series, spphs[0].pocket))
611+
612+ def test_findSourcesForDomination_ignores_other_pocket(self):
613+ spphs = make_spphs_for_versions(self.factory, ['2.0', '2.1'])
614+ dominator = self.makeDominator(spphs)
615+ for spph in spphs:
616+ removeSecurityProxy(spph).pocket = PackagePublishingPocket.UPDATES
617+ self.assertContentEqual(
618+ [], dominator.findSourcesForDomination(
619+ spphs[0].distroseries, PackagePublishingPocket.SECURITY))
620+
621+ def test_findSourcesForDomination_ignores_other_status(self):
622+ versions = [
623+ '1.%d' % self.factory.getUniqueInteger()
624+ for status in PackagePublishingStatus.items] + ['0.9']
625+ spphs = make_spphs_for_versions(self.factory, versions)
626+ dominator = self.makeDominator(spphs)
627+
628+ for spph, status in zip(spphs, PackagePublishingStatus.items):
629+ spph.status = status
630+
631+ # These are the Published publications. The other ones will all
632+ # be ignored.
633+ published_spphs = [
634+ spph
635+ for spph in spphs
636+ if spph.status == PackagePublishingStatus.PUBLISHED]
637+
638+ self.assertContentEqual(
639+ published_spphs,
640+ dominator.findSourcesForDomination(
641+ spphs[0].distroseries, spphs[0].pocket))
642
643=== modified file 'lib/lp/bugs/javascript/buglisting.js'
644--- lib/lp/bugs/javascript/buglisting.js 2011-11-01 17:55:21 +0000
645+++ lib/lp/bugs/javascript/buglisting.js 2011-11-03 02:37:26 +0000
646@@ -87,9 +87,6 @@
647 var navigator = new namespace.ListingNavigator(
648 window.location, LP.cache, LP.mustache_listings, target,
649 navigation_indices);
650- if (target === null){
651- return;
652- }
653 namespace.linkify_navigation();
654 navigator.backwards_navigation = Y.all('.first,.previous');
655 navigator.forwards_navigation = Y.all('.last,.next');
656@@ -111,9 +108,6 @@
657 * The template is always LP.mustache_listings.
658 */
659 namespace.ListingNavigator.prototype.render = function(){
660- if (! Y.Lang.isValue(this.target)){
661- return;
662- }
663 var model = this.current_batch.mustache_model;
664 var batch_info = Mustache.to_html(this.batch_info_template, {
665 start: this.current_batch.start + 1,
666
667=== modified file 'lib/lp/bugs/javascript/tests/test_buglisting.js'
668--- lib/lp/bugs/javascript/tests/test_buglisting.js 2011-11-01 17:55:21 +0000
669+++ lib/lp/bugs/javascript/tests/test_buglisting.js 2011-11-03 02:37:26 +0000
670@@ -27,11 +27,6 @@
671 Y.one('#fixture').setContent('');
672 },
673
674- test_render_no_client_listing: function() {
675- // Rendering should not error with no #client-listing.
676- var navigator = new module.ListingNavigator();
677- navigator.render();
678- },
679 get_render_navigator: function() {
680 var target = Y.Node.create('<div id="client-listing"></div>');
681 var lp_cache = {
682@@ -230,8 +225,9 @@
683 };
684 var template = "<ol>" +
685 "{{#item}}<li>{{name}}</li>{{/item}}</ol>";
686- var navigator = new module.ListingNavigator(window.location, lp_cache,
687- template);
688+ var target = Y.Node.create('<div id="client-listing"></div>');
689+ var navigator = new module.ListingNavigator(
690+ window.location, lp_cache, template, target);
691 var key = module.ListingNavigator.get_batch_key({
692 order_by: "intensity",
693 memo: 'memo1',
694@@ -243,10 +239,13 @@
695 memo: 'memo1',
696 forwards: true,
697 start: 5,
698- mustache_model: {item: [
699- {name: 'first'},
700- {name: 'second'}
701- ]}};
702+ mustache_model: {
703+ item: [
704+ {name: 'first'},
705+ {name: 'second'}
706+ ],
707+ bugtasks: ['a', 'b', 'c']
708+ }};
709 navigator.update_from_model(batch);
710 Y.lp.testing.assert.assert_equal_structure(
711 batch, navigator.batches[key]);
712@@ -422,13 +421,6 @@
713 getPreviousLink: function(){
714 return Y.one('.previous').get('href');
715 },
716- test_from_page_no_client: function(){
717- Y.one('#fixture').setContent(
718- '<a class="previous" href="http://example.org/">PreVious</span>');
719- Y.Assert.areSame('http://example.org/', this.getPreviousLink());
720- module.ListingNavigator.from_page();
721- Y.Assert.areSame('http://example.org/', this.getPreviousLink());
722- },
723 test_from_page_with_client: function(){
724 Y.one('#fixture').setContent(
725 '<a class="previous" href="http://example.org/">PreVious</span>' +
726
727=== modified file 'lib/lp/bugs/templates/buglisting-default.pt'
728--- lib/lp/bugs/templates/buglisting-default.pt 2011-10-28 19:17:29 +0000
729+++ lib/lp/bugs/templates/buglisting-default.pt 2011-11-03 02:37:26 +0000
730@@ -18,11 +18,17 @@
731 </tal:comment>
732 <script type="text/javascript"
733 tal:condition="not: view/shouldShowAdvancedForm">
734- LPS.use('lp.registry.structural_subscription', 'lp.bugs.buglisting',
735- 'lp.ordering', function(Y) {
736+ LPS.use('lp.registry.structural_subscription', function(Y) {
737 Y.on('domready', function() {
738 Y.lp.registry.structural_subscription.setup(
739 {content_box: "#structural-subscription-content-box"});
740+ });
741+ });
742+ </script>
743+ <script type="text/javascript"
744+ tal:condition="request/features/bugs.dynamic_bug_listings.enabled">
745+ LPS.use('lp.bugs.buglisting', 'lp.ordering', function(Y) {
746+ Y.on('domready', function() {
747 var navigator = Y.lp.bugs.buglisting.ListingNavigator.from_page();
748 var orderby = new Y.lp.ordering.OrderByBar({
749 srcNode: Y.one('#bugs-orderby'),
750
751=== added file 'lib/lp/registry/scripts/teamparticipation.py'
752--- lib/lp/registry/scripts/teamparticipation.py 1970-01-01 00:00:00 +0000
753+++ lib/lp/registry/scripts/teamparticipation.py 2011-11-03 02:37:26 +0000
754@@ -0,0 +1,160 @@
755+# Copyright 2011 Canonical Ltd. This software is licensed under the
756+# GNU Affero General Public License version 3 (see the file LICENSE).
757+
758+"""Script code relating to team participations."""
759+
760+__metaclass__ = type
761+__all__ = [
762+ "check_teamparticipation",
763+ ]
764+
765+from collections import (
766+ defaultdict,
767+ namedtuple,
768+ )
769+from itertools import (
770+ chain,
771+ imap,
772+ )
773+
774+import transaction
775+from zope.component import getUtility
776+
777+from canonical.database.sqlbase import quote
778+from canonical.launchpad.webapp.interfaces import (
779+ IStoreSelector,
780+ MAIN_STORE,
781+ SLAVE_FLAVOR,
782+ )
783+from lp.registry.interfaces.teammembership import ACTIVE_STATES
784+from lp.services.scripts.base import LaunchpadScriptFailure
785+
786+
787+def get_store():
788+ """Return a slave store.
789+
790+ Errors in `TeamParticipation` can be detected using a replicated copy.
791+ """
792+ return getUtility(IStoreSelector).get(MAIN_STORE, SLAVE_FLAVOR)
793+
794+
795+def check_teamparticipation_self(log):
796+ """Check self-participation.
797+
798+ All people and teams should participate in themselves.
799+ """
800+ query = """
801+ SELECT id, name
802+ FROM Person
803+ WHERE id NOT IN (
804+ SELECT person FROM TeamParticipation
805+ WHERE person = team)
806+ AND merged IS NULL
807+ """
808+ non_self_participants = list(get_store().execute(query))
809+ if len(non_self_participants) > 0:
810+ log.warn(
811+ "Some people/teams are not members of themselves: %s",
812+ non_self_participants)
813+
814+
815+def check_teamparticipation_circular(log):
816+ """Check circular references.
817+
818+ There can be no mutual participation between teams.
819+ """
820+ query = """
821+ SELECT tp.team, tp2.team
822+ FROM TeamParticipation AS tp,
823+ TeamParticipation AS tp2
824+ WHERE tp.team = tp2.person
825+ AND tp.person = tp2.team
826+ AND tp.id != tp2.id;
827+ """
828+ circular_references = list(get_store().execute(query))
829+ if len(circular_references) > 0:
830+ raise LaunchpadScriptFailure(
831+ "Circular references found: %s" % circular_references)
832+
833+
834+ConsistencyError = namedtuple(
835+ "ConsistencyError", ("type", "team", "people"))
836+
837+
838+def check_teamparticipation_consistency(log):
839+ """Check for missing or spurious participations.
840+
841+ For example, participations for people who are not members, or missing
842+ participations for people who are members.
843+ """
844+ store = get_store()
845+
846+ # Slurp everything in.
847+ people = dict(
848+ store.execute(
849+ "SELECT id, name FROM Person"
850+ " WHERE teamowner IS NULL"
851+ " AND merged IS NULL"))
852+ teams = dict(
853+ store.execute(
854+ "SELECT id, name FROM Person"
855+ " WHERE teamowner IS NOT NULL"
856+ " AND merged IS NULL"))
857+ team_memberships = defaultdict(set)
858+ results = store.execute(
859+ "SELECT team, person FROM TeamMembership"
860+ " WHERE status in %s" % quote(ACTIVE_STATES))
861+ for (team, person) in results:
862+ team_memberships[team].add(person)
863+ team_participations = defaultdict(set)
864+ results = store.execute(
865+ "SELECT team, person FROM TeamParticipation")
866+ for (team, person) in results:
867+ team_participations[team].add(person)
868+
869+ # Don't hold any locks.
870+ transaction.commit()
871+
872+ # Check team memberships.
873+ def get_participants(team):
874+ """Recurse through membership records to get participants."""
875+ member_people = team_memberships[team].intersection(people)
876+ member_people.add(team) # Teams always participate in themselves.
877+ member_teams = team_memberships[team].intersection(teams)
878+ return member_people.union(
879+ chain.from_iterable(imap(get_participants, member_teams)))
880+
881+ errors = []
882+ for team in teams:
883+ participants_observed = team_participations[team]
884+ participants_expected = get_participants(team)
885+ participants_spurious = participants_observed - participants_expected
886+ participants_missing = participants_expected - participants_observed
887+ if len(participants_spurious) > 0:
888+ error = ConsistencyError("spurious", team, participants_spurious)
889+ errors.append(error)
890+ if len(participants_missing) > 0:
891+ error = ConsistencyError("missing", team, participants_missing)
892+ errors.append(error)
893+
894+ # TODO:
895+ # - Check that the only participant of a *person* is the person.
896+ # - Check that merged people and teams do not appear in TeamParticipation.
897+
898+ def get_repr(id):
899+ return "%s (%d)" % (people[id] if id in people else teams[id], id)
900+
901+ for error in errors:
902+ people_repr = ", ".join(imap(get_repr, error.people))
903+ log.warn(
904+ "%s: %s TeamParticipation entries for %s.",
905+ get_repr(error.team), error.type, people_repr)
906+
907+ return errors
908+
909+
910+def check_teamparticipation(log):
911+ """Perform various checks on the `TeamParticipation` table."""
912+ check_teamparticipation_self(log)
913+ check_teamparticipation_circular(log)
914+ check_teamparticipation_consistency(log)
915
916=== modified file 'lib/lp/registry/tests/test_teammembership.py'
917--- lib/lp/registry/tests/test_teammembership.py 2011-09-01 06:18:57 +0000
918+++ lib/lp/registry/tests/test_teammembership.py 2011-11-03 02:37:26 +0000
919@@ -9,13 +9,11 @@
920 )
921 import re
922 import subprocess
923+from unittest import TestLoader
924+
925+import pytz
926+from testtools.content import text_content
927 from testtools.matchers import Equals
928-from unittest import (
929- TestCase,
930- TestLoader,
931- )
932-
933-import pytz
934 import transaction
935 from zope.component import getUtility
936 from zope.security.proxy import removeSecurityProxy
937@@ -27,10 +25,6 @@
938 flush_database_updates,
939 sqlvalues,
940 )
941-from canonical.launchpad.ftests import (
942- login,
943- login_person,
944- )
945 from canonical.launchpad.interfaces.lpstorm import IStore
946 from canonical.launchpad.testing.systemdocs import (
947 default_optionflags,
948@@ -53,16 +47,22 @@
949 ITeamMembershipSet,
950 TeamMembershipStatus,
951 )
952-from lp.registry.model.teammembership import (\
953+from lp.registry.model.teammembership import (
954 find_team_participations,
955 TeamMembership,
956 TeamParticipation,
957 )
958+from lp.registry.scripts.teamparticipation import check_teamparticipation
959+from lp.services.log.logger import BufferLogger
960 from lp.testing import (
961+ login,
962 login_celebrity,
963+ login_person,
964 person_logged_in,
965+ StormStatementRecorder,
966+ TestCase,
967 TestCaseWithFactory,
968- StormStatementRecorder)
969+ )
970 from lp.testing.mail_helpers import pop_notifications
971 from lp.testing.matchers import HasQueryCount
972 from lp.testing.storm import reload_object
973@@ -1047,6 +1047,7 @@
974
975
976 class TestCheckTeamParticipationScript(TestCase):
977+
978 layer = DatabaseFunctionalLayer
979
980 def _runScript(self, expected_returncode=0):
981@@ -1054,14 +1055,19 @@
982 'cronscripts/check-teamparticipation.py', shell=True,
983 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
984 stderr=subprocess.PIPE)
985- (out, err) = process.communicate()
986- self.assertEqual(process.returncode, expected_returncode, (out, err))
987+ out, err = process.communicate()
988+ if out != "":
989+ self.addDetail("stdout", text_content(out))
990+ if err != "":
991+ self.addDetail("stderr", text_content(err))
992+ self.assertEqual(process.returncode, expected_returncode)
993 return out, err
994
995 def test_no_output_if_no_invalid_entries(self):
996 """No output if there's no invalid teamparticipation entries."""
997 out, err = self._runScript()
998- self.assertEqual((out, err), ('', ''))
999+ self.assertEqual(0, len(out))
1000+ self.assertEqual(0, len(err))
1001
1002 def test_report_invalid_teamparticipation_entries(self):
1003 """The script reports missing/spurious TeamParticipation entries.
1004@@ -1103,16 +1109,13 @@
1005 transaction.commit()
1006
1007 out, err = self._runScript()
1008- self.assertEqual(out, '', (out, err))
1009- self.failUnless(
1010- re.search('missing TeamParticipation entries for zzzzz', err),
1011- (out, err))
1012- self.failUnless(
1013- re.search('spurious TeamParticipation entries for zzzzz', err),
1014- (out, err))
1015- self.failUnless(
1016- re.search('not members of themselves:.*zzzzz.*', err),
1017- (out, err))
1018+ self.assertEqual(0, len(out))
1019+ self.failUnless(
1020+ re.search('missing TeamParticipation entries for zzzzz', err))
1021+ self.failUnless(
1022+ re.search('spurious TeamParticipation entries for zzzzz', err))
1023+ self.failUnless(
1024+ re.search('not members of themselves:.*zzzzz.*', err))
1025
1026 def test_report_circular_team_references(self):
1027 """The script reports circular references between teams.
1028@@ -1145,9 +1148,34 @@
1029 import transaction
1030 transaction.commit()
1031 out, err = self._runScript(expected_returncode=1)
1032- self.assertEqual(out, '', (out, err))
1033- self.failUnless(
1034- re.search('Circular references found', err), (out, err))
1035+ self.assertEqual(0, len(out))
1036+ self.failUnless(re.search('Circular references found', err))
1037+
1038+
1039+class TestCheckTeamParticipationScriptPerformance(TestCaseWithFactory):
1040+
1041+ layer = DatabaseFunctionalLayer
1042+
1043+ def test_queries(self):
1044+ """The script does not overly tax the database.
1045+
1046+ The whole check_teamparticipation() run executes a constant low number
1047+ of queries.
1048+ """
1049+ # Create a deeply nested team and member structure.
1050+ team = self.factory.makeTeam()
1051+ for num in xrange(10):
1052+ another_team = self.factory.makeTeam()
1053+ another_person = self.factory.makePerson()
1054+ with person_logged_in(team.teamowner):
1055+ team.addMember(another_team, team.teamowner)
1056+ team.addMember(another_person, team.teamowner)
1057+ team = another_team
1058+ transaction.commit()
1059+ with StormStatementRecorder() as recorder:
1060+ logger = BufferLogger()
1061+ check_teamparticipation(logger)
1062+ self.assertThat(recorder, HasQueryCount(Equals(6)))
1063
1064
1065 def test_suite():
1066
1067=== modified file 'lib/lp/soyuz/model/publishing.py'
1068--- lib/lp/soyuz/model/publishing.py 2011-10-23 02:58:56 +0000
1069+++ lib/lp/soyuz/model/publishing.py 2011-11-03 02:37:26 +0000
1070@@ -33,7 +33,6 @@
1071 Desc,
1072 LeftJoin,
1073 Or,
1074- Select,
1075 Sum,
1076 )
1077 from storm.store import Store
1078@@ -1154,19 +1153,10 @@
1079 # Avoid circular wotsits.
1080 from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
1081 from lp.soyuz.model.distroarchseries import DistroArchSeries
1082- from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
1083- source_select = Select(
1084- SourcePackageRelease.id,
1085- And(
1086- BinaryPackageBuild.source_package_release_id ==
1087- SourcePackageRelease.id,
1088- BinaryPackageRelease.build == BinaryPackageBuild.id,
1089- self.binarypackagereleaseID == BinaryPackageRelease.id,
1090- ))
1091+
1092 pubs = [
1093 BinaryPackageBuild.source_package_release_id ==
1094- SourcePackageRelease.id,
1095- SourcePackageRelease.id.is_in(source_select),
1096+ self.binarypackagerelease.build.source_package_release_id,
1097 BinaryPackageRelease.build == BinaryPackageBuild.id,
1098 BinaryPackagePublishingHistory.binarypackagereleaseID ==
1099 BinaryPackageRelease.id,
