Merge lp:~stub/launchpad/bugsummary into lp:launchpad/db-devel

Proposed by Stuart Bishop
Status: Merged
Approved by: Stuart Bishop
Approved revision: not available
Merged at revision: 10683
Proposed branch: lp:~stub/launchpad/bugsummary
Merge into: lp:launchpad/db-devel
Prerequisite: lp:~stub/launchpad/pending-db-changes
Diff against target: 1541 lines (+1274/-2) (has conflicts)
8 files modified
database/schema/patch-2208-75-0.sql (+689/-0)
lib/lp/bugs/javascript/tests/test_subscriber.js.OTHER (+288/-0)
lib/lp/soyuz/doc/distroseriesqueue-ddtp-tarball.txt (+28/-0)
lib/lp/soyuz/doc/distroseriesqueue-dist-upgrader.txt (+52/-0)
lib/lp/soyuz/doc/soyuz-set-of-uploads.txt (+120/-0)
lib/lp/soyuz/doc/soyuz-upload.txt (+56/-0)
lib/lp/soyuz/model/sourcepackagerelease.py (+8/-0)
lib/lp/testing/__init__.py (+33/-2)
Text conflict in database/schema/patch-2208-75-0.sql
Contents conflict in lib/lp/bugs/javascript/tests/test_subscriber.js
Text conflict in lib/lp/soyuz/doc/distroseriesqueue-ddtp-tarball.txt
Text conflict in lib/lp/soyuz/doc/distroseriesqueue-dist-upgrader.txt
Text conflict in lib/lp/soyuz/doc/soyuz-set-of-uploads.txt
Text conflict in lib/lp/soyuz/doc/soyuz-upload.txt
Text conflict in lib/lp/soyuz/model/sourcepackagerelease.py
Text conflict in lib/lp/testing/__init__.py
To merge this branch: bzr merge lp:~stub/launchpad/bugsummary
Reviewer Review Type Date Requested Status
Stuart Bishop (community) db Approve
Review via email: mp+64944@code.launchpad.net

Commit message

[r=stub][bug=793848]

Description of the change

= Summary =

DB patch is sloooow.

== Proposed fix ==

Make it fast by cut and pasting the original INSERT.

== Pre-implementation notes ==

== Implementation details ==

== Tests ==

== Demo and Q/A ==

= Launchpad lint =

Checking for conflicts and issues in changed files.

Linting changed files:

blah blah blah

To post a comment you must log in.
Revision history for this message
Stuart Bishop (stub) :
review: Approve (db)

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'database/schema/patch-2208-75-0.sql'
2--- database/schema/patch-2208-75-0.sql 2011-06-14 17:25:29 +0000
3+++ database/schema/patch-2208-75-0.sql 2011-06-17 07:08:54 +0000
4@@ -1,3 +1,4 @@
5+<<<<<<< TREE
6 -- Copyright 2011 Canonical Ltd. This software is licensed under the
7 -- GNU Affero General Public License version 3 (see the file LICENSE).
8
9@@ -599,3 +600,691 @@
10
11
12 INSERT INTO LaunchpadDatabaseRevision VALUES (2208, 75, 0);
13+=======
14+-- Copyright 2011 Canonical Ltd. This software is licensed under the
15+-- GNU Affero General Public License version 3 (see the file LICENSE).
16+
17+SET client_min_messages=ERROR;
18+
19+-- Trash our existing data, which we will rebuild in a minute.
20+-- Slony-I doesn't like TRUNCATE normally, but OK in a DB patch.
21+TRUNCATE BugSummary;
22+TRUNCATE BugSummaryJournal;
23+
24+-- Drop indexes we will rebuild later.
25+DROP INDEX bugsummary__dimensions__unique;
26+DROP INDEX bugsummary__full__idx;
27+
28+ALTER TABLE BugSummary
29+ -- Add a missing foreign key constraint we were unable to add live.
30+ -- Person table is always being read, so locks are never acquired.
31+ ADD CONSTRAINT bugsummaryjournal_viewed_by_fkey
32+ FOREIGN KEY(viewed_by) REFERENCES Person ON DELETE CASCADE,
33+ ADD COLUMN importance integer NOT NULL,
34+ ADD COLUMN has_patch boolean NOT NULL,
35+ ADD COLUMN fixed_upstream boolean NOT NULL;
36+
37+ALTER TABLE BugSummaryJournal
38+ ADD COLUMN importance integer NOT NULL,
39+ ADD COLUMN has_patch boolean NOT NULL,
40+ ADD COLUMN fixed_upstream boolean NOT NULL;
41+
42+DROP VIEW CombinedBugSummary;
43+CREATE VIEW CombinedBugSummary AS (
44+ SELECT
45+ id, count, product, productseries, distribution, distroseries,
46+ sourcepackagename, viewed_by, tag, status, milestone,
47+ importance, has_patch, fixed_upstream
48+ FROM BugSummary
49+ UNION ALL
50+ SELECT
51+ -id as id, count, product, productseries, distribution, distroseries,
52+ sourcepackagename, viewed_by, tag, status, milestone,
53+ importance, has_patch, fixed_upstream
54+ FROM BugSummaryJournal);
55+
56+
57+-- Rebuild the BugSummary data with the new columns.
58+INSERT INTO BugSummary (
59+ count, product, productseries, distribution, distroseries,
60+ sourcepackagename, viewed_by, tag, status, importance, has_patch,
61+ fixed_upstream, milestone)
62+WITH
63+ -- kill dupes
64+ relevant_bug AS (SELECT * FROM bug where duplicateof is NULL),
65+
66+ -- (bug.id, tag) for all bug-tag pairs plus (bug.id, NULL) for all bugs
67+ bug_tags AS (
68+ SELECT relevant_bug.id, NULL::text AS tag FROM relevant_bug
69+ UNION
70+ SELECT relevant_bug.id, tag
71+ FROM relevant_bug INNER JOIN bugtag ON relevant_bug.id=bugtag.bug),
72+
73+ -- (bug.id, NULL) for all public bugs + (bug.id, viewer) for all
74+ -- (subscribers+assignee) on private bugs
75+ bug_viewers AS (
76+ SELECT relevant_bug.id, NULL::integer AS person
77+ FROM relevant_bug WHERE NOT relevant_bug.private
78+ UNION
79+ SELECT relevant_bug.id, assignee AS person
80+ FROM relevant_bug
81+ INNER JOIN bugtask ON relevant_bug.id=bugtask.bug
82+ WHERE relevant_bug.private and bugtask.assignee IS NOT NULL
83+ UNION
84+ SELECT relevant_bug.id, bugsubscription.person
85+ FROM relevant_bug INNER JOIN bugsubscription
86+ ON bugsubscription.bug=relevant_bug.id WHERE relevant_bug.private),
87+
88+ -- (bugtask.(bug, product, productseries, distribution, distroseries,
89+ -- sourcepackagename, status, milestone) for all bugs + the same with
90+ -- sourcepackage squashed to NULL)
91+ tasks AS (
92+ SELECT
93+ bug, product, productseries, distribution, distroseries,
94+ sourcepackagename, status, importance,
95+ (EXISTS
96+ (SELECT TRUE
97+ FROM BugTask AS RelatedBugTask
98+ WHERE RelatedBugTask.bug = BugTask.bug
99+ AND RelatedBugTask.id != BugTask.id
100+ AND ((RelatedBugTask.bugwatch IS NOT NULL
101+ AND RelatedBugTask.status IN (17, 25, 30))
102+ OR (RelatedBugTask.product IS NOT NULL
103+ AND RelatedBugTask.bugwatch IS NULL
104+ AND RelatedBugTask.status IN (25, 30))))
105+ ) as fixed_upstream, milestone
106+ FROM bugtask
107+ UNION
108+ SELECT DISTINCT ON (
109+ bug, product, productseries, distribution, distroseries,
110+ sourcepackagename, milestone)
111+ bug, product, productseries, distribution, distroseries,
112+ NULL::integer as sourcepackagename,
113+ status, importance,
114+ (EXISTS
115+ (SELECT TRUE
116+ FROM BugTask AS RelatedBugTask
117+ WHERE RelatedBugTask.bug = BugTask.bug
118+ AND RelatedBugTask.id != BugTask.id
119+ AND ((RelatedBugTask.bugwatch IS NOT NULL
120+ AND RelatedBugTask.status IN (17, 25, 30))
121+ OR (RelatedBugTask.product IS NOT NULL
122+ AND RelatedBugTask.bugwatch IS NULL
123+ AND RelatedBugTask.status IN (25, 30))))
124+ ) as fixed_upstream, milestone
125+ FROM bugtask where sourcepackagename IS NOT NULL)
126+
127+ -- Now combine
128+ SELECT
129+ count(*), product, productseries, distribution, distroseries,
130+ sourcepackagename, person, tag, status, importance,
131+ latest_patch_uploaded IS NOT NULL AS has_patch, fixed_upstream,
132+ milestone
133+ FROM relevant_bug
134+ INNER JOIN bug_tags ON relevant_bug.id=bug_tags.id
135+ INNER JOIN bug_viewers ON relevant_bug.id=bug_viewers.id
136+ INNER JOIN tasks on tasks.bug=relevant_bug.id
137+ GROUP BY
138+ product, productseries, distribution, distroseries,
139+ sourcepackagename, person, tag, status, importance, has_patch,
140+ fixed_upstream, milestone;
141+
142+
143+
144+
145+
146+
147+WITH
148+ -- kill dupes
149+ relevant_bug AS (SELECT * FROM bug where duplicateof is NULL),
150+
151+ -- (bug.id, tag) for all bug-tag pairs plus (bug.id, NULL) for all bugs
152+ bug_tags AS (
153+ SELECT relevant_bug.id, NULL::text AS tag FROM relevant_bug
154+ UNION
155+ SELECT relevant_bug.id, tag
156+ FROM relevant_bug INNER JOIN bugtag ON relevant_bug.id=bugtag.bug),
157+ -- (bug.id, NULL) for all public bugs + (bug.id, viewer) for all
158+ -- (subscribers+assignee) on private bugs
159+ bug_viewers AS (
160+ SELECT relevant_bug.id, NULL::integer AS person
161+ FROM relevant_bug WHERE NOT relevant_bug.private
162+ UNION
163+ SELECT relevant_bug.id, assignee AS person
164+ FROM relevant_bug
165+ INNER JOIN bugtask ON relevant_bug.id=bugtask.bug
166+ WHERE relevant_bug.private and bugtask.assignee IS NOT NULL
167+ UNION
168+ SELECT relevant_bug.id, bugsubscription.person
169+ FROM relevant_bug INNER JOIN bugsubscription
170+ ON bugsubscription.bug=relevant_bug.id WHERE relevant_bug.private),
171+
172+ fixed_upstream_tasks AS (
173+ SELECT BugTask.id, BugTask.bug FROM BugTask
174+ WHERE (
175+ (bugwatch IS NOT NULL AND status IN (17, 25, 30))
176+ OR (
177+ bugwatch IS NULL AND product IS NOT NULL
178+ AND status IN (25, 30)))),
179+
180+ -- (bugtask.(bug, product, productseries, distribution, distroseries,
181+ -- sourcepackagename, status, milestone) for all bugs + the same with
182+ -- sourcepackage squashed to NULL)
183+ tasks AS (
184+ SELECT
185+ bug, product, productseries, distribution, distroseries,
186+ sourcepackagename, status, milestone, importance,
187+ EXISTS (
188+ SELECT TRUE FROM fixed_upstream_tasks
189+ WHERE
190+ BugTask.bug = fixed_upstream_tasks.bug
191+ AND BugTask.id <> fixed_upstream_tasks.id
192+ ) AS fixed_upstream
193+ FROM bugtask
194+ UNION
195+ SELECT DISTINCT ON (
196+ bug, product, productseries, distribution, distroseries,
197+ sourcepackagename, milestone)
198+ bug, product, productseries, distribution, distroseries,
199+ NULL::integer as sourcepackagename,
200+ status, milestone, importance,
201+ EXISTS (
202+ SELECT TRUE FROM fixed_upstream_tasks
203+ WHERE
204+ BugTask.bug = fixed_upstream_tasks.bug
205+ AND BugTask.id <> fixed_upstream_tasks.id
206+ ) AS fixed_upstream
207+ FROM bugtask where sourcepackagename IS NOT NULL)
208+
209+ -- Now combine
210+ SELECT
211+ count(*), product, productseries, distribution, distroseries,
212+ sourcepackagename, person, tag, status, milestone, importance,
213+ latest_patch_uploaded IS NOT NULL AS has_patch, fixed_upstream
214+ FROM relevant_bug
215+ INNER JOIN bug_tags ON relevant_bug.id=bug_tags.id
216+ INNER JOIN bug_viewers ON relevant_bug.id=bug_viewers.id
217+ INNER JOIN tasks on tasks.bug=relevant_bug.id
218+ GROUP BY
219+ product, productseries, distribution, distroseries,
220+ sourcepackagename, person, tag, status, milestone,
221+ importance, has_patch, fixed_upstream;
222+
223+
224+-- Rebuild indexes.
225+CREATE INDEX bugsummary__full__idx ON BugSummary (
226+ tag, status, product, productseries, distribution,
227+ distroseries, sourcepackagename, viewed_by, milestone,
228+ importance, has_patch, fixed_upstream);
229+-- Enforce uniqueness again.
230+CREATE UNIQUE INDEX bugsummary__product__unique
231+ ON BugSummary(
232+ product, status, importance, has_patch, fixed_upstream,
233+ COALESCE(tag, ''), COALESCE(milestone, -1), COALESCE(viewed_by, -1))
234+ WHERE product IS NOT NULL;
235+CREATE UNIQUE INDEX bugsummary__productseries__unique
236+ ON BugSummary(
237+ productseries, status, importance, has_patch, fixed_upstream,
238+ COALESCE(tag, ''), COALESCE(milestone, -1), COALESCE(viewed_by, -1))
239+ WHERE productseries IS NOT NULL;
240+CREATE UNIQUE INDEX bugsummary__distribution__unique
241+ ON BugSummary(
242+ distribution, status, importance, has_patch, fixed_upstream,
243+ COALESCE(sourcepackagename, -1),
244+ COALESCE(tag, ''), COALESCE(milestone, -1), COALESCE(viewed_by, -1))
245+ WHERE distribution IS NOT NULL;
246+CREATE UNIQUE INDEX bugsummary__distroseries__unique
247+ ON BugSummary(
248+ distroseries, status, importance, has_patch, fixed_upstream,
249+ COALESCE(sourcepackagename, -1),
250+ COALESCE(tag, ''), COALESCE(milestone, -1), COALESCE(viewed_by, -1))
251+ WHERE distroseries IS NOT NULL;
252+
253+
254+-- Rebuild relevant trigger functions.
255+CREATE OR REPLACE FUNCTION bugsummary_journal_ins(d bugsummary)
256+RETURNS VOID
257+LANGUAGE plpgsql AS
258+$$
259+BEGIN
260+ IF d.count <> 0 THEN
261+ INSERT INTO BugSummaryJournal (
262+ count, product, productseries, distribution,
263+ distroseries, sourcepackagename, viewed_by, tag,
264+ status, milestone,
265+ importance, has_patch, fixed_upstream)
266+ VALUES (
267+ d.count, d.product, d.productseries, d.distribution,
268+ d.distroseries, d.sourcepackagename, d.viewed_by, d.tag,
269+ d.status, d.milestone,
270+ d.importance, d.has_patch, d.fixed_upstream);
271+ END IF;
272+END;
273+$$;
274+
275+COMMENT ON FUNCTION bugsummary_journal_ins(bugsummary) IS
276+'Add an entry into BugSummaryJournal';
277+
278+
279+CREATE OR REPLACE FUNCTION bugsummary_rollup_journal() RETURNS VOID
280+LANGUAGE plpgsql VOLATILE
281+SECURITY DEFINER SET search_path TO public AS
282+$$
283+DECLARE
284+ d bugsummary%ROWTYPE;
285+ max_id integer;
286+BEGIN
287+ -- Lock so we don't contend with other invocations of this
288+ -- function. We can happily lock the BugSummary table for writes
289+ -- as this function is the only thing that updates that table.
290+ -- BugSummaryJournal remains unlocked so nothing should be blocked.
291+ LOCK TABLE BugSummary IN ROW EXCLUSIVE MODE;
292+
293+ SELECT MAX(id) INTO max_id FROM BugSummaryJournal;
294+
295+ FOR d IN
296+ SELECT
297+ NULL as id,
298+ SUM(count),
299+ product,
300+ productseries,
301+ distribution,
302+ distroseries,
303+ sourcepackagename,
304+ viewed_by,
305+ tag,
306+ status,
307+ milestone,
308+ importance,
309+ has_patch,
310+ fixed_upstream
311+ FROM BugSummaryJournal
312+ WHERE id <= max_id
313+ GROUP BY
314+ product, productseries, distribution, distroseries,
315+ sourcepackagename, viewed_by, tag, status, milestone,
316+ importance, has_patch, fixed_upstream
317+ HAVING sum(count) <> 0
318+ LOOP
319+ IF d.count < 0 THEN
320+ PERFORM bug_summary_dec(d);
321+ ELSIF d.count > 0 THEN
322+ PERFORM bug_summary_inc(d);
323+ END IF;
324+ END LOOP;
325+
326+ DELETE FROM BugSummaryJournal WHERE id <= max_id;
327+END;
328+$$;
329+
330+CREATE OR REPLACE FUNCTION bug_summary_dec(bugsummary) RETURNS VOID
331+LANGUAGE SQL AS
332+$$
333+ -- We own the row reference, so in the absence of bugs this cannot
334+ -- fail - just decrement the row.
335+ UPDATE BugSummary SET count = count + $1.count
336+ WHERE
337+ product IS NOT DISTINCT FROM $1.product
338+ AND productseries IS NOT DISTINCT FROM $1.productseries
339+ AND distribution IS NOT DISTINCT FROM $1.distribution
340+ AND distroseries IS NOT DISTINCT FROM $1.distroseries
341+ AND sourcepackagename IS NOT DISTINCT FROM $1.sourcepackagename
342+ AND viewed_by IS NOT DISTINCT FROM $1.viewed_by
343+ AND tag IS NOT DISTINCT FROM $1.tag
344+ AND status IS NOT DISTINCT FROM $1.status
345+ AND milestone IS NOT DISTINCT FROM $1.milestone
346+ AND importance IS NOT DISTINCT FROM $1.importance
347+ AND has_patch IS NOT DISTINCT FROM $1.has_patch
348+ AND fixed_upstream IS NOT DISTINCT FROM $1.fixed_upstream;
349+ -- gc the row (perhaps should be garbo but easy enough to add here):
350+ DELETE FROM bugsummary
351+ WHERE
352+ count=0
353+ AND product IS NOT DISTINCT FROM $1.product
354+ AND productseries IS NOT DISTINCT FROM $1.productseries
355+ AND distribution IS NOT DISTINCT FROM $1.distribution
356+ AND distroseries IS NOT DISTINCT FROM $1.distroseries
357+ AND sourcepackagename IS NOT DISTINCT FROM $1.sourcepackagename
358+ AND viewed_by IS NOT DISTINCT FROM $1.viewed_by
359+ AND tag IS NOT DISTINCT FROM $1.tag
360+ AND status IS NOT DISTINCT FROM $1.status
361+ AND milestone IS NOT DISTINCT FROM $1.milestone
362+ AND importance IS NOT DISTINCT FROM $1.importance
363+ AND has_patch IS NOT DISTINCT FROM $1.has_patch
364+ AND fixed_upstream IS NOT DISTINCT FROM $1.fixed_upstream;
365+ -- If it's not found then someone else also dec'd and won concurrently.
366+$$;
367+
368+CREATE OR REPLACE FUNCTION bug_summary_inc(d bugsummary) RETURNS VOID
369+LANGUAGE plpgsql AS
370+$$
371+BEGIN
372+ -- Shameless adaptation from the PostgreSQL manual
373+ LOOP
374+ -- first try to update the row
375+ UPDATE BugSummary SET count = count + d.count
376+ WHERE
377+ product IS NOT DISTINCT FROM d.product
378+ AND productseries IS NOT DISTINCT FROM d.productseries
379+ AND distribution IS NOT DISTINCT FROM d.distribution
380+ AND distroseries IS NOT DISTINCT FROM d.distroseries
381+ AND sourcepackagename IS NOT DISTINCT FROM d.sourcepackagename
382+ AND viewed_by IS NOT DISTINCT FROM d.viewed_by
383+ AND tag IS NOT DISTINCT FROM d.tag
384+ AND status IS NOT DISTINCT FROM d.status
385+ AND milestone IS NOT DISTINCT FROM d.milestone
386+ AND importance IS NOT DISTINCT FROM d.importance
387+ AND has_patch IS NOT DISTINCT FROM d.has_patch
388+ AND fixed_upstream IS NOT DISTINCT FROM d.fixed_upstream;
389+ IF found THEN
390+ RETURN;
391+ END IF;
392+ -- not there, so try to insert the key
393+ -- if someone else inserts the same key concurrently,
394+ -- we could get a unique-key failure
395+ BEGIN
396+ INSERT INTO BugSummary(
397+ count, product, productseries, distribution,
398+ distroseries, sourcepackagename, viewed_by, tag,
399+ status, milestone,
400+ importance, has_patch, fixed_upstream)
401+ VALUES (
402+ d.count, d.product, d.productseries, d.distribution,
403+ d.distroseries, d.sourcepackagename, d.viewed_by, d.tag,
404+ d.status, d.milestone,
405+ d.importance, d.has_patch, d.fixed_upstream);
406+ RETURN;
407+ EXCEPTION WHEN unique_violation THEN
408+ -- do nothing, and loop to try the UPDATE again
409+ END;
410+ END LOOP;
411+END;
412+$$;
413+
414+COMMENT ON FUNCTION bugsummary_rollup_journal() IS
415+'Collate and migrate rows from BugSummaryJournal to BugSummary';
416+
417+
418+CREATE OR REPLACE FUNCTION ensure_bugsummary_temp_journal() RETURNS VOID
419+LANGUAGE plpgsql VOLATILE AS
420+$$
421+DECLARE
422+BEGIN
423+ CREATE TEMPORARY TABLE bugsummary_temp_journal (
424+ LIKE bugsummary ) ON COMMIT DROP;
425+ ALTER TABLE bugsummary_temp_journal ALTER COLUMN id DROP NOT NULL;
426+EXCEPTION
427+ WHEN duplicate_table THEN
428+ NULL;
429+END;
430+$$;
431+
432+COMMENT ON FUNCTION ensure_bugsummary_temp_journal() IS
433+'Create a temporary table bugsummary_temp_journal if it does not exist.';
434+
435+
436+CREATE OR REPLACE FUNCTION bug_summary_temp_journal_ins(d bugsummary)
437+RETURNS VOID LANGUAGE plpgsql AS
438+$$
439+BEGIN
440+ INSERT INTO BugSummary_Temp_Journal(
441+ count, product, productseries, distribution,
442+ distroseries, sourcepackagename, viewed_by, tag,
443+ status, milestone, importance, has_patch, fixed_upstream)
444+ VALUES (
445+ d.count, d.product, d.productseries, d.distribution,
446+ d.distroseries, d.sourcepackagename, d.viewed_by, d.tag,
447+ d.status, d.milestone, d.importance, d.has_patch, d.fixed_upstream);
448+ RETURN;
449+END;
450+$$;
451+
452+COMMENT ON FUNCTION bug_summary_temp_journal_ins(bugsummary) IS
453+'Insert a BugSummary into the temporary journal';
454+
455+
456+-- Don't need these. Faster to just append rows to the journal
457+-- than attempt to update existing rows.
458+DROP FUNCTION bug_summary_temp_journal_dec(bugsummary);
459+DROP FUNCTION bug_summary_temp_journal_inc(bugsummary);
460+
461+
462+CREATE OR REPLACE FUNCTION bug_summary_flush_temp_journal() RETURNS VOID
463+LANGUAGE plpgsql VOLATILE AS
464+$$
465+DECLARE
466+ d bugsummary%ROWTYPE;
467+BEGIN
468+ -- may get called even though no summaries were made (for simplicity in the
469+ -- callers)
470+ PERFORM ensure_bugsummary_temp_journal();
471+ FOR d IN
472+ SELECT
473+ NULL::integer AS id, SUM(count), product, productseries,
474+ distribution, distroseries, sourcepackagename,
475+ viewed_by, tag, status, milestone,
476+ importance, has_patch, fixed_upstream
477+ FROM BugSummary_temp_journal
478+ GROUP BY
479+ product, productseries,
480+ distribution, distroseries, sourcepackagename,
481+ viewed_by, tag, status, milestone, importance,
482+ has_patch, fixed_upstream
483+ HAVING SUM(count) <> 0
484+ LOOP
485+ IF d.count < 0 THEN
486+ PERFORM bug_summary_dec(d);
487+ ELSE
488+ PERFORM bug_summary_inc(d);
489+ END IF;
490+ END LOOP;
491+ TRUNCATE bugsummary_temp_journal;
492+END;
493+$$;
494+
495+COMMENT ON FUNCTION bug_summary_flush_temp_journal() IS
496+'flush the temporary bugsummary journal into the bugsummary table';
497+
498+
499+CREATE OR REPLACE FUNCTION unsummarise_bug(BUG_ROW bug) RETURNS VOID
500+LANGUAGE plpgsql VOLATILE AS
501+$$
502+DECLARE
503+ d bugsummary%ROWTYPE;
504+BEGIN
505+ PERFORM ensure_bugsummary_temp_journal();
506+ FOR d IN SELECT * FROM bugsummary_locations(BUG_ROW) LOOP
507+ d.count = -1;
508+ PERFORM bug_summary_temp_journal_ins(d);
509+ END LOOP;
510+END;
511+$$;
512+
513+CREATE OR REPLACE FUNCTION summarise_bug(BUG_ROW bug) RETURNS VOID
514+LANGUAGE plpgsql VOLATILE AS
515+$$
516+DECLARE
517+ d bugsummary%ROWTYPE;
518+BEGIN
519+ PERFORM ensure_bugsummary_temp_journal();
520+ FOR d IN SELECT * FROM bugsummary_locations(BUG_ROW) LOOP
521+ d.count = 1;
522+ PERFORM bug_summary_temp_journal_ins(d);
523+ END LOOP;
524+END;
525+$$;
526+
527+
528+CREATE OR REPLACE FUNCTION bug_maintain_bug_summary() RETURNS TRIGGER
529+LANGUAGE plpgsql VOLATILE SECURITY DEFINER SET search_path TO public AS
530+$$
531+BEGIN
532+ -- There is no INSERT logic, as a bug will not have any summary
533+ -- information until BugTask rows have been attached.
534+ IF TG_OP = 'UPDATE' THEN
535+ IF OLD.duplicateof IS DISTINCT FROM NEW.duplicateof
536+ OR OLD.private IS DISTINCT FROM NEW.private
537+ OR (OLD.latest_patch_uploaded IS NULL)
538+ <> (NEW.latest_patch_uploaded IS NULL) THEN
539+ PERFORM unsummarise_bug(OLD);
540+ PERFORM summarise_bug(NEW);
541+ END IF;
542+
543+ ELSIF TG_OP = 'DELETE' THEN
544+ PERFORM unsummarise_bug(OLD);
545+ END IF;
546+
547+ PERFORM bug_summary_flush_temp_journal();
548+ RETURN NULL; -- Ignored - this is an AFTER trigger
549+END;
550+$$;
551+
552+
553+CREATE OR REPLACE FUNCTION bugtask_maintain_bug_summary() RETURNS TRIGGER
554+LANGUAGE plpgsql VOLATILE SECURITY DEFINER SET search_path TO public AS
555+$$
556+BEGIN
557+ -- This trigger only works if we are inserting, updating or deleting
558+ -- a single row per statement.
559+
560+ -- Unlike bug_maintain_bug_summary, this trigger does not have access
561+ -- to the old bug when invoked as an AFTER trigger. To work around this
562+ -- we install this trigger as both a BEFORE and an AFTER trigger.
563+ IF TG_OP = 'INSERT' THEN
564+ IF TG_WHEN = 'BEFORE' THEN
565+ PERFORM unsummarise_bug(bug_row(NEW.bug));
566+ ELSE
567+ PERFORM summarise_bug(bug_row(NEW.bug));
568+ END IF;
569+ PERFORM bug_summary_flush_temp_journal();
570+ RETURN NEW;
571+
572+ ELSIF TG_OP = 'DELETE' THEN
573+ IF TG_WHEN = 'BEFORE' THEN
574+ PERFORM unsummarise_bug(bug_row(OLD.bug));
575+ ELSE
576+ PERFORM summarise_bug(bug_row(OLD.bug));
577+ END IF;
578+ PERFORM bug_summary_flush_temp_journal();
579+ RETURN OLD;
580+
581+ ELSE
582+ IF (OLD.product IS DISTINCT FROM NEW.product
583+ OR OLD.productseries IS DISTINCT FROM NEW.productseries
584+ OR OLD.distribution IS DISTINCT FROM NEW.distribution
585+ OR OLD.distroseries IS DISTINCT FROM NEW.distroseries
586+ OR OLD.sourcepackagename IS DISTINCT FROM NEW.sourcepackagename
587+ OR OLD.status IS DISTINCT FROM NEW.status
588+ OR OLD.importance IS DISTINCT FROM NEW.importance
589+ OR OLD.bugwatch IS DISTINCT FROM NEW.bugwatch
590+ OR OLD.milestone IS DISTINCT FROM NEW.milestone) THEN
591+
592+ IF TG_WHEN = 'BEFORE' THEN
593+ PERFORM unsummarise_bug(bug_row(OLD.bug));
594+ IF OLD.bug <> NEW.bug THEN
595+ PERFORM unsummarise_bug(bug_row(NEW.bug));
596+ END IF;
597+ ELSE
598+ PERFORM summarise_bug(bug_row(OLD.bug));
599+ IF OLD.bug <> NEW.bug THEN
600+ PERFORM summarise_bug(bug_row(NEW.bug));
601+ END IF;
602+ END IF;
603+ END IF;
604+ PERFORM bug_summary_flush_temp_journal();
605+ RETURN NEW;
606+ END IF;
607+END;
608+$$;
609+
610+
611+CREATE OR REPLACE FUNCTION bugsummary_locations(BUG_ROW bug)
612+RETURNS SETOF bugsummary LANGUAGE plpgsql AS
613+$$
614+BEGIN
615+ IF BUG_ROW.duplicateof IS NOT NULL THEN
616+ RETURN;
617+ END IF;
618+ RETURN QUERY
619+ SELECT
620+ CAST(NULL AS integer) AS id,
621+ CAST(1 AS integer) AS count,
622+ product, productseries, distribution, distroseries,
623+ sourcepackagename, person AS viewed_by, tag, status, milestone,
624+ importance,
625+ BUG_ROW.latest_patch_uploaded IS NOT NULL AS has_patch,
626+ (EXISTS (
627+ SELECT TRUE FROM BugTask AS RBT
628+ WHERE
629+ RBT.bug = tasks.bug
630+ -- This would just be 'RBT.id <> tasks.id', except
631+ -- that the records from tasks are summaries and not
632+ -- real bugtasks, and do not have an id.
633+ AND (RBT.product IS DISTINCT FROM tasks.product
634+ OR RBT.productseries
635+ IS DISTINCT FROM tasks.productseries
636+ OR RBT.distribution IS DISTINCT FROM tasks.distribution
637+ OR RBT.distroseries IS DISTINCT FROM tasks.distroseries
638+ OR RBT.sourcepackagename
639+ IS DISTINCT FROM tasks.sourcepackagename)
640+ -- Flagged as INVALID, FIXCOMMITTED or FIXRELEASED
641+ -- via a bugwatch, or FIXCOMMITTED or FIXRELEASED on
642+ -- the product.
643+ AND ((bugwatch IS NOT NULL AND status IN (17, 25, 30))
644+ OR (bugwatch IS NULL AND product IS NOT NULL
645+ AND status IN (25, 30))))
646+ )::boolean AS fixed_upstream
647+ FROM bugsummary_tasks(BUG_ROW) AS tasks
648+ JOIN bugsummary_tags(BUG_ROW) AS bug_tags ON TRUE
649+ LEFT OUTER JOIN bugsummary_viewers(BUG_ROW) AS bug_viewers ON TRUE;
650+END;
651+$$;
652+
653+COMMENT ON FUNCTION bugsummary_locations(bug) IS
654+'Calculate what BugSummary rows should exist for a given Bug.';
655+
656+
657+CREATE OR REPLACE FUNCTION bugsummary_tasks(BUG_ROW bug)
658+RETURNS SETOF bugtask LANGUAGE plpgsql STABLE AS
659+$$
660+DECLARE
661+ bt bugtask%ROWTYPE;
662+ r record;
663+BEGIN
664+ bt.bug = BUG_ROW.id;
665+
666+ -- One row only for each target permutation - need to ignore other fields
667+ -- like date last modified to deal with conjoined masters and multiple
668+ -- sourcepackage tasks in a distro.
669+ FOR r IN
670+ SELECT
671+ product, productseries, distribution, distroseries,
672+ sourcepackagename, status, milestone, importance, bugwatch
673+ FROM BugTask WHERE bug=BUG_ROW.id
674+ UNION -- Implicit DISTINCT
675+ SELECT
676+ product, productseries, distribution, distroseries,
677+ NULL, status, milestone, importance, bugwatch
678+ FROM BugTask WHERE bug=BUG_ROW.id AND sourcepackagename IS NOT NULL
679+ LOOP
680+ bt.product = r.product;
681+ bt.productseries = r.productseries;
682+ bt.distribution = r.distribution;
683+ bt.distroseries = r.distroseries;
684+ bt.sourcepackagename = r.sourcepackagename;
685+ bt.status = r.status;
686+ bt.milestone = r.milestone;
687+ bt.importance = r.importance;
688+ bt.bugwatch = r.bugwatch;
689+ RETURN NEXT bt;
690+ END LOOP;
691+END;
692+$$;
693+
694+COMMENT ON FUNCTION bugsummary_tasks(bug) IS
695+'Return all tasks for the bug + all sourcepackagename tasks again with the sourcepackagename squashed';
696+
697+
698+
699+INSERT INTO LaunchpadDatabaseRevision VALUES (2208, 75, 0);
700+>>>>>>> MERGE-SOURCE
701
702=== added file 'lib/lp/bugs/javascript/tests/test_subscriber.js.OTHER'
703--- lib/lp/bugs/javascript/tests/test_subscriber.js.OTHER 1970-01-01 00:00:00 +0000
704+++ lib/lp/bugs/javascript/tests/test_subscriber.js.OTHER 2011-06-17 07:08:54 +0000
705@@ -0,0 +1,288 @@
706+YUI({
707+ base: '../../../../canonical/launchpad/icing/yui/',
708+ filter: 'raw', combine: false, fetchCSS: false
709+ }).use('test', 'console', 'lp.bugs.subscriber', function(Y) {
710+
711+var suite = new Y.Test.Suite("lp.bugs.subscriber Tests");
712+
713+/*
714+ * Test that all the parts of the user name
715+ * are set when given just a URI.
716+ */
717+suite.add(new Y.Test.Case({
718+ name: 'Subscriber From Simple Config',
719+
720+ setUp: function() {
721+ this.config = {
722+ uri: '/~deryck'
723+ };
724+ this.subscriber = new Y.lp.bugs.subscriber.Subscriber(this.config);
725+ },
726+
727+ tearDown: function() {
728+ delete this.config;
729+ delete this.subscriber;
730+ },
731+
732+ test_uri_config: function() {
733+ Y.Assert.areEqual(
734+ '/~deryck',
735+ this.subscriber.get('uri'),
736+ 'User URI should be /~deryck');
737+ Y.Assert.areEqual(
738+ 'deryck',
739+ this.subscriber.get('name'),
740+ 'User name should be deryck');
741+ Y.Assert.areEqual(
742+ this.subscriber.get('uri'),
743+ this.subscriber.get('escaped_uri'),
744+ 'The escaped user uri should be the same as the unescaped uri.');
745+ Y.Assert.isNull(
746+ this.subscriber.get('user_node'),
747+ 'User node should not be known and be null at this point.');
748+ Y.Assert.areSame(
749+ '',
750+ this.subscriber.get('css_name'),
751+ 'Without subscriber_ids object, css_name should not be set yet.');
752+ Y.Assert.areSame(
753+ '',
754+ this.subscriber.get('display_name'),
755+ 'Without user node or client, the display name should be empty.');
756+ }
757+}));
758+
759+/*
760+ * Test that all the parts of the user name
761+ * are set correctly when a name needs escaping.
762+ */
763+suite.add(new Y.Test.Case({
764+ name: 'Escaping Subscriber From Simple Config',
765+
766+ setUp: function() {
767+ this.config = {
768+ uri: '/~foo+bar',
769+ subscriber_ids: {'foo+bar': 'subscriber-16'}
770+ };
771+ this.subscriber = new Y.lp.bugs.subscriber.Subscriber(this.config);
772+ },
773+
774+ tearDown: function() {
775+ delete this.config;
776+ delete this.subscriber;
777+ },
778+
779+ test_escaping_uri_config: function() {
780+ Y.Assert.areEqual(
781+ '/~foo+bar',
782+ this.subscriber.get('uri'),
783+ 'User URI should be /~foo+bar');
784+ Y.Assert.areEqual(
785+ 'foo+bar',
786+ this.subscriber.get('name'),
787+ 'User name should be foo+bar');
788+ Y.Assert.areEqual(
789+ '/~foo%2Bbar',
790+ this.subscriber.get('escaped_uri'),
791+ 'Escaped user URI should be /~foo%2Bbar');
792+ Y.Assert.areEqual(
793+ 'subscriber-16',
794+ this.subscriber.get('css_name'),
795+ 'css_name for user should be subscriber-16');
796+ }
797+}));
798+
799+/*
800+ * Test that the display_name is correctly worked out
801+ * when passed a Node.
802+ */
803+suite.add(new Y.Test.Case({
804+ name: 'Subscriber Name When Passed Node',
805+
806+ setUp: function() {
807+ var node = Y.one('.subscriber-tester');
808+ this.config = {
809+ uri: '/~tester',
810+ user_node: node
811+ };
812+ this.subscriber = new Y.lp.bugs.subscriber.Subscriber(this.config);
813+ },
814+
815+ tearDown: function() {
816+ delete this.config;
817+ delete this.subscriber;
818+ },
819+
820+ test_display_name: function() {
821+ Y.Assert.areEqual(
822+ 'JS Test User',
823+ this.subscriber.get('display_name'),
824+ 'The user name should be JS Test User.');
825+ }
826+}));
827+
828+/*
829+ * Test that display_name is correctly worked out from
830+ * the DOM when not passed a Node.
831+ */
832+suite.add(new Y.Test.Case({
833+ name: 'Subscriber Name When Not Passed Node',
834+
835+ setUp: function() {
836+ this.config = {
837+ uri: '/~tester'
838+ };
839+ this.subscriber = new Y.lp.bugs.subscriber.Subscriber(this.config);
840+ },
841+
842+ tearDown: function() {
843+ delete this.config;
844+ delete this.subscriber;
845+ },
846+
847+ test_display_name_from_dom: function() {
848+ Y.Assert.areEqual(
849+ 'JS Test User',
850+ this.subscriber.get('display_name'),
851+ 'The user name should be JS Test User.');
852+ }
853+}));
854+
855+/*
856+ * Subscriber class that stubs out API calls.
857+ */
858+function APIStubSubscriber(config) {
859+ APIStubSubscriber.superclass.constructor.apply(this, arguments);
860+}
861+Y.extend(APIStubSubscriber, Y.lp.bugs.subscriber.Subscriber, {
862+ get_display_name_from_api: function(client) {
863+ this.set('display_name', 'From API');
864+ this.set_truncated_display_name();
865+ }
866+});
867+
868+/*
869+ * Test that the API is consulted when the display_name cannot be
870+ * worked out from a given Node or the DOM.
871+ */
872+suite.add(new Y.Test.Case({
873+ name: 'Subscriber Name From API',
874+
875+ setUp: function() {
876+ // LP is global.
877+ window.LP = {
878+ cache: {},
879+ links: {}
880+ };
881+ Y.lp.client.Launchpad = function() {};
882+ },
883+
884+ tearDown: function() {
885+ delete window.LP;
886+ },
887+
888+ test_display_name_from_api: function() {
889+ // The API should be consulted when the user is logged in. Set
890+ // the link to "me" to something other than undefined to
891+ // indicate that there is a logged-in user.
892+ LP.links.me = 'not-undefined';
893+ var subscriber = new APIStubSubscriber({});
894+ Y.Assert.areEqual(
895+ 'From API', subscriber.get('display_name'),
896+ 'The display name should be "From API".');
897+ },
898+
899+ test_display_name_when_not_logged_in: function() {
900+ // The API should not be consulted when no user is logged in.
901+ var subscriber = new APIStubSubscriber({});
902+ Y.Assert.areEqual(
903+ '', subscriber.get('display_name'),
904+ 'The display name should be the empty string.');
905+ }
906+}));
907+
908+/*
909+ * Test that a Subscription is properly initialized from
910+ * a simple config and that the basic methods work.
911+ */
912+suite.add(new Y.Test.Case({
913+ name: 'Subscription Test',
914+
915+ setUp: function() {
916+ this.config = {
917+ can_be_unsubscribed: false,
918+ is_direct: true,
919+ is_team: true
920+ };
921+ this.subscription = new Y.lp.bugs.subscriber.Subscription(
922+ this.config);
923+ },
924+
925+ tearDown: function() {
926+ delete this.config;
927+ delete this.subscription;
928+ },
929+
930+ test_subscription_config: function() {
931+ Y.Assert.isFalse(
932+ this.subscription.can_be_unsubscribed_by_user(),
933+ 'The user should not be able to unsubscribed this subscription.');
934+ Y.Assert.isTrue(
935+ this.subscription.is_team(),
936+ 'This subscription should be for a team.');
937+ Y.Assert.isTrue(
938+ this.subscription.is_direct_subscription(),
939+ 'This should be a direct subscription.');
940+ // Also check that the defaults were set.
941+ Y.Assert.isNull(
942+ this.subscription.get('person'),
943+ 'The subscription should not be setup for a person.');
944+ Y.Assert.isNull(
945+ this.subscription.get('subscriber'),
946+ 'The subscription should not be setup for a subscriber.');
947+ },
948+
949+ test_subscription_is_node: function() {
950+ Y.Assert.isFalse(
951+ this.subscription.is_node(),
952+ 'Initially, no node should be supplied to the config.');
953+ var link = Y.one('.menu-link-subscription');
954+ this.subscription.set('link', link);
955+ Y.Assert.isTrue(
956+ this.subscription.is_node(),
957+ 'This subscription should have a node for subscription link.');
958+ },
959+
960+ test_already_subscribed: function() {
961+ var person = new Y.lp.bugs.subscriber.Subscriber({uri: '/~tester'});
962+ this.subscription.set('person', person);
963+ Y.Assert.isTrue(
964+ this.subscription.is_already_subscribed(),
965+ 'The JS Test User should be already subscribed.');
966+ },
967+
968+ test_is_current_user_subscribing: function() {
969+ var person = new Y.lp.bugs.subscriber.Subscriber({uri: '/~tester'});
970+ this.subscription.set('person', person);
971+ var subscriber = this.subscription.get('person');
972+ this.subscription.set('subscriber', subscriber);
973+ Y.Assert.isTrue(
974+ this.subscription.is_current_user_subscribing(),
975+ 'Current user should be the same person being subscribed.');
976+ }
977+}));
978+
979+
980+var handle_complete = function(data) {
981+ window.status = '::::' + JSON.stringify(data);
982+ };
983+Y.Test.Runner.on('complete', handle_complete);
984+Y.Test.Runner.add(suite);
985+
986+var console = new Y.Console({newestOnTop: false});
987+console.render('#log');
988+
989+Y.on('domready', function() {
990+ Y.Test.Runner.run();
991+});
992+});
993+
994
995=== modified file 'lib/lp/soyuz/doc/distroseriesqueue-ddtp-tarball.txt'
996--- lib/lp/soyuz/doc/distroseriesqueue-ddtp-tarball.txt 2011-06-15 09:49:37 +0000
997+++ lib/lp/soyuz/doc/distroseriesqueue-ddtp-tarball.txt 2011-06-17 07:08:54 +0000
998@@ -140,6 +140,7 @@
999
1000 Let's use the script to fetch the ddtp-tarball upload:
1001
1002+<<<<<<< TREE
1003 >>> import subprocess
1004 >>> import os
1005 >>> import sys
1006@@ -165,6 +166,33 @@
1007 ---------------------------------------------------------------------------
1008 1/1 total
1009 <BLANKLINE>
1010+=======
1011+ >>> import subprocess
1012+ >>> import os
1013+ >>> import sys
1014+ >>> from canonical.config import config
1015+
1016+ >>> script = os.path.join(
1017+ ... config.root, "scripts", "ftpmaster-tools", "queue")
1018+
1019+ >>> process = subprocess.Popen([sys.executable, script, "-Q", "accepted",
1020+ ... "-s", "breezy-autotest", "fetch", "trans",
1021+ ... "-d", "ubuntutest"],
1022+ ... stdout=subprocess.PIPE)
1023+ >>> stdout, stderr = process.communicate()
1024+ >>> process.returncode
1025+ 0
1026+ >>> print stdout
1027+ Initializing connection to queue accepted
1028+ Running: "fetch trans"
1029+ Fetching ubuntutest/breezy-autotest (ACCEPTED) 1/1
1030+ ---------------------------------------------------------------------------
1031+ Constructing translations-main_20060728_all.changes
1032+ Constructing translations_main_20060728.tar.gz
1033+ ---------------------------------------------------------------------------
1034+ 1/1 total
1035+ <BLANKLINE>
1036+>>>>>>> MERGE-SOURCE
1037
1038 Check if the files were written:
1039
1040
1041=== modified file 'lib/lp/soyuz/doc/distroseriesqueue-dist-upgrader.txt'
1042--- lib/lp/soyuz/doc/distroseriesqueue-dist-upgrader.txt 2011-06-15 09:49:37 +0000
1043+++ lib/lp/soyuz/doc/distroseriesqueue-dist-upgrader.txt 2011-06-17 07:08:54 +0000
1044@@ -93,6 +93,7 @@
1045 Rejecting item from accepted queue (very unlikely, normally it would
1046 be rejecting from unapproved or new, fix bug #54649):
1047
1048+<<<<<<< TREE
1049 >>> process = subprocess.Popen([
1050 ... sys.executable, script,
1051 ... "-Q", "accepted",
1052@@ -108,11 +109,27 @@
1053 ...
1054 Rejecting dist-upgrader_20060302.0120_all.tar.gz
1055 ...
1056+=======
1057+ >>> process = subprocess.Popen([sys.executable, script, "-Q", "accepted",
1058+ ... "-s", "breezy-autotest", "reject", "dist",
1059+ ... "-d", "ubuntutest", "-M"],
1060+ ... stderr=subprocess.PIPE,
1061+ ... stdout=subprocess.PIPE)
1062+ >>> stdout, stderr = process.communicate()
1063+ >>> process.returncode
1064+ 0
1065+ >>> print stdout
1066+ Initializing connection to queue accepted
1067+ ...
1068+ Rejecting dist-upgrader_20060302.0120_all.tar.gz
1069+ ...
1070+>>>>>>> MERGE-SOURCE
1071
1072
1073 Accepting from rejected queue (resurrecting a rejected upload is also
1074 very unlikely, however it's only for testing purposes, fix #54649):
1075
1076+<<<<<<< TREE
1077 >>> process = subprocess.Popen([sys.executable, script, "-Q", "rejected",
1078 ... "-s", "breezy-autotest", "accept", "dist",
1079 ... "-d", "ubuntutest", "-M"],
1080@@ -126,10 +143,26 @@
1081 ...
1082 Accepting dist-upgrader_20060302.0120_all.tar.gz
1083 ...
1084+=======
1085+ >>> process = subprocess.Popen([sys.executable, script, "-Q", "rejected",
1086+ ... "-s", "breezy-autotest", "accept", "dist",
1087+ ... "-d", "ubuntutest", "-M"],
1088+ ... stderr=subprocess.PIPE,
1089+ ... stdout=subprocess.PIPE)
1090+ >>> stdout, stderr = process.communicate()
1091+ >>> process.returncode
1092+ 0
1093+ >>> print stdout
1094+ Initializing connection to queue rejected
1095+ ...
1096+ Accepting dist-upgrader_20060302.0120_all.tar.gz
1097+ ...
1098+>>>>>>> MERGE-SOURCE
1099
1100
1101 Let's use the script to fetch the dist-upgrader upload:
1102
1103+<<<<<<< TREE
1104 >>> process = subprocess.Popen([sys.executable, script, "-Q", "accepted",
1105 ... "-s", "breezy-autotest", "fetch", "dist",
1106 ... "-d", "ubuntutest"],
1107@@ -147,6 +180,25 @@
1108 ---------------------------------------------------------------------------
1109 1/1 total
1110 <BLANKLINE>
1111+=======
1112+ >>> process = subprocess.Popen([sys.executable, script, "-Q", "accepted",
1113+ ... "-s", "breezy-autotest", "fetch", "dist",
1114+ ... "-d", "ubuntutest"],
1115+ ... stdout=subprocess.PIPE)
1116+ >>> stdout, stderr = process.communicate()
1117+ >>> process.returncode
1118+ 0
1119+ >>> print stdout
1120+ Initializing connection to queue accepted
1121+ Running: "fetch dist"
1122+ Fetching ubuntutest/breezy-autotest (ACCEPTED) 1/1
1123+ ---------------------------------------------------------------------------
1124+ Constructing dist-upgrader_20060302.0120_all.changes
1125+ Constructing dist-upgrader_20060302.0120_all.tar.gz
1126+ ---------------------------------------------------------------------------
1127+ 1/1 total
1128+ <BLANKLINE>
1129+>>>>>>> MERGE-SOURCE
1130
1131
1132 Check if the files were written:
1133
1134=== modified file 'lib/lp/soyuz/doc/soyuz-set-of-uploads.txt'
1135--- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2011-06-15 09:49:37 +0000
1136+++ lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2011-06-17 07:08:54 +0000
1137@@ -74,6 +74,7 @@
1138 Having set up that infrastructure we need to prepare a breezy distroseries
1139 for the ubuntutest distribution.
1140
1141+<<<<<<< TREE
1142 >>> from lp.registry.interfaces.pocket import PackagePublishingPocket
1143 >>> from lp.registry.model.distribution import Distribution
1144 >>> from lp.soyuz.enums import PackageUploadStatus
1145@@ -92,6 +93,26 @@
1146 >>> breezy.changeslist = 'breezy-changes@ubuntu.com'
1147 >>> fake_chroot = LibraryFileAlias.get(1)
1148 >>> unused = breezy['i386'].addOrUpdateChroot(fake_chroot)
1149+=======
1150+ >>> from lp.registry.interfaces.pocket import PackagePublishingPocket
1151+ >>> from lp.registry.model.distribution import Distribution
1152+ >>> from lp.soyuz.enums import PackageUploadStatus
1153+ >>> from lp.soyuz.scripts.initialize_distroseries import (
1154+ ... InitializeDistroSeries)
1155+ >>> from canonical.launchpad.database.librarian import LibraryFileAlias
1156+ >>> ubuntu = Distribution.byName('ubuntu')
1157+ >>> breezy_autotest = ubuntu['breezy-autotest']
1158+ >>> ubuntutest = Distribution.byName('ubuntutest')
1159+ >>> breezy = ubuntutest.newSeries(
1160+ ... 'breezy', 'Breezy Badger', 'The Breezy Badger',
1161+ ... 'Black and White', 'Someone', '5.10', None,
1162+ ... breezy_autotest.owner)
1163+ >>> ids = InitializeDistroSeries(breezy, [breezy_autotest.id])
1164+ >>> ids.initialize()
1165+ >>> breezy.changeslist = 'breezy-changes@ubuntu.com'
1166+ >>> fake_chroot = LibraryFileAlias.get(1)
1167+ >>> unused = breezy['i386'].addOrUpdateChroot(fake_chroot)
1168+>>>>>>> MERGE-SOURCE
1169
1170 Add disk content for file inherited from ubuntu/breezy-autotest:
1171
1172@@ -434,6 +455,7 @@
1173 Let's start a new package series by uploading foo_1.0-1 source in
1174 ubuntutest/breezy-RELEASE:
1175
1176+<<<<<<< TREE
1177 >>> simulate_upload(
1178 ... 'foo_1.0-1', upload_policy='sync', is_new=True,
1179 ... loglevel=logging.DEBUG)
1180@@ -471,6 +493,41 @@
1181 upload.
1182 ...
1183 Upload complete.
1184+=======
1185+ >>> simulate_upload(
1186+ ... 'foo_1.0-1', upload_policy='sync', is_new=True,
1187+ ... loglevel=logging.DEBUG)
1188+ DEBUG Initializing connection.
1189+ ...
1190+ DEBUG Sent a mail:
1191+ DEBUG Subject: [ubuntutest/breezy] foo 1.0-1 (New)
1192+ DEBUG Sender: Root <root@localhost>
1193+ DEBUG Recipients: Daniel Silverstone <daniel.silverstone@canonical.com>
1194+ DEBUG Bcc: Root <root@localhost>
1195+ DEBUG Body:
1196+ DEBUG NEW: foo_1.0.orig.tar.gz
1197+ DEBUG NEW: foo_1.0-1.diff.gz
1198+ DEBUG NEW: foo_1.0-1.dsc
1199+ DEBUG
1200+ DEBUG foo (1.0-1) breezy; urgency=low
1201+ DEBUG
1202+ DEBUG * Initial version
1203+ DEBUG
1204+ DEBUG
1205+ DEBUG Your package contains new components which requires manual editing of
1206+ DEBUG the override file. It is ok otherwise, so please be patient. New
1207+ DEBUG packages are usually added to the overrides about once a week.
1208+ DEBUG
1209+ DEBUG You may have gotten the distroseries wrong. If so, you may get warnings
1210+ DEBUG above if files already exist in other distroseries.
1211+ DEBUG
1212+ DEBUG --
1213+ DEBUG You are receiving this email because you are the uploader, maintainer or
1214+ DEBUG signer of the above package.
1215+ INFO Committing the transaction and any mails associated with this upload.
1216+ ...
1217+ Upload complete.
1218+>>>>>>> MERGE-SOURCE
1219
1220 And its binary:
1221
1222@@ -496,6 +553,7 @@
1223
1224 Upload a newer version of source package "foo" to breezy-backports:
1225
1226+<<<<<<< TREE
1227 >>> simulate_upload(
1228 ... 'foo_2.9-1', upload_policy='sync', loglevel=logging.DEBUG)
1229 DEBUG Initializing connection.
1230@@ -503,6 +561,15 @@
1231 DEBUG Setting it to ACCEPTED
1232 ...
1233 Upload complete.
1234+=======
1235+ >>> simulate_upload(
1236+ ... 'foo_2.9-1', upload_policy='sync', loglevel=logging.DEBUG)
1237+ DEBUG Initializing connection.
1238+ ...
1239+ DEBUG Setting it to ACCEPTED
1240+ ...
1241+ Upload complete.
1242+>>>>>>> MERGE-SOURCE
1243
1244
1245 In order to verify if the binary ancestry lookup algorithm works we
1246@@ -537,6 +604,7 @@
1247 it should be rejected by the package reviewer, otherwise people can
1248 live with this inconsistency.
1249
1250+<<<<<<< TREE
1251 >>> simulate_upload(
1252 ... 'foo_2.9-2', upload_policy='sync', loglevel=logging.DEBUG)
1253 DEBUG Initializing connection.
1254@@ -544,11 +612,21 @@
1255 DEBUG Setting it to ACCEPTED
1256 ...
1257 Upload complete.
1258+=======
1259+ >>> simulate_upload(
1260+ ... 'foo_2.9-2', upload_policy='sync', loglevel=logging.DEBUG)
1261+ DEBUG Initializing connection.
1262+ ...
1263+ DEBUG Setting it to ACCEPTED
1264+ ...
1265+ Upload complete.
1266+>>>>>>> MERGE-SOURCE
1267
1268
1269 Same behaviour is expected for a version in SECURITY lower than that
1270 in PROPOSED:
1271
1272+<<<<<<< TREE
1273 >>> simulate_upload(
1274 ... 'foo_2.9-4', upload_policy='sync', loglevel=logging.DEBUG)
1275 DEBUG Initializing connection.
1276@@ -556,7 +634,17 @@
1277 DEBUG Setting it to ACCEPTED
1278 ...
1279 Upload complete.
1280+=======
1281+ >>> simulate_upload(
1282+ ... 'foo_2.9-4', upload_policy='sync', loglevel=logging.DEBUG)
1283+ DEBUG Initializing connection.
1284+ ...
1285+ DEBUG Setting it to ACCEPTED
1286+ ...
1287+ Upload complete.
1288+>>>>>>> MERGE-SOURCE
1289
1290+<<<<<<< TREE
1291 >>> simulate_upload(
1292 ... 'foo_2.9-3', upload_policy='sync', loglevel=logging.DEBUG)
1293 DEBUG Initializing connection.
1294@@ -564,6 +652,15 @@
1295 DEBUG Setting it to ACCEPTED
1296 ...
1297 Upload complete.
1298+=======
1299+ >>> simulate_upload(
1300+ ... 'foo_2.9-3', upload_policy='sync', loglevel=logging.DEBUG)
1301+ DEBUG Initializing connection.
1302+ ...
1303+ DEBUG Setting it to ACCEPTED
1304+ ...
1305+ Upload complete.
1306+>>>>>>> MERGE-SOURCE
1307
1308
1309 However, the source upload of a smaller version than the one already
1310@@ -710,6 +807,7 @@
1311 >>> fillLibrarianFile(68)
1312 >>> fillLibrarianFile(70)
1313
1314+<<<<<<< TREE
1315 >>> run_publish_distro(careful=True)
1316 DEBUG Initializing zopeless.
1317 DEBUG Distribution: ubuntutest
1318@@ -717,6 +815,15 @@
1319 DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/b/bar/bar_1.0-2_i386.deb from library
1320 DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/b/bar/bar_1.0-1_i386.deb from library
1321 ...
1322+=======
1323+ >>> run_publish_distro(careful=True)
1324+ DEBUG Initializing zopeless.
1325+ DEBUG Distribution: ubuntutest
1326+ ...
1327+ DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/b/bar/bar_1.0-2_i386.deb from library
1328+ DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/b/bar/bar_1.0-1_i386.deb from library
1329+ ...
1330+>>>>>>> MERGE-SOURCE
1331
1332
1333 Delete the uncompressed Packages and Sources files from the archive folder.
1334@@ -739,6 +846,7 @@
1335 Check that breezy-autotest is skipped, to ensure that changes to what's
1336 uploaded in the test above don't break the assumptions of this test.
1337
1338+<<<<<<< TREE
1339 >>> run_publish_distro(careful_publishing=True)
1340 DEBUG Initializing zopeless.
1341 DEBUG Distribution: ubuntutest
1342@@ -749,6 +857,18 @@
1343 ...
1344 DEBUG Skipping release files for breezy-autotest/RELEASE
1345 ...
1346+=======
1347+ >>> run_publish_distro(careful_publishing=True)
1348+ DEBUG Initializing zopeless.
1349+ DEBUG Distribution: ubuntutest
1350+ ...
1351+ DEBUG /var/tmp/archive/ubuntutest/pool/universe/b/bar/bar_1.0-2_i386.deb is already in pool with the same content.
1352+ ...
1353+ DEBUG Skipping a-f stanza for breezy-autotest/RELEASE
1354+ ...
1355+ DEBUG Skipping release files for breezy-autotest/RELEASE
1356+ ...
1357+>>>>>>> MERGE-SOURCE
1358
1359 Check the breezy-security release file doesn't exhibit bug 54039.
1360
1361
1362=== modified file 'lib/lp/soyuz/doc/soyuz-upload.txt'
1363--- lib/lp/soyuz/doc/soyuz-upload.txt 2011-06-15 09:49:37 +0000
1364+++ lib/lp/soyuz/doc/soyuz-upload.txt 2011-06-17 07:08:54 +0000
1365@@ -556,6 +556,7 @@
1366
1367 Invoke Publisher script against the 'ubuntutest' distribution:
1368
1369+<<<<<<< TREE
1370 >>> script = os.path.join(config.root, "scripts", "publish-distro.py")
1371 >>> process = subprocess.Popen([sys.executable, script, "-vvCq",
1372 ... "-d", "ubuntutest"],
1373@@ -572,6 +573,24 @@
1374 DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/e/etherwake/etherwake_1.08-1.diff.gz from library
1375 DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/e/etherwake/etherwake_1.08-1.dsc from library
1376 ...
1377+=======
1378+ >>> script = os.path.join(config.root, "scripts", "publish-distro.py")
1379+ >>> process = subprocess.Popen([sys.executable, script, "-vvCq",
1380+ ... "-d", "ubuntutest"],
1381+ ... stdout=subprocess.PIPE,
1382+ ... stderr=subprocess.PIPE)
1383+ >>> stdout, stderr = process.communicate()
1384+ >>> print stdout
1385+ <BLANKLINE>
1386+ >>> print stderr
1387+ DEBUG Initializing zopeless.
1388+ DEBUG Distribution: ubuntutest
1389+ ...
1390+ DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/e/etherwake/etherwake_1.08.orig.tar.gz from library
1391+ DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/e/etherwake/etherwake_1.08-1.diff.gz from library
1392+ DEBUG Added /var/tmp/archive/ubuntutest/pool/universe/e/etherwake/etherwake_1.08-1.dsc from library
1393+ ...
1394+>>>>>>> MERGE-SOURCE
1395
1396
1397 # XXX cprov 2006-04-12 bug=3989
1398@@ -662,6 +681,7 @@
1399 already marked as published in the database and, if the files are
1400 already on disk, verify the contents are as expected.
1401
1402+<<<<<<< TREE
1403 >>> print stderr
1404 DEBUG Initializing zopeless.
1405 DEBUG Distribution: ubuntutest
1406@@ -670,6 +690,16 @@
1407 DEBUG /var/tmp/archive/ubuntutest/pool/universe/e/etherwake/etherwake_1.08-1.diff.gz is already in pool with the same content.
1408 DEBUG /var/tmp/archive/ubuntutest/pool/universe/e/etherwake/etherwake_1.08-1.dsc is already in pool with the same content.
1409 ...
1410+=======
1411+ >>> print stderr
1412+ DEBUG Initializing zopeless.
1413+ DEBUG Distribution: ubuntutest
1414+ ...
1415+ DEBUG /var/tmp/archive/ubuntutest/pool/universe/e/etherwake/etherwake_1.08.orig.tar.gz is already in pool with the same content.
1416+ DEBUG /var/tmp/archive/ubuntutest/pool/universe/e/etherwake/etherwake_1.08-1.diff.gz is already in pool with the same content.
1417+ DEBUG /var/tmp/archive/ubuntutest/pool/universe/e/etherwake/etherwake_1.08-1.dsc is already in pool with the same content.
1418+ ...
1419+>>>>>>> MERGE-SOURCE
1420
1421 Invalidates SQLObject cache to cope with publisher.
1422
1423@@ -743,6 +773,7 @@
1424
1425 Defining path to the script:
1426
1427+<<<<<<< TREE
1428 >>> script = os.path.join(config.root, "scripts", "ftpmaster-tools",
1429 ... "archive-cruft-check.py")
1430 >>> process = subprocess.Popen([sys.executable, script, "-vn",
1431@@ -766,6 +797,31 @@
1432 DEBUG Building all superseded by any list (ASBA):
1433 DEBUG No NBS found
1434 <BLANKLINE>
1435+=======
1436+ >>> script = os.path.join(config.root, "scripts", "ftpmaster-tools",
1437+ ... "archive-cruft-check.py")
1438+ >>> process = subprocess.Popen([sys.executable, script, "-vn",
1439+ ... "-d", "ubuntutest",
1440+ ... "-s", "breezy-autotest",
1441+ ... "/var/tmp/archive"],
1442+ ... stdout=subprocess.PIPE,
1443+ ... stderr=subprocess.PIPE,)
1444+ >>> stdout, stderr = process.communicate()
1445+ >>> process.returncode
1446+ 0
1447+ >>> print stderr
1448+ DEBUG Acquiring lock
1449+ DEBUG Initializing connection.
1450+ DEBUG Considering Sources:
1451+ DEBUG Processing /var/tmp/archive/ubuntutest/dists/breezy-autotest/restricted/source/Sources.gz
1452+ DEBUG Processing /var/tmp/archive/ubuntutest/dists/breezy-autotest/main/source/Sources.gz
1453+ DEBUG Processing /var/tmp/archive/ubuntutest/dists/breezy-autotest/multiverse/source/Sources.gz
1454+ DEBUG Processing /var/tmp/archive/ubuntutest/dists/breezy-autotest/universe/source/Sources.gz
1455+ DEBUG Building not build from source list (NBS):
1456+ DEBUG Building all superseded by any list (ASBA):
1457+ DEBUG No NBS found
1458+ <BLANKLINE>
1459+>>>>>>> MERGE-SOURCE
1460
1461
1462 Nice! That's enough for now... let's kill the process and clean
1463
1464=== modified file 'lib/lp/soyuz/model/sourcepackagerelease.py'
1465--- lib/lp/soyuz/model/sourcepackagerelease.py 2011-06-16 11:31:24 +0000
1466+++ lib/lp/soyuz/model/sourcepackagerelease.py 2011-06-17 07:08:54 +0000
1467@@ -470,6 +470,14 @@
1468 "BinaryPackageBuild.source_package_release = %s" % (
1469 sqlvalues(archive.id, distroarchseries.architecturetag, self))]
1470
1471+<<<<<<< TREE
1472+=======
1473+ # XXX bigjools 2011-05-04 bug=777234
1474+ # We'll need exceptions in here for when we start initializing
1475+ # derived distros without rebuilding binaries. The matched
1476+ # archives will need to traverse the DistroSeriesParent tree.
1477+
1478+>>>>>>> MERGE-SOURCE
1479 # Query only the last build record for this sourcerelease
1480 # across all possible locations.
1481 query = " AND ".join(queries)
1482
1483=== modified file 'lib/lp/testing/__init__.py'
1484--- lib/lp/testing/__init__.py 2011-06-16 20:26:48 +0000
1485+++ lib/lp/testing/__init__.py 2011-06-17 07:08:54 +0000
1486@@ -74,8 +74,20 @@
1487 import time
1488 import unittest
1489
1490-import simplejson
1491-
1492+<<<<<<< TREE
1493+import simplejson
1494+
1495+=======
1496+import simplejson
1497+
1498+try:
1499+ import html5browser
1500+ # Hush lint.
1501+ html5browser
1502+except ImportError:
1503+ html5browser = None
1504+
1505+>>>>>>> MERGE-SOURCE
1506 from bzrlib import trace
1507 from bzrlib.bzrdir import (
1508 BzrDir,
1509@@ -938,6 +950,7 @@
1510
1511 def setUp(self):
1512 super(YUIUnitTestCase, self).setUp()
1513+<<<<<<< TREE
1514 # html5browser imports from the gir/pygtk stack which causes
1515 # twisted tests to break because of gtk's initialize.
1516 try:
1517@@ -962,6 +975,24 @@
1518 # Did not get a report back.
1519 self._yui_results = self.MISSING_REPORT
1520 return
1521+=======
1522+ client = html5browser.Browser()
1523+ html_uri = 'file://%s' % os.path.join(
1524+ config.root, 'lib', self.test_path)
1525+ page = client.load_page(html_uri, timeout=self.js_timeout)
1526+ if page.return_code == page.CODE_FAIL:
1527+ self._yui_results = self.TIMEOUT
1528+ return
1529+ # Data['type'] is complete (an event).
1530+ # Data['results'] is a dict (type=report)
1531+ # with 1 or more dicts (type=testcase)
1532+ # with 1 for more dicts (type=test).
1533+ report = simplejson.loads(page.content)
1534+ if report.get('type', None) != 'complete':
1535+ # Did not get a report back.
1536+ self._yui_results = self.MISSING_REPORT
1537+ return
1538+>>>>>>> MERGE-SOURCE
1539 self._yui_results = {}
1540 for key, value in report['results'].items():
1541 if isinstance(value, dict) and value['type'] == 'testcase':

Subscribers

People subscribed via source and target branches

to status/vote changes: