Merge lp:~bjornt/launchpad/bug-376990 into lp:launchpad

Proposed by Björn Tillenius
Status: Rejected
Rejected by: Björn Tillenius
Proposed branch: lp:~bjornt/launchpad/bug-376990
Merge into: lp:launchpad
Diff against target: None lines
To merge this branch: bzr merge lp:~bjornt/launchpad/bug-376990
Reviewer Review Type Date Requested Status
Canonical Launchpad Engineering Pending
Review via email: mp+9319@code.launchpad.net
To post a comment you must log in.
Revision history for this message
Björn Tillenius (bjornt) wrote :

This branch makes canonical_url() produce browser URLs, even though the
current request is a web service request. We have some problems that API
URLs are used instead of browser URLs, for example in XHTML
presentations and when e-mail notifications are generated inside API
methods.

IMHO, canonical_url() should be split into two functions, but it's too
late to do that now. I'd like one function that always returns a browser
URL, and one that returns either a browser or web service URL, depending
on which request is currently used. That's basically how I made
canonical_url() work with this change. If no request is explicitly
passed to it, a browser URL is always returned. In the case where you
may want an API URL, you have to pass in the current request explicitly.
That's why I had to pass in the request to canonical_url() in the
/people/foo redirector, since it's used in both the app and web service
servers. Basically all code that redirects objects to canonical_url()
will need to pass in the request explicitly. At the moment all tests
pass, but we might find a few bugs later. I don't expect this to be a
big deal, though.

In this branch I also made canonical_url.txt run in FunctionalLayer, so
that it runs a bit faster.

BTW, I'm not going to land this branch before someone with more web
service foo has commented on it, but I have discussed this approach with
them already.

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'database/replication/initialize.py'
2--- database/replication/initialize.py 2009-06-24 21:17:33 +0000
3+++ database/replication/initialize.py 2009-07-19 04:41:14 +0000
4@@ -144,9 +144,11 @@
5 comment='Launchpad tables and sequences');
6 """)
7
8+ script.append(
9+ "echo 'Adding %d tables to replication set @lpmain_set';"
10+ % len(lpmain_tables))
11 for table in sorted(lpmain_tables):
12 script.append("""
13- echo 'Adding %(table)s to replication set @lpmain_set';
14 set add table (
15 set id=@lpmain_set,
16 origin=@master_node,
17@@ -156,9 +158,11 @@
18 entry_id += 1
19
20 entry_id = 200
21+ script.append(
22+ "echo 'Adding %d sequences to replication set @lpmain_set';"
23+ % len(lpmain_sequences))
24 for sequence in sorted(lpmain_sequences):
25 script.append("""
26- echo 'Adding %(sequence)s to replication set @lpmain_set';
27 set add sequence (
28 set id=@lpmain_set,
29 origin=@master_node,
30
31=== modified file 'database/replication/report.py'
32--- database/replication/report.py 2009-06-24 21:17:33 +0000
33+++ database/replication/report.py 2009-07-19 04:41:14 +0000
34@@ -39,7 +39,7 @@
35 self.labels = labels[:]
36 self.rows = []
37
38-
39+
40 class HtmlReport:
41
42 def alert(self, text):
43@@ -157,10 +157,11 @@
44
45 cur.execute("""
46 SELECT li_receiver, li_origin, li_provider
47- FROM sl_listen ORDER BY li_receiver
48+ FROM sl_listen
49+ ORDER BY li_receiver, li_origin, li_provider
50 """)
51 for row in cur.fetchall():
52- table.rows.append('Node %s' % node for node in row)
53+ table.rows.append(['Node %s' % node for node in row])
54 return report.table(table)
55
56
57
58=== modified file 'database/sampledata/current-dev.sql'
59--- database/sampledata/current-dev.sql 2009-07-17 00:26:05 +0000
60+++ database/sampledata/current-dev.sql 2009-07-19 04:41:14 +0000
61@@ -4756,7 +4756,8 @@
62 INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243626, 'Commercial Subscription Approvers', 243623, NULL, 'commercial-approvers', NULL, NULL, NULL, NULL, 1, NULL, '2008-06-27 14:49:38.676264', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, NULL);
63 INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243627, 'Ubuntu-branches-owner', NULL, NULL, 'ubuntu-branches-owner', NULL, NULL, NULL, NULL, 1, NULL, '2009-03-17 07:28:15.948042', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, 1, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, 243625);
64 INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243628, 'Ubuntu branches', 243627, 'Celebrity team that controls Ubuntu source package branches.', 'ubuntu-branches', NULL, NULL, NULL, NULL, 3, NULL, '2009-03-17 07:29:13.259033', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, NULL);
65-INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243629, 'HWDB Team', 16, NULL, 'hwdb-team', NULL, NULL, NULL, NULL, 3, NULL, '2009-07-09 09:12:39.400351', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, NULL);
66+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243629, 'Ubuntu Security Team', 4, NULL, 'ubuntu-security', NULL, NULL, NULL, NULL, 2, NULL, '2009-07-14 20:23:59.698654', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, NULL);
67+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243630, 'HWDB Team', 16, NULL, 'hwdb-team', NULL, NULL, NULL, NULL, 3, NULL, '2009-07-09 09:12:39.400351', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, NULL);
68
69
70 ALTER TABLE person ENABLE TRIGGER ALL;
71@@ -9142,23 +9143,24 @@
72 INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (81, 16, 243620, 2, '2008-05-14 12:07:14.22745', NULL, NULL, NULL, 16, NULL, 16, '2008-05-14 12:07:14.22745', NULL, NULL, '2008-05-14 12:07:14.22745', NULL, NULL, NULL, '2008-05-14 12:07:14.140921');
73 INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (83, 243622, 243621, 3, '2008-05-12 17:40:08.720578', NULL, NULL, NULL, 16, NULL, 16, '2008-05-12 17:40:08.720578', NULL, NULL, '2008-05-12 17:40:08.720578', NULL, NULL, NULL, '2008-05-12 17:40:08.637114');
74 INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (85, 243627, 243628, 3, '2009-03-17 07:29:13.30381', NULL, NULL, NULL, 243627, NULL, 243627, '2009-03-17 07:29:13.30381', NULL, NULL, '2009-03-17 07:29:13.30381', NULL, NULL, NULL, '2009-03-17 07:29:13.259033');
75-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (86, 1, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
76-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (87, 12, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
77-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (88, 16, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
78-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (89, 22, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
79-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (90, 23, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
80-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (91, 26, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
81-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (92, 27, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
82-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (93, 28, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
83-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (94, 29, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
84-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (95, 38, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
85-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (96, 63, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
86-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (97, 70, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
87-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (98, 243610, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
88-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (99, 243611, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
89-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (100, 243617, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
90-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (101, 243622, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
91-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (102, 243623, 243629, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
92+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (86, 4, 243629, 3, '2009-07-14 20:23:59.769346', NULL, NULL, NULL, 4, NULL, 4, '2009-07-14 20:23:59.769346', NULL, NULL, '2009-07-14 20:23:59.769346', NULL, NULL, NULL, '2009-07-14 20:23:59.698654');
93+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (87, 1, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
94+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (88, 12, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
95+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (89, 16, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
96+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (90, 22, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
97+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (91, 23, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
98+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (92, 26, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
99+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (93, 27, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
100+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (94, 28, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
101+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (95, 29, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
102+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (96, 38, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
103+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (97, 63, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
104+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (98, 70, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
105+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (99, 243610, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
106+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (100, 243611, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
107+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (101, 243617, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
108+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (102, 243622, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
109+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (103, 243623, 243630, 2, '2009-07-09 11:58:46.481813', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:46.481813');
110
111
112 ALTER TABLE teammembership ENABLE TRIGGER ALL;
113@@ -9342,23 +9344,25 @@
114 INSERT INTO teamparticipation (id, team, person) VALUES (195, 243628, 243628);
115 INSERT INTO teamparticipation (id, team, person) VALUES (196, 243628, 243627);
116 INSERT INTO teamparticipation (id, team, person) VALUES (197, 243629, 243629);
117-INSERT INTO teamparticipation (id, team, person) VALUES (198, 243629, 1);
118-INSERT INTO teamparticipation (id, team, person) VALUES (199, 243629, 12);
119-INSERT INTO teamparticipation (id, team, person) VALUES (200, 243629, 16);
120-INSERT INTO teamparticipation (id, team, person) VALUES (201, 243629, 22);
121-INSERT INTO teamparticipation (id, team, person) VALUES (202, 243629, 23);
122-INSERT INTO teamparticipation (id, team, person) VALUES (203, 243629, 26);
123-INSERT INTO teamparticipation (id, team, person) VALUES (204, 243629, 27);
124-INSERT INTO teamparticipation (id, team, person) VALUES (205, 243629, 28);
125-INSERT INTO teamparticipation (id, team, person) VALUES (206, 243629, 29);
126-INSERT INTO teamparticipation (id, team, person) VALUES (207, 243629, 38);
127-INSERT INTO teamparticipation (id, team, person) VALUES (208, 243629, 63);
128-INSERT INTO teamparticipation (id, team, person) VALUES (209, 243629, 70);
129-INSERT INTO teamparticipation (id, team, person) VALUES (210, 243629, 243610);
130-INSERT INTO teamparticipation (id, team, person) VALUES (211, 243629, 243611);
131-INSERT INTO teamparticipation (id, team, person) VALUES (212, 243629, 243617);
132-INSERT INTO teamparticipation (id, team, person) VALUES (213, 243629, 243622);
133-INSERT INTO teamparticipation (id, team, person) VALUES (214, 243629, 243623);
134+INSERT INTO teamparticipation (id, team, person) VALUES (198, 243629, 4);
135+INSERT INTO teamparticipation (id, team, person) VALUES (199, 243630, 243630);
136+INSERT INTO teamparticipation (id, team, person) VALUES (200, 243630, 1);
137+INSERT INTO teamparticipation (id, team, person) VALUES (201, 243630, 12);
138+INSERT INTO teamparticipation (id, team, person) VALUES (202, 243630, 16);
139+INSERT INTO teamparticipation (id, team, person) VALUES (203, 243630, 22);
140+INSERT INTO teamparticipation (id, team, person) VALUES (204, 243630, 23);
141+INSERT INTO teamparticipation (id, team, person) VALUES (205, 243630, 26);
142+INSERT INTO teamparticipation (id, team, person) VALUES (206, 243630, 27);
143+INSERT INTO teamparticipation (id, team, person) VALUES (207, 243630, 28);
144+INSERT INTO teamparticipation (id, team, person) VALUES (208, 243630, 29);
145+INSERT INTO teamparticipation (id, team, person) VALUES (209, 243630, 38);
146+INSERT INTO teamparticipation (id, team, person) VALUES (210, 243630, 63);
147+INSERT INTO teamparticipation (id, team, person) VALUES (211, 243630, 70);
148+INSERT INTO teamparticipation (id, team, person) VALUES (212, 243630, 243610);
149+INSERT INTO teamparticipation (id, team, person) VALUES (213, 243630, 243611);
150+INSERT INTO teamparticipation (id, team, person) VALUES (214, 243630, 243617);
151+INSERT INTO teamparticipation (id, team, person) VALUES (215, 243630, 243622);
152+INSERT INTO teamparticipation (id, team, person) VALUES (216, 243630, 243623);
153
154
155 ALTER TABLE teamparticipation ENABLE TRIGGER ALL;
156
157=== modified file 'database/sampledata/current.sql'
158--- database/sampledata/current.sql 2009-07-17 00:26:05 +0000
159+++ database/sampledata/current.sql 2009-07-19 04:41:14 +0000
160@@ -4754,7 +4754,8 @@
161 INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243626, 'Launchpad Users', 12, NULL, 'launchpad-users', NULL, NULL, NULL, NULL, 2, NULL, '2008-11-26 18:19:53.547918', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, NULL);
162 INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243627, 'Ubuntu-branches-owner', NULL, NULL, 'ubuntu-branches-owner', NULL, NULL, NULL, NULL, 1, NULL, '2009-03-17 07:26:14.024613', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, 1, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, 2436242);
163 INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243628, 'Ubuntu branches', 243627, 'Celebrity team that controls official source package branches.', 'ubuntu-branches', NULL, NULL, NULL, NULL, 3, NULL, '2009-03-17 07:27:39.306182', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, NULL);
164-INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243629, 'HWDB Team', 16, NULL, 'hwdb-team', NULL, NULL, NULL, NULL, 3, NULL, '2009-07-09 09:12:39.400351', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, NULL);
165+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243629, 'Ubuntu Security Team', 4, NULL, 'ubuntu-security', NULL, NULL, NULL, NULL, 2, NULL, '2009-07-14 20:23:59.698654', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, NULL);
166+INSERT INTO person (id, displayname, teamowner, teamdescription, name, language, fti, defaultmembershipperiod, defaultrenewalperiod, subscriptionpolicy, merged, datecreated, addressline1, addressline2, organization, city, province, country, postcode, phone, homepage_content, icon, mugshot, hide_email_addresses, creation_rationale, creation_comment, registrant, logo, renewal_policy, personal_standing, personal_standing_reason, mail_resumption_date, mailing_list_auto_subscribe_policy, mailing_list_receive_duplicates, visibility, verbose_bugnotifications, account) VALUES (243630, 'HWDB Team', 16, NULL, 'hwdb-team', NULL, NULL, NULL, NULL, 3, NULL, '2009-07-09 09:12:39.400351', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, NULL, NULL, NULL, NULL, 10, 0, NULL, NULL, 1, true, 1, true, NULL);
167
168
169 ALTER TABLE person ENABLE TRIGGER ALL;
170@@ -9139,23 +9140,24 @@
171 INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (84, 243623, 243624, 3, '2008-06-27 14:49:38.698594', NULL, NULL, NULL, 243623, NULL, 243623, '2008-06-27 14:49:38.698594', NULL, NULL, '2008-06-27 14:49:38.698594', NULL, NULL, NULL, '2008-06-27 14:49:38.676264');
172 INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (85, 12, 243626, 3, '2008-11-26 18:19:53.849673', NULL, NULL, NULL, 12, NULL, 12, '2008-11-26 18:19:53.849673', NULL, NULL, '2008-11-26 18:19:53.849673', NULL, NULL, NULL, '2008-11-26 18:19:53.547918');
173 INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (86, 243627, 243628, 3, '2009-03-17 07:27:39.361471', NULL, NULL, NULL, 243627, NULL, 243627, '2009-03-17 07:27:39.361471', NULL, NULL, '2009-03-17 07:27:39.361471', NULL, NULL, NULL, '2009-03-17 07:27:39.306182');
174-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (87, 1, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
175-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (88, 12, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
176-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (89, 16, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
177-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (90, 22, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
178-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (91, 23, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
179-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (92, 26, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
180-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (93, 27, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
181-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (94, 28, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
182-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (95, 29, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
183-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (96, 38, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
184-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (97, 63, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
185-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (98, 70, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
186-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (99, 243610, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
187-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (100, 243611, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
188-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (101, 243617, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
189-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (102, 243622, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
190-INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (103, 243623, 243629, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
191+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (87, 4, 243629, 3, '2009-07-14 20:23:59.769346', NULL, NULL, NULL, 4, NULL, 4, '2009-07-14 20:23:59.769346', NULL, NULL, '2009-07-14 20:23:59.769346', NULL, NULL, NULL, '2009-07-14 20:23:59.698654');
192+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (88, 1, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
193+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (89, 12, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
194+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (90, 16, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
195+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (91, 22, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
196+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (92, 23, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
197+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (93, 26, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
198+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (94, 27, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
199+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (95, 28, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
200+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (96, 29, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
201+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (97, 38, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
202+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (98, 63, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
203+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (99, 70, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
204+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (100, 243610, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
205+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (101, 243611, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
206+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (102, 243617, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
207+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (103, 243622, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
208+INSERT INTO teammembership (id, person, team, status, date_joined, date_expires, last_changed_by, last_change_comment, proposed_by, acknowledged_by, reviewed_by, date_proposed, date_last_changed, date_acknowledged, date_reviewed, proponent_comment, acknowledger_comment, reviewer_comment, date_created) VALUES (104, 243623, 243630, 2, '2009-07-09 11:58:38.122886', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '2009-07-09 11:58:38.122886');
209
210
211 ALTER TABLE teammembership ENABLE TRIGGER ALL;
212@@ -9344,23 +9346,25 @@
213 INSERT INTO teamparticipation (id, team, person) VALUES (198, 243628, 243628);
214 INSERT INTO teamparticipation (id, team, person) VALUES (199, 243628, 243627);
215 INSERT INTO teamparticipation (id, team, person) VALUES (200, 243629, 243629);
216-INSERT INTO teamparticipation (id, team, person) VALUES (201, 243629, 1);
217-INSERT INTO teamparticipation (id, team, person) VALUES (202, 243629, 12);
218-INSERT INTO teamparticipation (id, team, person) VALUES (203, 243629, 16);
219-INSERT INTO teamparticipation (id, team, person) VALUES (204, 243629, 22);
220-INSERT INTO teamparticipation (id, team, person) VALUES (205, 243629, 23);
221-INSERT INTO teamparticipation (id, team, person) VALUES (206, 243629, 26);
222-INSERT INTO teamparticipation (id, team, person) VALUES (207, 243629, 27);
223-INSERT INTO teamparticipation (id, team, person) VALUES (208, 243629, 28);
224-INSERT INTO teamparticipation (id, team, person) VALUES (209, 243629, 29);
225-INSERT INTO teamparticipation (id, team, person) VALUES (210, 243629, 38);
226-INSERT INTO teamparticipation (id, team, person) VALUES (211, 243629, 63);
227-INSERT INTO teamparticipation (id, team, person) VALUES (212, 243629, 70);
228-INSERT INTO teamparticipation (id, team, person) VALUES (213, 243629, 243610);
229-INSERT INTO teamparticipation (id, team, person) VALUES (214, 243629, 243611);
230-INSERT INTO teamparticipation (id, team, person) VALUES (215, 243629, 243617);
231-INSERT INTO teamparticipation (id, team, person) VALUES (216, 243629, 243622);
232-INSERT INTO teamparticipation (id, team, person) VALUES (217, 243629, 243623);
233+INSERT INTO teamparticipation (id, team, person) VALUES (201, 243629, 4);
234+INSERT INTO teamparticipation (id, team, person) VALUES (202, 243630, 243630);
235+INSERT INTO teamparticipation (id, team, person) VALUES (203, 243630, 1);
236+INSERT INTO teamparticipation (id, team, person) VALUES (204, 243630, 12);
237+INSERT INTO teamparticipation (id, team, person) VALUES (205, 243630, 16);
238+INSERT INTO teamparticipation (id, team, person) VALUES (206, 243630, 22);
239+INSERT INTO teamparticipation (id, team, person) VALUES (207, 243630, 23);
240+INSERT INTO teamparticipation (id, team, person) VALUES (208, 243630, 26);
241+INSERT INTO teamparticipation (id, team, person) VALUES (209, 243630, 27);
242+INSERT INTO teamparticipation (id, team, person) VALUES (210, 243630, 28);
243+INSERT INTO teamparticipation (id, team, person) VALUES (211, 243630, 29);
244+INSERT INTO teamparticipation (id, team, person) VALUES (212, 243630, 38);
245+INSERT INTO teamparticipation (id, team, person) VALUES (213, 243630, 63);
246+INSERT INTO teamparticipation (id, team, person) VALUES (214, 243630, 70);
247+INSERT INTO teamparticipation (id, team, person) VALUES (215, 243630, 243610);
248+INSERT INTO teamparticipation (id, team, person) VALUES (216, 243630, 243611);
249+INSERT INTO teamparticipation (id, team, person) VALUES (217, 243630, 243617);
250+INSERT INTO teamparticipation (id, team, person) VALUES (218, 243630, 243622);
251+INSERT INTO teamparticipation (id, team, person) VALUES (219, 243630, 243623);
252
253
254 ALTER TABLE teamparticipation ENABLE TRIGGER ALL;
255
256=== added file 'database/schema/patch-2109-55-2.sql'
257--- database/schema/patch-2109-55-2.sql 1970-01-01 00:00:00 +0000
258+++ database/schema/patch-2109-55-2.sql 2009-06-30 06:33:22 +0000
259@@ -0,0 +1,9 @@
260+SET client_min_messages=ERROR;
261+
262+CREATE INDEX revisionauthor__lower_email__idx ON RevisionAuthor(lower(email));
263+CREATE INDEX HWSubmission__lower_raw_emailaddress__idx
264+ ON HWSubmission(lower(raw_emailaddress));
265+CREATE INDEX question__status__datecreated__idx
266+ ON Question(status, datecreated);
267+
268+INSERT INTO LaunchpadDatabaseRevision VALUES (2109, 55, 2);
269
270=== added file 'database/schema/patch-2109-61-1.sql'
271--- database/schema/patch-2109-61-1.sql 1970-01-01 00:00:00 +0000
272+++ database/schema/patch-2109-61-1.sql 2009-07-06 12:03:49 +0000
273@@ -0,0 +1,71 @@
274+SET client_min_messages=ERROR;
275+
276+DROP VIEW IF EXISTS POExport;
277+
278+CREATE VIEW POExport AS
279+SELECT
280+ ((COALESCE((potmsgset.id)::text, 'X'::text) || '.'::text) || COALESCE((translationmessage.id)::text, 'X'::text)) AS id,
281+ POTemplate.productseries,
282+ POTemplate.sourcepackagename,
283+ POTemplate.distroseries,
284+ POTemplate.id AS potemplate,
285+ POTemplate.header AS template_header,
286+ POTemplate.languagepack,
287+ POFile.id AS pofile,
288+ POFile.language,
289+ POFile.variant,
290+ POFile.topcomment AS translation_file_comment,
291+ POFile.header AS translation_header,
292+ POFile.fuzzyheader AS is_translation_header_fuzzy,
293+ TranslationTemplateItem.sequence,
294+ POTMsgSet.id AS potmsgset,
295+ TranslationMessage.comment,
296+ POTMsgSet.sourcecomment AS source_comment,
297+ POTMsgSet.filereferences AS file_references,
298+ POTMsgSet.flagscomment AS flags_comment,
299+ POTMsgSet.context,
300+ msgid_singular.msgid AS msgid_singular,
301+ msgid_plural.msgid AS msgid_plural,
302+ TranslationMessage.is_current,
303+ TranslationMessage.is_imported,
304+ TranslationMessage.potemplate AS diverged,
305+ potranslation0.translation AS translation0,
306+ potranslation1.translation AS translation1,
307+ potranslation2.translation AS translation2,
308+ potranslation3.translation AS translation3,
309+ potranslation4.translation AS translation4,
310+ potranslation5.translation AS translation5
311+FROM POTMsgSet
312+JOIN TranslationTemplateItem ON
313+ TranslationTemplateItem.potmsgset = POTMsgSet.id
314+JOIN POTemplate ON
315+ POTemplate.id = TranslationTemplateItem.potemplate
316+JOIN POFile ON
317+ POTemplate.id = POFile.potemplate
318+LEFT JOIN TranslationMessage ON
319+ POTMsgSet.id = TranslationMessage.potmsgset AND
320+ TranslationMessage.is_current IS TRUE AND
321+ TranslationMessage.language = POFile.language AND
322+ TranslationMessage.variant IS NOT DISTINCT FROM POFile.variant
323+LEFT JOIN POMsgID AS msgid_singular ON
324+ msgid_singular.id = POTMsgSet.msgid_singular
325+LEFT JOIN POMsgID AS msgid_plural ON
326+ msgid_plural.id = POTMsgSet.msgid_plural
327+LEFT JOIN POTranslation AS potranslation0 ON
328+ potranslation0.id = TranslationMessage.msgstr0
329+LEFT JOIN POTranslation AS potranslation1 ON
330+ potranslation1.id = TranslationMessage.msgstr1
331+LEFT JOIN POTranslation AS potranslation2 ON
332+ potranslation2.id = TranslationMessage.msgstr2
333+LEFT JOIN POTranslation AS potranslation3 ON
334+ potranslation3.id = TranslationMessage.msgstr3
335+LEFT JOIN POTranslation AS potranslation4 ON
336+ potranslation4.id = TranslationMessage.msgstr4
337+LEFT JOIN POTranslation potranslation5 ON
338+ potranslation5.id = TranslationMessage.msgstr5
339+WHERE
340+ TranslationMessage.potemplate IS NULL OR
341+ TranslationMessage.potemplate = POFile.potemplate;
342+
343+
344+INSERT INTO LaunchpadDatabaseRevision VALUES (2109, 61, 1);
345
346=== added file 'database/schema/patch-2109-62-0.sql'
347--- database/schema/patch-2109-62-0.sql 1970-01-01 00:00:00 +0000
348+++ database/schema/patch-2109-62-0.sql 2009-07-09 13:40:12 +0000
349@@ -0,0 +1,5 @@
350+SET client_min_messages=ERROR;
351+
352+DROP VIEW IF EXISTS POExport;
353+
354+INSERT INTO LaunchpadDatabaseRevision VALUES (2109, 62, 0);
355
356=== modified file 'database/schema/security.cfg'
357--- database/schema/security.cfg 2009-07-17 18:46:25 +0000
358+++ database/schema/security.cfg 2009-07-19 04:41:14 +0000
359@@ -592,6 +592,7 @@
360 public.revisioncache = SELECT, INSERT
361 public.revisionparent = SELECT, INSERT
362 public.revisionproperty = SELECT, INSERT
363+public.seriessourcepackagebranch = SELECT
364 public.sourcepackagename = SELECT
365 public.staticdiff = SELECT, INSERT, DELETE
366 public.validpersoncache = SELECT
367@@ -1116,7 +1117,7 @@
368 public.packageupload = SELECT, UPDATE
369 public.packageuploadsource = SELECT
370 public.packageuploadbuild = SELECT
371-public.packageuploadcustom = SELECT
372+public.packageuploadcustom = SELECT, UPDATE
373
374 # Distribution/Publishing stuff
375 public.archive = SELECT, UPDATE
376@@ -1133,20 +1134,21 @@
377 public.pocketchroot = SELECT
378 public.sourcepackagerelease = SELECT, UPDATE
379 public.binarypackagerelease = SELECT, UPDATE
380-public.sourcepackagereleasefile = SELECT
381-public.binarypackagefile = SELECT
382+public.sourcepackagereleasefile = SELECT, UPDATE
383+public.binarypackagefile = SELECT, UPDATE
384 public.sourcepackagename = SELECT
385 public.binarypackagename = SELECT
386 public.binarypackagepublishinghistory = SELECT
387 public.sourcepackagepublishinghistory = SELECT
388 public.sourcepackagefilepublishing = SELECT
389 public.binarypackagefilepublishing = SELECT
390-public.securesourcepackagepublishinghistory = SELECT, INSERT
391-public.securebinarypackagepublishinghistory = SELECT, INSERT
392+public.securesourcepackagepublishinghistory = SELECT, INSERT, UPDATE
393+public.securebinarypackagepublishinghistory = SELECT, INSERT, UPDATE
394 public.component = SELECT
395 public.section = SELECT
396 public.componentselection = SELECT
397 public.sectionselection = SELECT
398+public.packagediff = SELECT, UPDATE
399
400 # Librarian stuff
401 public.libraryfilealias = SELECT, INSERT
402@@ -1602,6 +1604,7 @@
403 public.product = SELECT
404 public.productseries = SELECT
405 public.revision = SELECT
406+public.revisionauthor = SELECT, INSERT
407 public.sourcepackagename = SELECT
408 public.staticdiff = SELECT, INSERT
409 public.teammembership = SELECT
410
411=== modified file 'database/schema/trusted.sql'
412--- database/schema/trusted.sql 2009-07-17 00:26:05 +0000
413+++ database/schema/trusted.sql 2009-07-19 04:41:14 +0000
414@@ -409,13 +409,13 @@
415
416 IF v_trash_old THEN
417 -- Was this somebody's most-recently-changed message?
418+ -- If so, delete the entry for that change.
419 DELETE FROM POFileTranslator
420 WHERE latest_message = OLD.id;
421-
422 IF FOUND THEN
423- -- Delete old records.
424-
425- -- Insert a past record if there is one.
426+ -- We deleted the entry for somebody's latest contribution.
427+ -- Find that person's latest remaining contribution and
428+ -- create a new record for that.
429 INSERT INTO POFileTranslator (
430 person, pofile, latest_message, date_last_touched
431 )
432@@ -427,25 +427,24 @@
433 new_latest_message.date_reviewed)
434 FROM POFile
435 JOIN TranslationTemplateItem AS old_template_item
436- ON (OLD.potmsgset =
437- old_template_item.potmsgset) AND
438- (old_template_item.potemplate = pofile.potemplate) AND
439- (pofile.language
440- IS NOT DISTINCT FROM OLD.language) AND
441- (pofile.variant
442- IS NOT DISTINCT FROM OLD.variant)
443+ ON OLD.potmsgset = old_template_item.potmsgset AND
444+ old_template_item.potemplate = pofile.potemplate AND
445+ pofile.language = OLD.language AND
446+ pofile.variant IS NOT DISTINCT FROM OLD.variant
447 JOIN TranslationTemplateItem AS new_template_item
448 ON (old_template_item.potemplate =
449 new_template_item.potemplate)
450 JOIN TranslationMessage AS new_latest_message
451- ON (new_latest_message.potmsgset =
452- new_template_item.potmsgset) AND
453- (new_latest_message.language
454- IS NOT DISTINCT FROM OLD.language AND
455- (new_latest_message.variant)
456- IS NOT DISTINCT FROM OLD.variant)
457+ ON new_latest_message.potmsgset =
458+ new_template_item.potmsgset AND
459+ new_latest_message.language = OLD.language AND
460+ new_latest_message.variant IS NOT DISTINCT FROM OLD.variant
 461+                LEFT OUTER JOIN POFileTranslator AS ExistingEntry
462+ ON ExistingEntry.person = OLD.submitter AND
463+ ExistingEntry.pofile = POFile.id
464 WHERE
465- new_latest_message.submitter=OLD.submitter
466+ new_latest_message.submitter = OLD.submitter AND
467+ ExistingEntry IS NULL
468 ORDER BY new_latest_message.submitter, pofile.id,
469 new_latest_message.date_created DESC,
470 new_latest_message.id DESC;
471
472=== modified file 'lib/canonical/config/schema-lazr.conf'
473--- lib/canonical/config/schema-lazr.conf 2009-07-16 21:12:16 +0000
474+++ lib/canonical/config/schema-lazr.conf 2009-07-22 13:02:10 +0000
475@@ -2,6 +2,8 @@
476 # The database user which will be used to expire questions.
477 # datatype: string
478 dbuser: answertracker
479+storm_cache: generational
480+storm_cache_size: 500
481
482 # The number of days of inactivity required before a question in
483 # the open or needs information state is expired.
484@@ -31,6 +33,8 @@
485 # The database user which will be used by this process.
486 # datatype: string
487 dbuser: branchscanner
488+storm_cache: generational
489+storm_cache_size: 500
490
491 # See [error_reports].
492 error_dir: none
493@@ -46,6 +50,8 @@
494 # The database user which will be used by this process.
495 # datatype: string
496 dbuser: fiera
497+storm_cache: generational
498+storm_cache_size: 500
499
500 # datatype: string
501 default_sender_address: noreply@launchpad.net
502@@ -178,6 +184,8 @@
503 # The database user to run this process as.
504 # datatype: string
505 dbuser: checkwatches
506+storm_cache: generational
507+storm_cache_size: 500
508
509 # See [error_reports].
510 error_dir: none
511@@ -477,6 +485,8 @@
512 # The database user which will be used by this process.
513 # datatype: string
514 dbuser: create-merge-proposals
515+storm_cache: generational
516+storm_cache_size: 500
517
518 # See [error_reports].
519 error_dir: none
520@@ -540,10 +550,17 @@
521 soft_request_timeout: None
522
523 # The Storm cache type to use. May be 'default', 'generational' or 'stupid'
524+# datatype: string
525 storm_cache: generational
526
527-# The size of the Storm cache.
528-storm_cache_size: 5000
529+# The size of the Storm cache in objects. We start small, because this
530+# is the default used by everything and we have no idea if we are
531+# dealing with tiny objects or huge objects. Individual scripts can
532+# easily increase the cache size if database performance is an issue
533+# and RAM usage is not. See Bug #393625 for the issue that prompted
534+# this change.
535+# datatype: integer
536+storm_cache_size: 500
537
538
539 [debug]
540@@ -609,13 +626,16 @@
541 # The database user which will be used by this process.
542 # datatype: string
543 dbuser: teammembership
544+storm_cache: generational
545+storm_cache_size: 500
546
547
548 [gina]
549 # The database user which will be used by this process.
550 # datatype: string
551 dbuser: gina
552-
553+storm_cache: generational
554+storm_cache_size: 500
555
556 [gina_target.template]
557 # The distribution name (e.g. ubuntu) from where the packages
558@@ -803,6 +823,8 @@
559 # The database user which will be used by this process.
560 # datatype: string
561 dbuser: karma
562+storm_cache: generational
563+storm_cache_size: 500
564
565 # When calculating karma, if a categories scaling factor is
566 # larger than this it is reduced down to this maximum. This is
567@@ -821,6 +843,8 @@
568 # datatype: string
569 dbuser: launchpad_main
570 auth_dbuser: launchpad_auth
571+storm_cache: generational
572+storm_cache_size: 10000
573
574 # If true, Launchpad is running in read-only mode. Attempts to
575 # write to the Launchpad database will be denied, and an explanatory
576@@ -1019,10 +1043,6 @@
577 # ba-ws.geonames.net.
578 geonames_identity:
579
580-storm_cache: generational
581-storm_cache_size: 10000
582-
583-
584 [launchpad_session]
585 # The hostname where the session database is located.
586 # If the value is empty or None, localhost via UNIX sockets
587@@ -1051,6 +1071,8 @@
588 # The database user which will be used by this process.
589 # datatype: string
590 dbuser: librarian
591+storm_cache: generational
592+storm_cache_size: 500
593
594 isolation_level: read_committed
595
596@@ -1104,6 +1126,8 @@
597 # The database user which will be used by this process.
598 # datatype: string
599 dbuser: librariangc
600+storm_cache: generational
601+storm_cache_size: 500
602
603
604 [librarian_server]
605@@ -1304,6 +1328,8 @@
606 # The database user which will be used by this process.
607 # datatype: string
608 dbuser: mp-creation-job
609+storm_cache: generational
610+storm_cache_size: 500
611
612 # See [error_reports].
613 error_dir: none
614@@ -1323,7 +1349,6 @@
615 # datatype: integer
616 retained_days: 366
617
618-
619 [personalpackagearchive]
620 # Directory to be created to store PPAs.
621 # datatype: string
622@@ -1359,18 +1384,22 @@
623 # The database user which will be used by this process.
624 # datatype: string
625 dbuser: poimport
626-
627+storm_cache: generational
628+storm_cache_size: 500
629
630 [processmail]
631 # The database user which will be used by this process.
632 # datatype: string
633 dbuser: processmail
634-
635+storm_cache: generational
636+storm_cache_size: 500
637
638 [productreleasefinder]
639 # The database user which will be used by this process.
640 # datatype: string
641 dbuser: productreleasefinder
642+storm_cache: generational
643+storm_cache_size: 500
644
645 [profiling]
646 # When set to True, each requests will be profiled and the resulting data
647@@ -1392,6 +1421,8 @@
648 # The database user which will be used by this process.
649 # datatype: string
650 dbuser: reclaim-branch-space
651+storm_cache: generational
652+storm_cache_size: 500
653
654 # See [error_reports].
655 error_dir: none
656@@ -1434,9 +1465,14 @@
657 translate_pages_max_batch_size: 50
658
659 [rosettabranches]
660+# XXX stub 20090722 bug=402891: Scripts need to use unique
661+# database users but the rosetta branch scanner is incorrectly reusing an
662+# existing database user.
663 # The database user which will be used by the rosetta-branches cronscript.
664 # datatype: string
665 dbuser: branchscanner
666+storm_cache: generational
667+storm_cache_size: 500
668
669 # See [error_reports].
670 error_dir: none
671@@ -1452,6 +1488,8 @@
672 # The database user which will be used by this process.
673 # datatype: string
674 dbuser: send-branch-mail
675+storm_cache: generational
676+storm_cache_size: 500
677
678 # See [error_reports].
679 error_dir: none
680@@ -1467,6 +1505,8 @@
681 # The database user which will be used by this process.
682 # datatype: string
683 dbuser: shipit
684+storm_cache: generational
685+storm_cache_size: 500
686
687 # datatype: string
688 admins_email_address: info@shipit.ubuntu.com
689@@ -1501,7 +1541,8 @@
690 # The database user which will be used by this process.
691 # datatype: string
692 dbuser: statistician
693-
694+storm_cache: generational
695+storm_cache_size: 500
696
697 [supermirror]
698 # The longest period of time, in seconds, that the scheduler will
699@@ -1550,22 +1591,30 @@
700 # The database user which will be used by this process.
701 # datatype: string
702 dbuser: targetnamecacheupdater
703+storm_cache: generational
704+storm_cache_size: 500
705
706
707 [updateremoteproduct]
708 # The database user to run this process as.
709 # datatype: string
710 dbuser: updateremoteproduct
711+storm_cache: generational
712+storm_cache_size: 500
713
714 [updatesourceforgeremoteproduct]
715 # The database user to run this process as.
716 # datatype: string
717 dbuser: updatesourceforgeremoteproduct
718+storm_cache: generational
719+storm_cache_size: 500
720
721 [uploader]
722 # The database user which will be used by this process.
723 # datatype: string
724 dbuser: uploader
725+storm_cache: generational
726+storm_cache_size: 500
727
728 # datatype: string
729 default_recipient_name: none
730@@ -1584,13 +1633,19 @@
731 # The database user which will be used by this process.
732 # datatype: string
733 dbuser: queued
734+storm_cache: generational
735+storm_cache_size: 500
736
737
738 [binaryfile_expire]
739 dbuser: binaryfile-expire
740+storm_cache: generational
741+storm_cache_size: 500
742
743 [generateppahtaccess]
744 dbuser: generateppahtaccess
745+storm_cache: generational
746+storm_cache_size: 500
747
748 [vhosts]
749 # When true, use https URLs unless explicitly overridden.
750
751=== modified file 'lib/canonical/launchpad/doc/celebrities.txt'
752--- lib/canonical/launchpad/doc/celebrities.txt 2009-05-06 16:10:16 +0000
753+++ lib/canonical/launchpad/doc/celebrities.txt 2009-07-15 02:58:08 +0000
754@@ -200,3 +200,14 @@
755 >>> ubuntu_branches = personset.getByName('ubuntu-branches')
756 >>> celebs.ubuntu_branches == ubuntu_branches
757 True
758+
759+
760+== Ubuntu security team ==
761+
762+There is a celebrity representing the 'ubuntu-security' team, which is
763+mainly used for granting special permissions on the ubuntu primary
764+archive.
765+
766+ >>> ubuntu_security = personset.getByName('ubuntu-security')
767+ >>> celebs.ubuntu_security == ubuntu_security
768+ True
769
770=== modified file 'lib/canonical/launchpad/doc/vocabularies.txt'
771--- lib/canonical/launchpad/doc/vocabularies.txt 2009-06-02 08:20:49 +0000
772+++ lib/canonical/launchpad/doc/vocabularies.txt 2009-07-03 11:34:14 +0000
773@@ -457,6 +457,31 @@
774 BranchVocabulary with respect to the tokens and privacy awareness.
775
776
777+=== HostedBranchRestrictedOnOwner ===
778+
779+Here's a vocabulary for all hosted branches owned by the current user.
780+
781+ >>> from lp.code.enums import BranchType
782+
783+ >>> a_user = factory.makePerson(name='a-branching-user')
784+ >>> product1 = factory.makeProduct(name='product-one')
785+ >>> mirrored_branch = factory.makeBranch(
786+ ... owner=a_user, product=product1, name='mirrored',
787+ ... branch_type=BranchType.MIRRORED)
788+ >>> product2 = factory.makeProduct(name='product-two')
789+ >>> hosted_branch = factory.makeBranch(
790+ ... owner=a_user, product=product2, name='hosted')
791+ >>> foreign_branch = factory.makeBranch()
792+
793+It returns branches owned by the user, but not ones owned by others, nor
794+ones that aren't hosted on Launchpad.
795+
796+ >>> branch_vocabulary = vocabulary_registry.get(
797+ ... a_user, "HostedBranchRestrictedOnOwner")
798+ >>> print_vocab_branches(branch_vocabulary, None)
799+ ~a-branching-user/product-two/hosted
800+
801+
802 === Processor ===
803
804 All processors type available in Launchpad.
805
806=== modified file 'lib/canonical/launchpad/icing/style.css'
807--- lib/canonical/launchpad/icing/style.css 2009-07-16 04:17:24 +0000
808+++ lib/canonical/launchpad/icing/style.css 2009-07-21 03:23:28 +0000
809@@ -1618,9 +1618,15 @@
810 font-size: 1.5em;
811 font-family: "URW Gothic L","MgOpen Moderna","Lucida Sans",sans-serif;
812 text-align: center;
813- margin: 1em 0 1em 0;
814- padding: 0.2em;
815+ margin: auto;
816+ margin-top: 1em;
817+ margin-bottom: 1em;
818+ padding: 0.5em;
819 background-color: #ededed;
820+ width: 90%;
821+}
822+#home-description .smaller {
823+ font-size: 80%;
824 }
825 #home-stats {
826 margin: auto;
827
828=== added file 'lib/canonical/launchpad/images/edit-transparent.png'
829Binary files lib/canonical/launchpad/images/edit-transparent.png 1970-01-01 00:00:00 +0000 and lib/canonical/launchpad/images/edit-transparent.png 2009-07-20 14:59:27 +0000 differ
830=== modified file 'lib/canonical/launchpad/interfaces/launchpad.py'
831--- lib/canonical/launchpad/interfaces/launchpad.py 2009-07-17 00:26:05 +0000
832+++ lib/canonical/launchpad/interfaces/launchpad.py 2009-07-19 04:41:14 +0000
833@@ -128,10 +128,10 @@
834 ubuntu_branches = Attribute("The Ubuntu branches team")
835 ubuntu_bugzilla = Attribute("The Ubuntu Bugzilla.")
836 ubuntu_cdimage_mirror = Attribute("The main cdimage mirror for Ubuntu.")
837+ ubuntu_security = Attribute("The 'ubuntu-security' team.")
838 vcs_imports = Attribute("The 'vcs-imports' team.")
839
840
841-
842 class ICrowd(Interface):
843
844 def __contains__(person_or_team_or_anything):
845
846=== modified file 'lib/canonical/launchpad/javascript/bugs/bugtask-index.js'
847--- lib/canonical/launchpad/javascript/bugs/bugtask-index.js 2009-07-17 18:46:25 +0000
848+++ lib/canonical/launchpad/javascript/bugs/bugtask-index.js 2009-07-21 22:46:23 +0000
849@@ -259,11 +259,6 @@
850 subscriber: new Y.lp.Subscriber({uri: LP.client.links.me})
851 });
852
853- // XXX deryck 2009-07-09 bug=397406 The classnames used to
854- // determine direct vs. dupe subscriptions are not set
855- // correctly and fix_subscription_link_classes works around
856- // this bug.
857- fix_subscription_link_classes(subscription);
858 var is_direct = subscription.get(
859 'link').get('parentNode').hasClass('subscribed-true');
860 var has_dupes = subscription.get(
861@@ -298,27 +293,6 @@
862 }
863
864 /*
865- * XXX deryck 2009-07-09 bug=397406 The classnames used to
866- * determine direct vs. dupe subscriptions are not set
867- * correctly and fix_subscription_link_classes works around
868- * this bug.
869- *
870- * @method fix_subscription_link_classes
871- * @param subscription {Object} A Y.lp.Subscription object.
872- */
873-function fix_subscription_link_classes(subscription) {
874- var subscriber = subscription.get('subscriber');
875- var subscription_link = subscription.get('link');
876- var me_nodes = Y.all('.subscriber-' + subscriber.get('name'));
877- if (Y.Lang.isValue(me_nodes) && me_nodes.size() > 1) {
878- set_subscription_link_parent_class(subscription_link, true, true);
879- me_nodes.each(function(div) {
880- set_subscription_link_parent_class(div.query('img'), true, true);
881- });
882- }
883-}
884-
885-/*
886 * Set click handlers for unsubscribe remove icons.
887 *
888 * @method setup_unsubscribe_icon_handlers
889@@ -1233,7 +1207,7 @@
890 edit_icon.setAttribute('src', '/@@/edit');
891 });
892 content.on('mouseout', function(e) {
893- edit_icon.setAttribute('src', null);
894+ edit_icon.setAttribute('src', '/@@/edit-transparent');
895 });
896 content.setStyle('cursor', 'pointer');
897 };
898@@ -1259,8 +1233,8 @@
899 // canel clicks on the edit links. Users most likely don't
900 // want to edit the bugtasks.
901 if (Y.Lang.isValue(LP.client.cache.bug.duplicate_of_link)) {
902- status_content.on('click', function(e) { e.halt() });
903- importance_content.on('click', function(e) { e.halt() });
904+ status_content.on('click', function(e) { e.halt(); });
905+ importance_content.on('click', function(e) { e.halt(); });
906 return;
907 }
908
909
910=== modified file 'lib/canonical/launchpad/javascript/lp/lp.js'
911--- lib/canonical/launchpad/javascript/lp/lp.js 2009-07-13 10:52:46 +0000
912+++ lib/canonical/launchpad/javascript/lp/lp.js 2009-07-21 17:14:29 +0000
913@@ -516,7 +516,17 @@
914 for (var i = 0; i < nodes.length; i++) {
915 var node = nodes[i];
916 if (node.focus) {
917- node.focus();
918+ try {
919+ // Trying to focus a hidden element throws an error in IE8.
920+ if (node.offsetHeight !== 0) {
921+ node.focus();
922+ }
923+ } catch (e) {
924+ YUI().use('console', function(Y) {
925+ Y.log('In setFocusByName(<' +
926+ node.tagName + ' type=' + node.type + '>): ' + e);
927+ });
928+ }
929 break;
930 }
931 }
932
933=== modified file 'lib/canonical/launchpad/pagetitles.py'
934--- lib/canonical/launchpad/pagetitles.py 2009-07-17 00:26:05 +0000
935+++ lib/canonical/launchpad/pagetitles.py 2009-07-19 04:41:14 +0000
936@@ -1136,6 +1136,9 @@
937
938 productseries_linkbranch = ContextTitle('Link an existing branch to %s')
939
940+productseries_link_translations_branch = ContextTitle(
941+ "Set translations export branch for %s")
942+
943 productseries_index = ContextTitle('%s')
944
945 productseries_delete = ContextTitle('Delete %s')
946
947=== modified file 'lib/canonical/launchpad/security.py'
948--- lib/canonical/launchpad/security.py 2009-07-19 02:09:34 +0000
949+++ lib/canonical/launchpad/security.py 2009-07-19 04:41:14 +0000
950@@ -1333,10 +1333,18 @@
951 usedfor = IPackageUpload
952
953 def checkAuthenticated(self, user):
954- """Return True if user has an ArchivePermission or is an admin."""
955+ """Return True if user has an ArchivePermission or is an admin.
956+
957+ If it's a delayed-copy, check if the user can upload to its targeted
958+ archive.
959+ """
960 if AdminByAdminsTeam.checkAuthenticated(self, user):
961 return True
962
963+ if self.obj.is_delayed_copy:
964+ archive_append = AppendArchive(self.obj.archive)
965+ return archive_append.checkAuthenticated(user)
966+
967 permission_set = getUtility(IArchivePermissionSet)
968 permissions = permission_set.componentsForQueueAdmin(
969 self.obj.archive, user)
970@@ -1907,6 +1915,9 @@
971 PPA upload rights are managed via `IArchive.canUpload`;
972
973 Appending to PRIMARY, PARTNER or COPY archives is restricted to owners.
974+
975+ Appending to ubuntu main archives can also be done by the
976+ 'ubuntu-security' celebrity.
977 """
978 permission = 'launchpad.Append'
979 usedfor = IArchive
980@@ -1918,6 +1929,12 @@
981 if self.obj.is_ppa and self.obj.canUpload(user):
982 return True
983
984+ celebrities = getUtility(ILaunchpadCelebrities)
985+ if (self.obj.is_main and
986+ self.obj.distribution == celebrities.ubuntu and
987+ user.inTeam(celebrities.ubuntu_security)):
988+ return True
989+
990 return False
991
992
993
994=== modified file 'lib/canonical/launchpad/templates/root-index.pt'
995--- lib/canonical/launchpad/templates/root-index.pt 2009-07-17 17:59:07 +0000
996+++ lib/canonical/launchpad/templates/root-index.pt 2009-07-21 06:42:41 +0000
997@@ -46,7 +46,6 @@
998 alt=""
999 style="margin: 0 9em 1em 0"/>
1000 <br />
1001-
1002 <input id="text" type="text" name="field.text" size="50" />
1003 <input type="submit" value="Search Launchpad" />
1004 </form>
1005@@ -59,11 +58,11 @@
1006 <strong tal:content="view/blueprint_count/fmt:intcomma">123</strong>&nbsp;blueprints,
1007 and&nbsp;counting...
1008 </div>
1009- <div id="home-description">Launchpad is a unique collaboration and
1010- <a href="http://bazaar-vcs.org/">Bazaar</a>
1011- code hosting platform for software projects. <a
1012- href="/+tour"
1013- >Read more...</a></div>
1014+ <div id="home-description">
1015+ Launchpad is a code hosting and software collaboration platform.<br />
1016+ <span class="smaller">Launchpad is open source -- you can <a href="https://dev.launchpad.net/">join the community</a> of people
1017+ who help improve it.</span>
1018+ </div>
1019 <div id="home-page" style="width:90%; max-width:80em; margin:auto;">
1020 <div class="three column left" id="featured-projects">
1021 <h2>Featured projects</h2>
1022
1023=== modified file 'lib/canonical/launchpad/tour/bugs'
1024--- lib/canonical/launchpad/tour/bugs 2009-06-12 16:36:02 +0000
1025+++ lib/canonical/launchpad/tour/bugs 2009-07-20 19:12:02 +0000
1026@@ -66,7 +66,7 @@
1027 In Launchpad, you can share a bug report and its comment history with other communities interested in finding a fix.
1028 Each project &mdash; or even different releases within a project &mdash; can track its own status, importance and assignee for that same bug report.
1029 <br /><br />
1030- Even if the bug is tracked elsewhere &mdash; such as in Trac, Sourceforge or Bugzilla &mdash; Launchpad can pull in it status. Using our <a href="<a href="https://help.launchpad.net/Bugs/PluginAPISpec">bug tracker plugins</a> for Bugzilla and Trac you can share a comment history for the same bug tracked both in Launchpad and an external tracker.<br /><br />
1031+		Even if the bug is tracked elsewhere &mdash; such as in Trac, Sourceforge or Bugzilla &mdash; Launchpad can pull in its status. Using our <a href="https://help.launchpad.net/Bugs/PluginAPISpec">bug tracker plugins</a> for Bugzilla and Trac you can share a comment history for the same bug tracked both in Launchpad and an external tracker.<br /><br />
1032 And to help find low-hanging fruit, there&rsquo;s a &ldquo;Bugs fixed elsewhere&rdquo; report that shows which of your bugs are marked fixed in other communities.
1033
1034 </p>
1035
1036=== modified file 'lib/canonical/launchpad/utilities/celebrities.py'
1037--- lib/canonical/launchpad/utilities/celebrities.py 2009-07-17 00:26:05 +0000
1038+++ lib/canonical/launchpad/utilities/celebrities.py 2009-07-19 04:41:14 +0000
1039@@ -136,6 +136,7 @@
1040 ubuntu = CelebrityDescriptor(IDistributionSet, 'ubuntu')
1041 ubuntu_branches = CelebrityDescriptor(IPersonSet, 'ubuntu-branches')
1042 ubuntu_bugzilla = CelebrityDescriptor(IBugTrackerSet, 'ubuntu-bugzilla')
1043+ ubuntu_security = CelebrityDescriptor(IPersonSet, 'ubuntu-security')
1044 vcs_imports = CelebrityDescriptor(IPersonSet, 'vcs-imports')
1045
1046 @property
1047
1048=== modified file 'lib/canonical/launchpad/vocabularies/configure.zcml'
1049--- lib/canonical/launchpad/vocabularies/configure.zcml 2009-07-13 18:15:02 +0000
1050+++ lib/canonical/launchpad/vocabularies/configure.zcml 2009-07-19 04:41:14 +0000
1051@@ -17,6 +17,12 @@
1052 />
1053
1054 <utility
1055+ name="HostedBranchRestrictedOnOwner"
1056+ component="canonical.launchpad.vocabularies.HostedBranchRestrictedOnOwnerVocabulary"
1057+ provides="zope.schema.interfaces.IVocabularyFactory"
1058+ />
1059+
1060+ <utility
1061 name="BranchRestrictedOnProduct"
1062 component="canonical.launchpad.vocabularies.BranchRestrictedOnProductVocabulary"
1063 provides="zope.schema.interfaces.IVocabularyFactory"
1064
1065=== modified file 'lib/canonical/launchpad/vocabularies/dbobjects.py'
1066--- lib/canonical/launchpad/vocabularies/dbobjects.py 2009-07-17 00:26:05 +0000
1067+++ lib/canonical/launchpad/vocabularies/dbobjects.py 2009-07-19 04:41:14 +0000
1068@@ -11,6 +11,7 @@
1069
1070 __all__ = [
1071 'BountyVocabulary',
1072+ 'HostedBranchRestrictedOnOwnerVocabulary',
1073 'BranchRestrictedOnProductVocabulary',
1074 'BranchVocabulary',
1075 'BugNominatableSeriesesVocabulary',
1076@@ -76,11 +77,13 @@
1077 CountableIterator, IHugeVocabulary,
1078 NamedSQLObjectVocabulary, SQLObjectVocabularyBase)
1079
1080+from lp.code.enums import BranchType
1081 from lp.code.interfaces.branch import IBranch
1082 from lp.code.interfaces.branchcollection import IAllBranches
1083 from lp.registry.interfaces.distribution import IDistribution
1084 from lp.registry.interfaces.distroseries import (
1085 DistroSeriesStatus, IDistroSeries)
1086+from lp.registry.interfaces.person import IPerson
1087 from lp.registry.interfaces.product import IProduct
1088 from lp.registry.interfaces.productseries import IProductSeries
1089 from lp.registry.interfaces.project import IProject
1090@@ -186,6 +189,25 @@
1091 return getUtility(IAllBranches).inProduct(self.product)
1092
1093
1094+class HostedBranchRestrictedOnOwnerVocabulary(BranchVocabularyBase):
1095+ """A vocabulary for hosted branches owned by the current user.
1096+
1097+ These are branches that the user is guaranteed to be able to push
1098+ to.
1099+ """
1100+ def __init__(self, context=None):
1101+ """Pass a Person as context, or anything else for the current user."""
1102+ super(HostedBranchRestrictedOnOwnerVocabulary, self).__init__(context)
1103+ if IPerson.providedBy(self.context):
1104+ self.user = context
1105+ else:
1106+ self.user = getUtility(ILaunchBag).user
1107+
1108+ def _getCollection(self):
1109+ return getUtility(IAllBranches).ownedBy(self.user).withBranchType(
1110+ BranchType.HOSTED)
1111+
1112+
1113 class BugVocabulary(SQLObjectVocabularyBase):
1114
1115 _table = Bug
1116
1117=== modified file 'lib/canonical/launchpad/windmill/tests/test_registry/test_plusnew_step2.py'
1118--- lib/canonical/launchpad/windmill/tests/test_registry/test_plusnew_step2.py 2009-06-25 05:30:52 +0000
1119+++ lib/canonical/launchpad/windmill/tests/test_registry/test_plusnew_step2.py 2009-07-21 17:14:29 +0000
1120@@ -52,12 +52,17 @@
1121 validator='className|unseen')
1122 # Clicking on the href expands the search results.
1123 client.click(id='search-results-expander')
1124- client.waits.sleep(milliseconds=u'1000')
1125+ client.waits.forElement(
1126+ xpath='//*[@id="search-results" and contains(@class, "lazr-opened")]',
1127+ milliseconds=u'1000')
1128 client.asserts.assertProperty(
1129 id=u'search-results',
1130 validator='className|lazr-opened')
1131 # Clicking it again hides the results.
1132 client.click(id='search-results-expander')
1133+ client.waits.forElement(
1134+ xpath='//*[@id="search-results" and contains(@class, "lazr-closed")]',
1135+ milliseconds=u'1000')
1136 client.asserts.assertProperty(
1137 id=u'search-results',
1138 validator='className|lazr-closed')
1139
1140=== modified file 'lib/canonical/lazr/interfaces/feed.py'
1141--- lib/canonical/lazr/interfaces/feed.py 2009-06-25 05:30:52 +0000
1142+++ lib/canonical/lazr/interfaces/feed.py 2009-07-21 07:27:48 +0000
1143@@ -133,14 +133,7 @@
1144 def getItems():
1145 """Get the individual items for the feed.
1146
1147- For instance, get all announcements for a project. Each item should
1148- be converted to a feed entry using itemToFeedEntry.
1149- """
1150-
1151- def itemToFeedEntry(item):
1152- """Convert a single item to a formatted feed entry.
1153-
1154- An individual entry will be an instance providing `IFeedEntry`.
1155+ Individual items will provide `IFeedEntry`.
1156 """
1157
1158 def renderAtom():
1159
1160=== removed symlink 'lib/canonical/shipit'
1161=== target was u'../../sourcecode/shipit'
1162=== added symlink 'lib/canonical/shipit'
1163=== target is u'../../sourcecode/shipit'
1164=== added symlink 'lib/canonical/signon'
1165=== target is u'../../sourcecode/canonical-identity-provider'
1166=== removed symlink 'lib/canonical/signon'
1167=== target was u'../../sourcecode/canonical-identity-provider'
1168=== modified file 'lib/canonical/widgets/product.py'
1169--- lib/canonical/widgets/product.py 2009-07-17 00:26:05 +0000
1170+++ lib/canonical/widgets/product.py 2009-07-21 03:56:03 +0000
1171@@ -362,7 +362,9 @@
1172 return self.template()
1173
1174 def _renderTable(self, category, column_count=1):
1175- html = ['<table id="%s">' % category]
1176+ # The tables are wrapped in divs, since IE8 does not respond
1177+ # to setting the table's height to zero.
1178+ html = ['<div id="%s"><table>' % category]
1179 rendered_items = self.items_by_category[category]
1180 row_count = int(math.ceil(len(rendered_items) / float(column_count)))
1181 for i in range(0, row_count):
1182@@ -373,7 +375,7 @@
1183 break
1184 html.append('<td>%s</td>' % rendered_items[index])
1185 html.append('</tr>')
1186- html.append('</table>')
1187+ html.append('</table></div>')
1188 return '\n'.join(html)
1189
1190
1191
1192=== modified file 'lib/lp/bugs/browser/bug.py'
1193--- lib/lp/bugs/browser/bug.py 2009-07-17 00:26:05 +0000
1194+++ lib/lp/bugs/browser/bug.py 2009-07-20 16:59:49 +0000
1195@@ -385,8 +385,15 @@
1196
1197 @cachedproperty
1198 def duplicate_subscribers(self):
1199- """Caches the list of subscribers from duplicates."""
1200- return frozenset(self.context.getSubscribersFromDuplicates())
1201+ """Caches the list of subscribers from duplicates.
1202+
1203+ Don't use getSubscribersFromDuplicates here because that method
1204+ omits a user if the user is also a direct or indirect subscriber.
1205+ getSubscriptionsFromDuplicates doesn't, so find person objects via
1206+ this method.
1207+ """
1208+ dupe_subscriptions = self.context.getSubscriptionsFromDuplicates()
1209+ return frozenset([sub.person for sub in dupe_subscriptions])
1210
1211 def subscription_class(self, subscribed_person):
1212 """Returns a set of CSS class names based on subscription status.
1213
1214=== modified file 'lib/lp/bugs/model/bug.py'
1215--- lib/lp/bugs/model/bug.py 2009-07-17 18:46:25 +0000
1216+++ lib/lp/bugs/model/bug.py 2009-07-21 20:33:31 +0000
1217@@ -68,7 +68,6 @@
1218 from lp.bugs.interfaces.bugactivity import IBugActivitySet
1219 from lp.bugs.interfaces.bugattachment import (
1220 BugAttachmentType, IBugAttachmentSet)
1221-from lp.bugs.interfaces.bugbranch import IBugBranch
1222 from lp.bugs.interfaces.bugmessage import IBugMessageSet
1223 from lp.bugs.interfaces.bugnomination import (
1224 NominationError, NominationSeriesObsoleteError)
1225@@ -533,17 +532,6 @@
1226 Bug.duplicateof = %d""" % self.id,
1227 prejoins=["person"], clauseTables=["Bug"]))
1228
1229- # Direct and "also notified" subscribers take precedence
1230- # over subscribers from duplicates.
1231- duplicate_subscriptions -= set(self.getDirectSubscriptions())
1232- also_notified_subscriptions = set()
1233- for also_notified_subscriber in self.getAlsoNotifiedSubscribers():
1234- for duplicate_subscription in duplicate_subscriptions:
1235- if also_notified_subscriber == duplicate_subscription.person:
1236- also_notified_subscriptions.add(duplicate_subscription)
1237- break
1238- duplicate_subscriptions -= also_notified_subscriptions
1239-
1240 # Only add a subscriber once to the list.
1241 duplicate_subscribers = set(
1242 sub.person for sub in duplicate_subscriptions)
1243
1244=== modified file 'lib/lp/bugs/templates/bugtask-tasks-and-nominations-table-row.pt'
1245--- lib/lp/bugs/templates/bugtask-tasks-and-nominations-table-row.pt 2009-07-17 18:46:25 +0000
1246+++ lib/lp/bugs/templates/bugtask-tasks-and-nominations-table-row.pt 2009-07-20 14:59:27 +0000
1247@@ -64,7 +64,7 @@
1248 style="float: left"
1249 tal:content="context/status/title" />
1250 <a href="+editstatus" style="margin-left: 3px">
1251- <img class="editicon" />
1252+ <img class="editicon" src="/@@/edit-transparent" />
1253 </a>
1254 </div>
1255 </td>
1256@@ -78,7 +78,7 @@
1257 style="float: left"
1258 tal:content="context/importance/title" />
1259 <a href="+editstatus" style="margin-left: 3px">
1260- <img class="editicon" />
1261+ <img class="editicon" src="/@@/edit-transparent" />
1262 </a>
1263 </div>
1264 </td>
1265
1266=== modified file 'lib/lp/bugs/windmill/tests/test_bugs/test_bug_inline_subscriber.py'
1267--- lib/lp/bugs/windmill/tests/test_bugs/test_bug_inline_subscriber.py 2009-07-17 00:26:05 +0000
1268+++ lib/lp/bugs/windmill/tests/test_bugs/test_bug_inline_subscriber.py 2009-07-20 18:38:23 +0000
1269@@ -167,52 +167,81 @@
1270 xpath=SUBSCRIPTION_LINK, validator=u'Subscribe')
1271 client.asserts.assertNotNode(classname=FOO_BAR_CLASS)
1272
1273- # A bit of a corner case here, but make sure that when
1274- # a user is subscribed to both the main bug and the dupe
1275- # that the user is unsubscribed correctly.
1276- client.open(url=BUG_URL % 5)
1277- client.waits.forPageLoad(timeout=PAGE_LOAD)
1278- client.waits.forElement(
1279- id=u'subscribers-links', timeout=FOR_ELEMENT)
1280- # Subscribe to the main bug, bug 5.
1281- client.click(xpath=SUBSCRIPTION_LINK)
1282- client.waits.sleep(milliseconds=SLEEP)
1283- client.asserts.assertText(
1284- xpath=SUBSCRIPTION_LINK, validator=u'Unsubscribe')
1285- # Go to bug 6, the dupe, and subscribe.
1286- client.open(url=BUG_URL % 6)
1287- client.waits.forPageLoad(timeout=PAGE_LOAD)
1288- client.waits.forElement(
1289- id=u'subscribers-links', timeout=FOR_ELEMENT)
1290- client.click(xpath=SUBSCRIPTION_LINK)
1291- # Now back to bug 5. The first unsubscribe should remove
1292- # the current bug direct subscription.
1293- client.open(url=BUG_URL % 5)
1294- client.waits.forPageLoad(timeout=PAGE_LOAD)
1295- client.waits.forElement(
1296- id=u'subscribers-links', timeout=FOR_ELEMENT)
1297- client.asserts.assertText(
1298- xpath=SUBSCRIPTION_LINK, validator=u'Unsubscribe')
1299- # Confirm there are 2 subscriber links: one in direct subscribers,
1300- # and one in duplicate subscribers.
1301- client.asserts.assertNode(
1302- xpath=(u'//div[@id="subscribers-links"]'
1303- '/div/a[@name="Foo Bar"]'))
1304- client.asserts.assertNode(
1305- xpath=(u'//div[@id="subscribers-from-duplicates"]'
1306- '/div/a[@name="Foo Bar"]'))
1307- # The first click unsubscribes the direct subscription, leaving the dupe.
1308- client.click(xpath=SUBSCRIPTION_LINK)
1309- client.waits.sleep(milliseconds=SLEEP)
1310- client.asserts.assertNotNode(
1311- xpath=(u'//div[@id="subscribers-links"]'
1312- '/div/a[@name="Foo Bar"]'))
1313- client.asserts.assertNode(
1314- xpath=(u'//div[@id="subscribers-from-duplicates"]'
1315- '/div/a[@name="Foo Bar"]'))
1316- # The second unsubscribe removes the dupe/
1317- client.click(xpath=SUBSCRIPTION_LINK)
1318- client.waits.sleep(milliseconds=SLEEP)
1319- client.asserts.assertNotNode(
1320- xpath=(u'//div[@id="subscribers-from-duplicates"]'
1321- '/div/a[@name="Foo Bar"]'))
1322+ # Subscribe/Unsubscribe link handling when dealing
1323+ # with duplicates...
1324+ #
1325+ # First test case, ensure unsubscribing works when
1326+ # dealing with a duplicate and an indirect subscription.
1327+ lpuser.SAMPLE_PERSON.ensure_login(client)
1328+ # Go to bug 6, the dupe, and subscribe.
1329+ client.open(url=BUG_URL % 6)
1330+ client.waits.forPageLoad(timeout=PAGE_LOAD)
1331+ client.waits.forElement(
1332+ id=u'subscribers-links', timeout=FOR_ELEMENT)
1333+ client.click(xpath=SUBSCRIPTION_LINK)
1334+ client.waits.sleep(milliseconds=SLEEP)
1335+ client.asserts.assertText(
1336+ xpath=SUBSCRIPTION_LINK, validator=u'Unsubscribe')
1337+ # Now back to bug 5.
1338+ client.open(url=BUG_URL % 5)
1339+ client.waits.forPageLoad(timeout=PAGE_LOAD)
1340+ client.waits.forElement(
1341+ id=u'subscribers-links', timeout=FOR_ELEMENT)
1342+ # Confirm there are 2 subscriber links: one in duplicate subscribers,
1343+ # and one in indirect subscribers.
1344+ client.asserts.assertNode(
1345+ xpath=(u'//div[@id="subscribers-from-duplicates"]'
1346+ '/div/a[@name="Sample Person"]'))
1347+ client.asserts.assertNode(
1348+ xpath=(u'//div[@id="subscribers-indirect"]'
1349+ '/div/a[text() = "Sample Person"]'))
1350+ # Clicking "Unsubscribe" successfully removes the duplicate subscription,
1351+ # but the indirect subscription remains.
1352+ client.click(xpath=SUBSCRIPTION_LINK)
1353+ client.waits.sleep(milliseconds=SLEEP)
1354+ client.asserts.assertNotNode(
1355+ xpath=(u'//div[@id="subscribers-from-duplicates"]'
1356+ '/div/a[@name="Sample Person"]'))
1357+ client.asserts.assertNode(
1358+ xpath=(u'//div[@id="subscribers-indirect"]'
1359+ '/div/a[text() = "Sample Person"]'))
1360+
1361+ # Second test case, confirm duplicate handling is correct between direct
1362+ # and duplicate subscriptions. Subscribe directly to bug 5.
1363+ client.click(xpath=SUBSCRIPTION_LINK)
1364+ client.waits.sleep(milliseconds=SLEEP)
1365+ client.asserts.assertText(
1366+ xpath=SUBSCRIPTION_LINK, validator=u'Unsubscribe')
1367+ # Go to bug 6, the dupe, and subscribe.
1368+ client.open(url=BUG_URL % 6)
1369+ client.waits.forPageLoad(timeout=PAGE_LOAD)
1370+ client.waits.forElement(
1371+ id=u'subscribers-links', timeout=FOR_ELEMENT)
1372+ client.click(xpath=SUBSCRIPTION_LINK)
1373+ client.waits.sleep(milliseconds=SLEEP)
1374+ client.asserts.assertText(
1375+ xpath=SUBSCRIPTION_LINK, validator=u'Unsubscribe')
1376+ # Now back to bug 5. Confirm there are 2 subscriptions.
1377+ client.open(url=BUG_URL % 5)
1378+ client.asserts.assertNode(
1379+ xpath=(u'//div[@id="subscribers-links"]'
1380+ '/div/a[@name="Sample Person"]'))
1381+ client.asserts.assertNode(
1382+ xpath=(u'//div[@id="subscribers-from-duplicates"]'
1383+ '/div/a[@name="Sample Person"]'))
1384+ # The first click unsubscribes the direct subscription, leaving
1385+ # the duplicate subscription.
1386+ client.click(xpath=SUBSCRIPTION_LINK)
1387+ client.waits.sleep(milliseconds=SLEEP)
1388+ client.asserts.assertNotNode(
1389+ xpath=(u'//div[@id="subscribers-links"]'
1390+ '/div/a[@name="Sample Person"]'))
1391+ client.asserts.assertNode(
1392+ xpath=(u'//div[@id="subscribers-from-duplicates"]'
1393+ '/div/a[@name="Sample Person"]'))
1394+ # The second unsubscribe removes the duplicate, too.
1395+ client.click(xpath=SUBSCRIPTION_LINK)
1396+ client.waits.sleep(milliseconds=SLEEP)
1397+ client.asserts.assertNotNode(
1398+ xpath=(u'//div[@id="subscribers-from-duplicates"]'
1399+ '/div/a[@name="Sample Person"]'))
1400
1401=== modified file 'lib/lp/code/browser/branch.py'
1402--- lib/lp/code/browser/branch.py 2009-07-17 00:26:05 +0000
1403+++ lib/lp/code/browser/branch.py 2009-07-19 04:41:14 +0000
1404@@ -520,6 +520,15 @@
1405 return False
1406 return self.context.target.collection.getBranches().count() > 1
1407
1408+ def translations_sources(self):
1409+ """Anything that automatically exports its translations here.
1410+
1411+ Produces a list, so that the template can easily check whether
1412+ there are any translations sources.
1413+ """
1414+ # Actually only ProductSeries currently do that.
1415+ return list(self.context.getProductSeriesPushingTranslations())
1416+
1417
1418 class DecoratedMergeProposal:
1419 """Provide some additional attributes to a normal branch merge proposal.
1420
1421=== modified file 'lib/lp/code/browser/configure.zcml'
1422--- lib/lp/code/browser/configure.zcml 2009-07-17 00:26:05 +0000
1423+++ lib/lp/code/browser/configure.zcml 2009-07-20 18:22:54 +0000
1424@@ -508,14 +508,14 @@
1425 for="lp.code.interfaces.branchvisibilitypolicy.IHasBranchVisibilityPolicy"
1426 facet="overview"
1427 class="lp.code.browser.branchvisibilitypolicy.AddBranchVisibilityTeamPolicyView"
1428- permission="launchpad.Admin"
1429+ permission="launchpad.Commercial"
1430 template="../templates/branch-visibility-edit.pt"/>
1431 <browser:page
1432 name="+removebranchvisibilitypolicy"
1433 for="lp.code.interfaces.branchvisibilitypolicy.IHasBranchVisibilityPolicy"
1434 facet="overview"
1435 class="lp.code.browser.branchvisibilitypolicy.RemoveBranchVisibilityTeamPolicyView"
1436- permission="launchpad.Admin"
1437+ permission="launchpad.Commercial"
1438 template="../templates/branch-visibility-edit.pt"/>
1439 <browser:page
1440 name="+spark"
1441
1442=== modified file 'lib/lp/code/browser/tests/test_branch.py'
1443--- lib/lp/code/browser/tests/test_branch.py 2009-07-17 00:26:05 +0000
1444+++ lib/lp/code/browser/tests/test_branch.py 2009-07-19 04:41:14 +0000
1445@@ -208,6 +208,27 @@
1446 view.initialize()
1447 self.assertFalse(view.show_merge_links)
1448
1449+ def testNoProductSeriesPushingTranslations(self):
1450+ # By default, a branch view shows no product series pushing
1451+ # translations to the branch.
1452+ branch = self.factory.makeBranch()
1453+
1454+ view = BranchView(branch, self.request)
1455+ view.initialize()
1456+ self.assertEqual(list(view.translations_sources()), [])
1457+
1458+ def testProductSeriesPushingTranslations(self):
1459+ # If a product series exports its translations to the branch,
1460+ # the view shows it.
1461+ product = self.factory.makeProduct()
1462+ trunk = product.getSeries('trunk')
1463+ branch = self.factory.makeBranch(owner=product.owner)
1464+ removeSecurityProxy(trunk).translations_branch = branch
1465+
1466+ view = BranchView(branch, self.request)
1467+ view.initialize()
1468+ self.assertEqual(list(view.translations_sources()), [trunk])
1469+
1470
1471 class TestBranchReviewerEditView(TestCaseWithFactory):
1472 """Test the BranchReviewerEditView view."""
1473
1474=== modified file 'lib/lp/code/configure.zcml'
1475--- lib/lp/code/configure.zcml 2009-07-17 00:26:05 +0000
1476+++ lib/lp/code/configure.zcml 2009-07-19 04:41:14 +0000
1477@@ -388,6 +388,7 @@
1478 canBeDeleted
1479 deletionRequirements
1480 associatedProductSeries
1481+ getProductSeriesPushingTranslations
1482 associatedSuiteSourcePackages
1483 subscribe
1484 getSubscription
1485
1486=== modified file 'lib/lp/code/feed/branch.py'
1487--- lib/lp/code/feed/branch.py 2009-07-17 00:26:05 +0000
1488+++ lib/lp/code/feed/branch.py 2009-07-21 07:27:48 +0000
1489@@ -40,6 +40,12 @@
1490 from lp.registry.interfaces.project import IProject
1491
1492
1493+def revision_feed_id(revision):
1494+    """Return a consistent feed id for a revision."""
1495+ return "tag:launchpad.net,%s:/revision/%s" % (
1496+ revision.revision_date.date().isoformat(), revision.revision_id)
1497+
1498+
1499 class BranchFeedEntry(FeedEntry):
1500 """See `IFeedEntry`."""
1501 def construct_id(self):
1502@@ -248,16 +254,35 @@
1503 Called by getItems which may cache the results.
1504 """
1505 cache = self._getRevisionCache()
1506- revisions = cache.public().getRevisions().config(limit=self.quantity)
1507+ revisions = cache.public().getRevisions()
1508 # Convert the items into their feed entry representation.
1509- items = [self.itemToFeedEntry(item) for item in revisions]
1510+ items = []
1511+ for revision in revisions:
1512+ content_view = self._createView(revision)
1513+ if content_view is not None:
1514+ entry = self.createFeedEntry(content_view)
1515+ items.append(entry)
1516+ # If we've hit our limit, stop iterating the revisions.
1517+ if len(items) >= self.quantity:
1518+ break
1519 return items
1520
1521- def itemToFeedEntry(self, revision):
1522- """See `IFeed`."""
1523- id = "tag:launchpad.net,%s:/revision/%s" % (
1524- revision.revision_date.date().isoformat(), revision.revision_id)
1525+ def _createView(self, revision):
1526+ """Make a view for this revision.
1527+
1528+ :return: A view class, or None.
1529+ """
1530 content_view = RevisionFeedContentView(revision, self.request, self)
1531+ # If there is no longer an associated branch for this, return None as
1532+ # we don't want to show this revision.
1533+ if content_view.branch is None:
1534+ return None
1535+ return content_view
1536+
1537+ def createFeedEntry(self, content_view):
1538+ """Create the FeedEntry for the specified view."""
1539+ revision = content_view.context
1540+ id = revision_feed_id(revision)
1541 content = content_view.render()
1542 content_data = FeedTypedData(content=content,
1543 content_type="html",
1544
1545=== added directory 'lib/lp/code/feed/tests'
1546=== added file 'lib/lp/code/feed/tests/__init__.py'
1547--- lib/lp/code/feed/tests/__init__.py 1970-01-01 00:00:00 +0000
1548+++ lib/lp/code/feed/tests/__init__.py 2009-07-20 05:51:06 +0000
1549@@ -0,0 +1,4 @@
1550+# Copyright 2009 Canonical Ltd. This software is licensed under the
1551+# GNU Affero General Public License version 3 (see the file LICENSE).
1552+
1553+"""Test for feeds relating to Launchpad code live here."""
1554
1555=== added file 'lib/lp/code/feed/tests/test_revision.py'
1556--- lib/lp/code/feed/tests/test_revision.py 1970-01-01 00:00:00 +0000
1557+++ lib/lp/code/feed/tests/test_revision.py 2009-07-21 03:30:59 +0000
1558@@ -0,0 +1,141 @@
1559+# Copyright 2009 Canonical Ltd. This software is licensed under the
1560+# GNU Affero General Public License version 3 (see the file LICENSE).
1561+
1562+"""Tests for the revision feeds."""
1563+
1564+__metaclass__ = type
1565+
1566+from datetime import datetime
1567+import unittest
1568+
1569+from pytz import UTC
1570+from zope.component import getUtility
1571+
1572+from canonical.launchpad.webapp.servers import LaunchpadTestRequest
1573+from canonical.testing.layers import DatabaseFunctionalLayer
1574+from lp.code.feed.branch import (
1575+ ProductRevisionFeed, revision_feed_id, RevisionListingFeed)
1576+from lp.code.interfaces.revision import IRevisionSet
1577+from lp.testing import login_person, TestCaseWithFactory
1578+
1579+
1580+class TestRevisionFeedId(TestCaseWithFactory):
1581+ """Test the revision_feed_id function."""
1582+
1583+ layer = DatabaseFunctionalLayer
1584+
1585+ def test_format(self):
1586+ # The id contains the iso format of the date part of the revision
1587+ # date, and the revision id.
1588+ revision_date = datetime(2009, 07, 21, 12, tzinfo=UTC)
1589+ revision = self.factory.makeRevision(
1590+ revision_date=revision_date, rev_id="test_revision_id")
1591+ feed_id = revision_feed_id(revision)
1592+ self.assertEqual(
1593+ 'tag:launchpad.net,2009-07-21:/revision/test_revision_id',
1594+ feed_id)
1595+
1596+
1597+class TestRevisionFeed(TestCaseWithFactory):
1598+ """Tests for the methods of the RevisionListingFeed base class."""
1599+
1600+ layer = DatabaseFunctionalLayer
1601+
1602+ def _createBranchWithRevision(self):
1603+ """Create a branch with a linked, cached revision.
1604+
1605+ :return: a tuple of (branch, revision)
1606+ """
1607+ revision = self.factory.makeRevision()
1608+ branch = self.factory.makeBranch()
1609+ branch.createBranchRevision(1, revision)
1610+ getUtility(IRevisionSet).updateRevisionCacheForBranch(branch)
1611+ return branch, revision
1612+
1613+ def _createFeed(self):
1614+ """Create and return a RevisionListingFeed instance."""
1615+        # The FeedBase class determines the feed type by the end of the
1616+ # requested URL, so forcing .atom here.
1617+ return RevisionListingFeed(
1618+ None, LaunchpadTestRequest(
1619+ SERVER_URL="http://example.com/fake.atom"))
1620+
1621+ def test_createView(self):
1622+ # Revisions that are linked to branches are shown in the feed.
1623+
1624+ # Since we are calling into a base class that would normally take a
1625+ # context and a request, we need to give it something - None should be
1626+ # fine.
1627+ branch, revision = self._createBranchWithRevision()
1628+ revision_feed = self._createFeed()
1629+ view = revision_feed._createView(revision)
1630+ self.assertEqual(revision, view.context)
1631+ self.assertEqual(branch, view.branch)
1632+
1633+ def test_createView_revision_not_in_branch(self):
1634+ # If a revision is in the RevisionCache table, but no longer
1635+ # associated with a public branch, then the createView call will
1636+        # return None to indicate not to show this revision.
1637+ branch, revision = self._createBranchWithRevision()
1638+ # Now delete the branch.
1639+ login_person(branch.owner)
1640+ branch.destroySelf()
1641+ revision_feed = self._createFeed()
1642+ view = revision_feed._createView(revision)
1643+ self.assertIs(None, view)
1644+
1645+
1646+class TestProductRevisionFeed(TestCaseWithFactory):
1647+ """Tests for the ProductRevisionFeed."""
1648+
1649+ layer = DatabaseFunctionalLayer
1650+
1651+ def _createBranchWithRevision(self, product):
1652+ """Create a branch with a linked, cached revision.
1653+
1654+ :return: a tuple of (branch, revision)
1655+ """
1656+ revision = self.factory.makeRevision()
1657+ branch = self.factory.makeProductBranch(product=product)
1658+ branch.createBranchRevision(1, revision)
1659+ getUtility(IRevisionSet).updateRevisionCacheForBranch(branch)
1660+ return branch, revision
1661+
1662+ def _createFeed(self, product):
1663+ """Create and return a ProductRevisionFeed instance."""
1664+        # The FeedBase class determines the feed type by the end of the
1665+ # requested URL, so forcing .atom here.
1666+ return ProductRevisionFeed(
1667+ product, LaunchpadTestRequest(
1668+ SERVER_URL="http://example.com/fake.atom"))
1669+
1670+ def test_getItems_empty(self):
1671+ # If there are no revisions for a product, there are no items.
1672+ product = self.factory.makeProduct()
1673+ feed = self._createFeed(product)
1674+ self.assertEqual([], feed.getItems())
1675+
1676+ def test_getItems_revisions(self):
1677+ # If there are revisions in branches for the project, these are
1678+ # returned in the feeds items.
1679+ product = self.factory.makeProduct()
1680+ branch, revision = self._createBranchWithRevision(product)
1681+ feed = self._createFeed(product)
1682+ [item] = feed.getItems()
1683+ self.assertEqual(revision_feed_id(revision), item.id)
1684+
1685+ def test_getItems_skips_revisions_not_in_branches(self):
1686+ # If a revision was added to a project, but the only branch that
1687+ # referred to that revision was subsequently removed, the revision
1688+ # does not show in the feed.
1689+ product = self.factory.makeProduct()
1690+ branch, revision = self._createBranchWithRevision(product)
1691+ # Now delete the branch.
1692+ login_person(branch.owner)
1693+ branch.destroySelf()
1694+ feed = self._createFeed(product)
1695+ self.assertEqual([], feed.getItems())
1696+
1697+
1698+def test_suite():
1699+ return unittest.TestLoader().loadTestsFromName(__name__)
1700
1701=== modified file 'lib/lp/code/interfaces/branch.py'
1702--- lib/lp/code/interfaces/branch.py 2009-07-17 00:26:05 +0000
1703+++ lib/lp/code/interfaces/branch.py 2009-07-19 04:41:14 +0000
1704@@ -780,6 +780,15 @@
1705 series as a branch.
1706 """
1707
1708+ def getProductSeriesPushingTranslations():
1709+ """Return sequence of product series pushing translations here.
1710+
1711+ These are any `ProductSeries` that have this branch as their
1712+ translations_branch. It should normally be at most one, but
1713+ there's nothing stopping people from combining translations
1714+ branches.
1715+ """
1716+
1717 def associatedSuiteSourcePackages():
1718 """Return the suite source packages that this branch is linked to."""
1719
1720
1721=== modified file 'lib/lp/code/model/branch.py'
1722--- lib/lp/code/model/branch.py 2009-07-17 00:26:05 +0000
1723+++ lib/lp/code/model/branch.py 2009-07-19 04:41:14 +0000
1724@@ -484,6 +484,9 @@
1725 spec_link.destroySelf))
1726 for series in self.associatedProductSeries():
1727 alteration_operations.append(ClearSeriesBranch(series, self))
1728+ for series in self.getProductSeriesPushingTranslations():
1729+ alteration_operations.append(
1730+ ClearSeriesTranslationsBranch(series, self))
1731
1732 series_set = getUtility(IFindOfficialBranchLinks)
1733 alteration_operations.extend(
1734@@ -529,6 +532,14 @@
1735 ProductSeries,
1736 ProductSeries.branch == self)
1737
1738+ def getProductSeriesPushingTranslations(self):
1739+ """See `IBranch`."""
1740+ # Imported here to avoid circular import.
1741+ from lp.registry.model.productseries import ProductSeries
1742+ return Store.of(self).find(
1743+ ProductSeries,
1744+ ProductSeries.translations_branch == self)
1745+
1746 def associatedSuiteSourcePackages(self):
1747 """See `IBranch`."""
1748 series_set = getUtility(IFindOfficialBranchLinks)
1749@@ -910,6 +921,21 @@
1750 self.affected_object.syncUpdate()
1751
1752
1753+class ClearSeriesTranslationsBranch(DeletionOperation):
1754+ """Deletion operation that clears a series' translations branch."""
1755+
1756+ def __init__(self, series, branch):
1757+ DeletionOperation.__init__(
1758+ self, series,
1759+ _('This series exports its translations to this branch.'))
1760+ self.branch = branch
1761+
1762+ def __call__(self):
1763+ if self.affected_object.branch == self.branch:
1764+ self.affected_object.branch = None
1765+ self.affected_object.syncUpdate()
1766+
1767+
1768 class ClearOfficialPackageBranch(DeletionOperation):
1769 """Deletion operation that clears an official package branch."""
1770
1771
1772=== modified file 'lib/lp/code/model/directbranchcommit.py'
1773--- lib/lp/code/model/directbranchcommit.py 2009-07-17 18:46:25 +0000
1774+++ lib/lp/code/model/directbranchcommit.py 2009-07-21 04:21:23 +0000
1775@@ -46,6 +46,7 @@
1776 """
1777 is_open = False
1778 is_locked = False
1779+ commit_builder = None
1780
1781 def __init__(self, db_branch, committer=None):
1782 """Create context for direct commit to branch.
1783@@ -73,6 +74,9 @@
1784 committer = db_branch.owner
1785 self.committer = committer
1786
1787+ # Directories we create on the branch, and their ids.
1788+ self.path_ids = {}
1789+
1790 mirrorer = make_branch_mirrorer(self.db_branch.branch_type)
1791 self.bzrbranch = mirrorer.open(self.db_branch.getPullURL())
1792 self.bzrbranch.lock_write()
1793@@ -81,30 +85,40 @@
1794 try:
1795 self.revision_tree = self.bzrbranch.basis_tree()
1796 self.transform_preview = TransformPreview(self.revision_tree)
1797+ assert self.transform_preview.find_conflicts() == [], (
1798+ "TransformPreview is not in a consistent state.")
1799
1800 self.is_open = True
1801 except:
1802 self.unlock()
1803- self.is_locked = False
1804 raise
1805
1806 self.files = set()
1807
1808 def _getDir(self, path):
1809 """Get trans_id for directory "path." Create if necessary."""
1810- dir = self.revision_tree.path2id(path)
1811- if dir:
1812+ path_id = self.path_ids.get(path)
1813+ if path_id:
1814+ # This is a path we've just created in the branch.
1815+ return path_id
1816+
1817+ if self.revision_tree.path2id(path):
1818+ # This is a path that was already in the branch.
1819 return self.transform_preview.trans_id_tree_path(path)
1820
1821+ # Look up (or create) parent directory.
1822 parent_dir, dirname = os.path.split(path)
1823 if dirname:
1824 parent_id = self._getDir(parent_dir)
1825 else:
1826 parent_id = None
1827
1828+ # Create new directory.
1829 dirfile_id = gen_file_id(path)
1830- return self.transform_preview.new_directory(
1831+ path_id = self.transform_preview.new_directory(
1832 dirname, parent_id, dirfile_id)
1833+ self.path_ids[path] = path_id
1834+ return path_id
1835
1836 def writeFile(self, path, contents):
1837 """Write file to branch; may be an update or a new file.
1838@@ -151,6 +165,7 @@
1839 assert self.is_open, "Committing closed DirectBranchCommit."
1840 assert self.is_locked, "Not locked at commit time."
1841
1842+ builder = None
1843 try:
1844 self._checkForRace()
1845
1846@@ -170,6 +185,7 @@
1847 builder.finish_inventory()
1848
1849 new_rev_id = builder.commit(commit_message)
1850+ builder = None
1851
1852 revno, old_rev_id = self.bzrbranch.last_revision_info()
1853 self.bzrbranch.set_last_revision_info(revno + 1, new_rev_id)
1854@@ -177,11 +193,14 @@
1855 IMasterObject(self.db_branch).requestMirror()
1856
1857 finally:
1858+ if builder:
1859+ builder.abort()
1860 self.unlock()
1861 self.is_open = False
1862
1863 def unlock(self):
1864 """Release commit lock, if held."""
1865 if self.is_locked:
1866+ self.transform_preview.finalize()
1867 self.bzrbranch.unlock()
1868 self.is_locked = False
1869
1870=== modified file 'lib/lp/code/model/tests/test_branch.py'
1871--- lib/lp/code/model/tests/test_branch.py 2009-07-17 00:26:05 +0000
1872+++ lib/lp/code/model/tests/test_branch.py 2009-07-19 04:41:14 +0000
1873@@ -558,6 +558,14 @@
1874 " is not deletable.")
1875 self.assertRaises(CannotDeleteBranch, self.branch.destroySelf)
1876
1877+ def test_productSeriesTranslationsBranchDisablesDeletion(self):
1878+ self.product.development_focus.translations_branch = self.branch
1879+ syncUpdate(self.product.development_focus)
1880+ self.assertEqual(self.branch.canBeDeleted(), False,
1881+ "A branch that is a translations branch for a "
1882+ "product series is not deletable.")
1883+ self.assertRaises(CannotDeleteBranch, self.branch.destroySelf)
1884+
1885 def test_revisionsDeletable(self):
1886 """A branch that has some revisions can be deleted."""
1887 revision = self.factory.makeRevision()
1888
1889=== modified file 'lib/lp/code/model/tests/test_branchjob.py'
1890--- lib/lp/code/model/tests/test_branchjob.py 2009-07-17 00:26:05 +0000
1891+++ lib/lp/code/model/tests/test_branchjob.py 2009-07-19 04:41:14 +0000
1892@@ -622,6 +622,8 @@
1893 bmp = self.factory.makeBranchMergeProposal(target_branch=job.branch,
1894 registrant=hacker)
1895 bmp.source_branch.last_scanned_id = 'rev3-id'
1896+ transaction.commit()
1897+ self.layer.switchDbUser(config.sendbranchmail.dbuser)
1898 message = job.getRevisionMessage('rev2d-id', 1)
1899 self.assertEqual(
1900 'Merge authors:\n'
1901
1902=== modified file 'lib/lp/code/stories/branches/xx-branch-visibility-policy.txt'
1903--- lib/lp/code/stories/branches/xx-branch-visibility-policy.txt 2009-07-09 19:54:12 +0000
1904+++ lib/lp/code/stories/branches/xx-branch-visibility-policy.txt 2009-07-20 18:22:54 +0000
1905@@ -1,7 +1,7 @@
1906 = Branch Visibility Policy Pages =
1907
1908 Controlling the branch visibility policies for products and projects is only
1909-available to launchpad admins.
1910+available to launchpad admins and launchpad commercial admins.
1911
1912 Not to anonymous people.
1913
1914@@ -43,14 +43,19 @@
1915 ...
1916 Unauthorized: ...
1917
1918-Launchpad admins however, can get to it.
1919+Launchpad admins, however, can get to the branch visibility
1920+overview page and to the page to actually modify the policies.
1921
1922 >>> admin_browser.open('http://code.launchpad.dev/firefox')
1923 >>> admin_browser.getLink('Define branch visibility').click()
1924 >>> print admin_browser.url
1925 http://launchpad.dev/firefox/+branchvisibility
1926
1927-And members of the Launchpad Commercial team can view the page.
1928+ >>> admin_browser.getLink('Customise policy for Mozilla Firefox').click()
1929+ >>> print admin_browser.url
1930+ http://launchpad.dev/firefox/+addbranchvisibilitypolicy
1931+
1932+And members of the Launchpad Commercial team can view the pages.
1933
1934 >>> commercial_browser = setupBrowser(
1935 ... auth='Basic commercial-member@canonical.com:test')
1936@@ -59,6 +64,10 @@
1937 >>> print commercial_browser.url
1938 http://launchpad.dev/firefox/+branchvisibility
1939
1940+ >>> commercial_browser.getLink('Customise policy for Mozilla Firefox').click()
1941+ >>> print commercial_browser.url
1942+ http://launchpad.dev/firefox/+addbranchvisibilitypolicy
1943+
1944
1945 == Default policies ==
1946
1947@@ -68,6 +77,8 @@
1948 specify any branch visibility policy items and there is an inherited branch
1949 visibility policy, then that policy is used.
1950
1951+ >>> admin_browser.open('http://launchpad.dev/firefox/+branchvisibility')
1952+
1953 >>> print extract_text(find_tag_by_id(admin_browser.contents, 'inherited'))
1954 Using inherited policy from the Mozilla Project.
1955
1956@@ -192,6 +203,14 @@
1957
1958 See? All still there.
1959
1960+Before we remove them, let's ensure that the commercial admins can see
1961+the removal page.
1962+
1963+ >>> commercial_browser.open('http://launchpad.dev/firefox/+branchvisibility')
1964+ >>> commercial_browser.getLink('Remove policy items').click()
1965+ >>> print commercial_browser.url
1966+ http://launchpad.dev/firefox/+removebranchvisibilitypolicy
1967+
1968 Now to remove two. The override for Everyone, and Launchpad Devs.
1969
1970 >>> admin_browser.getControl('Everyone: Public').click()
1971@@ -204,13 +223,15 @@
1972 Ubuntu Gnome Team: Private
1973
1974 As you can see there is still a default visibility of Public. This
1975-is now implicit rather than explicit. So if we go bact to remove more
1976+is now implicit rather than explicit. So if we go back to remove more
1977 items, there is only one more policy item to remove. Once that is removed
1978-Firefox will go back to inheriting the polices of Mozilla.
1979-
1980- >>> admin_browser.getLink('Remove policy items').click()
1981- >>> admin_browser.getControl('Ubuntu Gnome Team: Private').click()
1982- >>> admin_browser.getControl('Remove Selected Policy Items').click()
1983-
1984- >>> print_tag_with_id(admin_browser.contents, 'inherited')
1985+Firefox will go back to inheriting the policies of Mozilla. Let's let
1986+the commercial admin do the removal to ensure he has the permission.
1987+
1988+ >>> commercial_browser.open('http://launchpad.dev/firefox/+branchvisibility')
1989+ >>> commercial_browser.getLink('Remove policy items').click()
1990+ >>> commercial_browser.getControl('Ubuntu Gnome Team: Private').click()
1991+ >>> commercial_browser.getControl('Remove Selected Policy Items').click()
1992+
1993+ >>> print_tag_with_id(commercial_browser.contents, 'inherited')
1994 Using inherited policy from the Mozilla Project.
1995
1996=== modified file 'lib/lp/code/templates/branch-index.pt'
1997--- lib/lp/code/templates/branch-index.pt 2009-07-17 17:59:07 +0000
1998+++ lib/lp/code/templates/branch-index.pt 2009-07-19 04:41:14 +0000
1999@@ -449,6 +449,17 @@
2000
2001 </div>
2002
2003+ <div
2004+ id="translations-sources"
2005+ tal:define="translations_sources view/translations_sources"
2006+ tal:condition="translations_sources">
2007+ <h2>Automatic translations commits</h2>
2008+ <ul>
2009+ <tal:sources-list repeat="source translations_sources">
2010+ <li tal:content="structure source/fmt:link">~foo/example/branch</li>
2011+ </tal:sources-list>
2012+ </ul>
2013+ </div>
2014
2015 <div id="recent-revisions">
2016 <h2 style="clear: both">Recent revisions</h2>
2017
2018=== modified file 'lib/lp/code/tests/test_directbranchcommit.py'
2019--- lib/lp/code/tests/test_directbranchcommit.py 2009-07-17 18:46:25 +0000
2020+++ lib/lp/code/tests/test_directbranchcommit.py 2009-07-21 04:21:23 +0000
2021@@ -13,17 +13,13 @@
2022 from canonical.testing.layers import ZopelessDatabaseLayer
2023
2024
2025-class TestDirectBranchCommit(TestCaseWithFactory):
2026- """Test `DirectBranchCommit`."""
2027-
2028- layer = ZopelessDatabaseLayer
2029-
2030+class DirectBranchCommitTestCase(TestCaseWithFactory):
2031+ """Base class for `DirectBranchCommit` tests."""
2032 db_branch = None
2033 committer = None
2034
2035 def setUp(self):
2036- super(TestDirectBranchCommit, self).setUp()
2037-
2038+ super(DirectBranchCommitTestCase, self).setUp()
2039 self.useBzrBranches()
2040
2041 self.series = self.factory.makeProductSeries()
2042@@ -33,8 +29,10 @@
2043 self.series.translations_branch = self.db_branch
2044
2045 self._setUpCommitter()
2046+ self.addCleanup(self._tearDownCommitter)
2047
2048 def _setUpCommitter(self, update_last_scanned_id=True):
2049+ """Clean up any existing `DirectBranchCommit`, set up a new one."""
2050 if self.committer:
2051 self.committer.unlock()
2052
2053@@ -43,15 +41,22 @@
2054 self.db_branch.last_scanned_id = (
2055 self.committer.bzrbranch.last_revision())
2056
2057- def tearDown(self):
2058- self.committer.unlock()
2059+ def _tearDownCommitter(self):
2060+ if self.committer:
2061+ self.committer.unlock()
2062
2063 def _getContents(self):
2064 """Return branch contents as dict mapping filenames to contents."""
2065 return map_branch_contents(self.committer.db_branch.getPullURL())
2066
2067+
2068+class TestDirectBranchCommit(DirectBranchCommitTestCase):
2069+ """Test `DirectBranchCommit`."""
2070+
2071+ layer = ZopelessDatabaseLayer
2072+
2073 def test_DirectBranchCommit_commits_no_changes(self):
2074- # Committing to an empty branch leaves the branch empty.
2075+ # Committing to an empty branch leaves an empty branch empty.
2076 self.committer.commit('')
2077 self.assertEqual({}, self._getContents())
2078
2079@@ -94,6 +99,19 @@
2080 }
2081 self.assertEqual(expected, self._getContents())
2082
2083+ def test_DirectBranchCommit_reuses_new_directories(self):
2084+ # If a directory doesn't exist in the committed branch, creating
2085+ # it twice would be an error. DirectBranchCommit doesn't do
2086+ # that.
2087+ self.committer.writeFile('foo/x.txt', 'x')
2088+ self.committer.writeFile('foo/y.txt', 'y')
2089+ self.committer.commit('')
2090+ expected = {
2091+ 'foo/x.txt': 'x',
2092+ 'foo/y.txt': 'y',
2093+ }
2094+ self.assertEqual(expected, self._getContents())
2095+
2096 def test_DirectBranchCommit_writes_new_file_twice(self):
2097 # If you write the same new file multiple times before
2098 # committing, the original wins.
2099@@ -123,5 +141,65 @@
2100 self.assertRaises(ConcurrentUpdateError, self.committer.commit, '')
2101
2102
2103+class TestDirectBranchCommit_getDir(DirectBranchCommitTestCase):
2104+ """Test `DirectBranchCommit._getDir`."""
2105+
2106+ layer = ZopelessDatabaseLayer
2107+
2108+ def test_getDir_creates_root(self):
2109+ # An id is created even for the branch root directory.
2110+ self.assertFalse('' in self.committer.path_ids)
2111+ root_id = self.committer._getDir('')
2112+ self.assertNotEqual(None, root_id)
2113+ self.assertTrue('' in self.committer.path_ids)
2114+ self.assertEqual(self.committer.path_ids[''], root_id)
2115+ self.committer.commit('')
2116+
2117+ def test_getDir_creates_dir(self):
2118+ # _getDir will create a new directory, under the root.
2119+ self.assertFalse('dir' in self.committer.path_ids)
2120+ dir_id = self.committer._getDir('dir')
2121+ self.assertTrue('' in self.committer.path_ids)
2122+ self.assertTrue('dir' in self.committer.path_ids)
2123+ self.assertEqual(self.committer.path_ids['dir'], dir_id)
2124+ self.assertNotEqual(self.committer.path_ids[''], dir_id)
2125+ self.committer.commit('')
2126+
2127+ def test_getDir_creates_subdir(self):
2128+ # _getDir will create nested directories.
2129+ subdir_id = self.committer._getDir('dir/subdir')
2130+ self.assertTrue('' in self.committer.path_ids)
2131+ self.assertTrue('dir' in self.committer.path_ids)
2132+ self.assertTrue('dir/subdir' in self.committer.path_ids)
2133+ self.assertEqual(self.committer.path_ids['dir/subdir'], subdir_id)
2134+ self.committer.commit('')
2135+
2136+ def test_getDir_finds_existing_dir(self):
2137+ # _getDir finds directories that already existed in a previously
2138+ # committed version of the branch.
2139+ existing_id = self.committer._getDir('po')
2140+ self._setUpCommitter()
2141+ dir_id = self.committer._getDir('po')
2142+ self.assertEqual(existing_id, dir_id)
2143+ self.committer.commit('')
2144+
2145+ def test_getDir_creates_dir_in_existing_dir(self):
2146+ # _getDir creates directories inside ones that already existed
2147+ # in a previously committed version of the branch.
2148+ existing_id = self.committer._getDir('po')
2149+ self._setUpCommitter()
2150+ new_dir_id = self.committer._getDir('po/main/files')
2151+ self.assertTrue('po/main' in self.committer.path_ids)
2152+ self.assertTrue('po/main/files' in self.committer.path_ids)
2153+ self.assertEqual(self.committer.path_ids['po/main/files'], new_dir_id)
2154+ self.committer.commit('')
2155+
2156+ def test_getDir_reuses_new_id(self):
2157+ # If a directory was newly created, _getDir will reuse its id.
2158+ dir_id = self.committer._getDir('foo/bar')
2159+ self.assertEqual(dir_id, self.committer._getDir('foo/bar'))
2160+ self.committer.commit('')
2161+
2162+
2163 def test_suite():
2164 return TestLoader().loadTestsFromName(__name__)
2165
2166=== modified file 'lib/lp/codehosting/scanner/tests/test_buglinks.py'
2167--- lib/lp/codehosting/scanner/tests/test_buglinks.py 2009-07-17 00:26:05 +0000
2168+++ lib/lp/codehosting/scanner/tests/test_buglinks.py 2009-07-19 04:41:14 +0000
2169@@ -12,16 +12,16 @@
2170 import zope.component.event
2171 from zope.component import getUtility
2172
2173+from canonical.config import config
2174+from canonical.launchpad.interfaces import (
2175+ IBugBranchSet, IBugSet, NotFoundError)
2176+from canonical.testing.layers import LaunchpadZopelessLayer
2177+
2178 from lp.codehosting.scanner.buglinks import got_new_revision, BugBranchLinker
2179 from lp.codehosting.scanner.fixture import make_zope_event_fixture
2180 from lp.codehosting.scanner.tests.test_bzrsync import BzrSyncTestCase
2181-from canonical.config import config
2182-from canonical.launchpad.interfaces import (
2183- IBugBranchSet, IBugSet, ILaunchpadCelebrities,
2184- NotFoundError)
2185+from lp.soyuz.interfaces.publishing import PackagePublishingPocket
2186 from lp.testing import TestCase
2187-from lp.testing.factory import LaunchpadObjectFactory
2188-from canonical.testing import LaunchpadZopelessLayer
2189
2190
2191 class RevisionPropertyParsing(TestCase):
2192@@ -153,6 +153,27 @@
2193 self.syncBazaarBranchToDatabase(self.bzr_branch, self.db_branch)
2194 self.assertBugBranchLinked(self.bug1, self.db_branch)
2195
2196+ def makePackageBranch(self):
2197+ LaunchpadZopelessLayer.switchDbUser(self.lp_db_user)
2198+ try:
2199+ branch = self.factory.makePackageBranch()
2200+ branch.sourcepackage.setBranch(
2201+ PackagePublishingPocket.RELEASE, branch, branch.owner)
2202+ LaunchpadZopelessLayer.txn.commit()
2203+ finally:
2204+ LaunchpadZopelessLayer.switchDbUser(config.branchscanner.dbuser)
2205+ return branch
2206+
2207+ def test_linking_bug_to_official_package_branch(self):
2208+ # We can link a bug to an official package branch. Test added to catch
2209+ # bug 391303.
2210+ self.commitRevision(
2211+ rev_id='rev1',
2212+ revprops={'bugs': '%s fixed' % self.getBugURL(self.bug1)})
2213+ branch = self.makePackageBranch()
2214+ self.syncBazaarBranchToDatabase(self.bzr_branch, branch)
2215+ self.assertBugBranchLinked(self.bug1, branch)
2216+
2217 def test_knownMainlineRevisionsDoesntMakeLink(self):
2218 """Don't add BugBranches for known mainline revision."""
2219 self.commitRevision(
2220
2221=== modified file 'lib/lp/registry/browser/configure.zcml'
2222--- lib/lp/registry/browser/configure.zcml 2009-07-18 00:05:49 +0000
2223+++ lib/lp/registry/browser/configure.zcml 2009-07-19 04:41:14 +0000
2224@@ -1753,6 +1753,13 @@
2225 permission="launchpad.Admin"
2226 template="../templates/productseries-review.pt"/>
2227 <browser:page
2228+ for="lp.registry.interfaces.productseries.IProductSeries"
2229+ name="+link-translations-branch"
2230+ class="lp.registry.browser.productseries.LinkTranslationsBranchView"
2231+ template="../templates/productseries-link-translations-branch.pt"
2232+ facet="translations"
2233+ permission="launchpad.Edit"/>
2234+ <browser:page
2235 name="+ask-a-question-button"
2236 for="lp.registry.interfaces.productseries.IProductSeries"
2237 class="canonical.launchpad.browser.AskAQuestionButtonView"
2238
2239=== modified file 'lib/lp/registry/browser/productseries.py'
2240--- lib/lp/registry/browser/productseries.py 2009-07-17 00:26:05 +0000
2241+++ lib/lp/registry/browser/productseries.py 2009-07-21 22:27:22 +0000
2242@@ -7,6 +7,7 @@
2243
2244 __all__ = [
2245 'get_series_branch_error',
2246+ 'LinkTranslationsBranchView',
2247 'ProductSeriesBreadcrumbBuilder',
2248 'ProductSeriesBugsMenu',
2249 'ProductSeriesDeleteView',
2250@@ -47,14 +48,13 @@
2251 from lp.code.interfaces.codeimport import (
2252 ICodeImportSet)
2253 from lp.services.worlddata.interfaces.country import ICountry
2254-from lp.bugs.interfaces.bugtask import BugTaskSearchParams, IBugTaskSet
2255+from lp.bugs.interfaces.bugtask import IBugTaskSet
2256 from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities
2257 from lp.registry.browser import StatusCount
2258 from lp.translations.interfaces.potemplate import IPOTemplateSet
2259 from lp.translations.interfaces.productserieslanguage import (
2260 IProductSeriesLanguageSet)
2261 from lp.services.worlddata.interfaces.language import ILanguageSet
2262-from canonical.launchpad.searchbuilder import any
2263 from canonical.launchpad.webapp import (
2264 action, ApplicationMenu, canonical_url, custom_widget,
2265 enabled_with_permission, LaunchpadEditFormView,
2266@@ -69,6 +69,7 @@
2267
2268 from lp.registry.browser import (
2269 MilestoneOverlayMixin, RegistryDeleteViewMixin)
2270+from lp.registry.interfaces.distroseries import DistroSeriesStatus
2271 from lp.registry.interfaces.productseries import IProductSeries
2272 from lp.registry.interfaces.sourcepackagename import (
2273 ISourcePackageNameSet)
2274@@ -417,6 +418,17 @@
2275 return (branch is not None and
2276 check_permission('launchpad.View', branch))
2277
2278+ @property
2279+ def is_obsolete(self):
2280+        """Return True if the series is OBSOLETE.
2281+
2282+ Obsolete series do not need to display as much information as other
2283+ series. Accessing private bugs is an expensive operation and showing
2284+ them for obsolete series can be a problem if many series are being
2285+ displayed.
2286+ """
2287+ return self.context.status == DistroSeriesStatus.OBSOLETE
2288+
2289 @cachedproperty
2290 def bugtask_status_counts(self):
2291 """A list StatusCounts summarising the targeted bugtasks."""
2292@@ -585,6 +597,27 @@
2293 """Do nothing and go back to the product series page."""
2294
2295
2296+class LinkTranslationsBranchView(LaunchpadEditFormView):
2297+ """View to set the series' translations export branch."""
2298+
2299+ schema = IProductSeries
2300+ field_names = ['translations_branch']
2301+
2302+ @property
2303+ def next_url(self):
2304+ return canonical_url(self.context) + '/+translations-settings'
2305+
2306+ @action(_('Update'), name='update')
2307+ def update_action(self, action, data):
2308+ self.updateContextFromData(data)
2309+ self.request.response.addInfoNotification(
2310+ 'Translations export branch updated.')
2311+
2312+ @action('Cancel', name='cancel', validator='validate_cancel')
2313+ def cancel_action(self, action, data):
2314+ """Do nothing and go back to the settings page."""
2315+
2316+
2317 class ProductSeriesLinkBranchFromCodeView(ProductSeriesLinkBranchView):
2318 """Set the branch link from the code overview page."""
2319
2320
2321=== modified file 'lib/lp/registry/browser/tests/productrelease-views.txt'
2322--- lib/lp/registry/browser/tests/productrelease-views.txt 2009-06-18 08:58:59 +0000
2323+++ lib/lp/registry/browser/tests/productrelease-views.txt 2009-07-22 04:08:03 +0000
2324@@ -119,7 +119,7 @@
2325 var select_menu = get_by_id('field.milestone_for_release');
2326 var create_milestone_link = Y.Node.create(
2327 '<a href="+addmilestone" id="create-milestone-link" ' +
2328- 'class="add js-action">Create milestone</a>'); ...
2329+ 'class="add js-action sprite">Create milestone</a>'); ...
2330
2331
2332 == Editing a a product release ==
2333
2334=== modified file 'lib/lp/registry/browser/tests/productseries-views.txt'
2335--- lib/lp/registry/browser/tests/productseries-views.txt 2009-07-07 11:24:01 +0000
2336+++ lib/lp/registry/browser/tests/productseries-views.txt 2009-07-21 16:30:28 +0000
2337@@ -60,6 +60,22 @@
2338 >>> print view.milestone_table_class
2339 listing
2340
2341+Obsolete series are less interesting than other series. The ProductSeriesView
2342+has an is_obsolete property that templates can check when choosing the content
2343+to display.
2344+
2345+ >>> from lp.registry.interfaces.distroseries import DistroSeriesStatus
2346+
2347+ >>> print series.status
2348+ Active Development
2349+ >>> view.is_obsolete
2350+ False
2351+
2352+ >>> series.status = DistroSeriesStatus.OBSOLETE
2353+ >>> view = create_view(series, '+index')
2354+ >>> view.is_obsolete
2355+ True
2356+
2357
2358 == Delete ProductSeries ==
2359
2360
2361=== modified file 'lib/lp/registry/configure.zcml'
2362--- lib/lp/registry/configure.zcml 2009-07-17 00:26:05 +0000
2363+++ lib/lp/registry/configure.zcml 2009-07-19 04:41:14 +0000
2364@@ -1302,7 +1302,7 @@
2365 <require
2366 permission="launchpad.Edit"
2367 set_attributes="product name owner driver summary branch
2368- status releasefileglob
2369+ translations_branch status releasefileglob
2370 translations_autoimport_mode"/>
2371 <require
2372 permission="launchpad.AnyPerson"
2373
2374=== modified file 'lib/lp/registry/doc/person.txt'
2375--- lib/lp/registry/doc/person.txt 2009-07-16 22:17:03 +0000
2376+++ lib/lp/registry/doc/person.txt 2009-07-18 01:03:09 +0000
2377@@ -809,6 +809,7 @@
2378 Ubuntu branches (ubuntu-branches): []
2379 Ubuntu Doc Team (doc): [u'doc@lists.ubuntu.com']
2380 Ubuntu Gnome Team (name18): []
2381+ Ubuntu Security Team (ubuntu-security): []
2382 Ubuntu Team (ubuntu-team): [u'support@ubuntu.com']
2383 Ubuntu Translators (ubuntu-translators): []
2384 Ubuntu-branches-owner (ubuntu-branches-owner): [u'ubuntu-branches-owner@example.com']
2385@@ -844,6 +845,7 @@
2386 testing Spanish team (testing-spanish-team): []
2387 Ubuntu Doc Team (doc): [u'doc@lists.ubuntu.com']
2388 Ubuntu Gnome Team (name18): []
2389+ Ubuntu Security Team (ubuntu-security): []
2390 Ubuntu Team (ubuntu-team): [u'support@ubuntu.com']
2391 Warty Gnome Team (warty-gnome): []
2392 Warty Security Team (name20): []
2393@@ -864,6 +866,7 @@
2394 testing Spanish team (testing-spanish-team): []
2395 Ubuntu Doc Team (doc): [u'doc@lists.ubuntu.com']
2396 Ubuntu Gnome Team (name18): []
2397+ Ubuntu Security Team (ubuntu-security): []
2398 Ubuntu Team (ubuntu-team): [u'support@ubuntu.com']
2399 Warty Gnome Team (warty-gnome): []
2400 Warty Security Team (name20): []
2401@@ -921,6 +924,7 @@
2402 Simple Team (simple-team): []
2403 testing Spanish team (testing-spanish-team): []
2404 Ubuntu Gnome Team (name18): []
2405+ Ubuntu Security Team (ubuntu-security): []
2406 Ubuntu Team (ubuntu-team): [u'support@ubuntu.com']
2407 Warty Gnome Team (warty-gnome): []
2408 Warty Security Team (name20): []
2409@@ -939,6 +943,7 @@
2410 Simple Team (simple-team): []
2411 testing Spanish team (testing-spanish-team): []
2412 Ubuntu Gnome Team (name18): []
2413+ Ubuntu Security Team (ubuntu-security): []
2414 Ubuntu Team (ubuntu-team): [u'support@ubuntu.com']
2415 Warty Gnome Team (warty-gnome): []
2416 Warty Security Team (name20): []
2417
2418=== modified file 'lib/lp/registry/doc/vocabularies.txt'
2419--- lib/lp/registry/doc/vocabularies.txt 2009-07-17 16:22:12 +0000
2420+++ lib/lp/registry/doc/vocabularies.txt 2009-07-18 06:35:05 +0000
2421@@ -743,7 +743,8 @@
2422
2423 >>> [(p.name, getattr(p.teamowner, 'name', None))
2424 ... for p in vocab.search('ubuntu-team')]
2425- [(u'doc', None), (u'name18', u'sabdfl'), (u'ubuntu-team', u'sabdfl')]
2426+ [(u'doc', None), (u'name18', u'sabdfl'),
2427+ (u'ubuntu-security', u'kamion'), (u'ubuntu-team', u'sabdfl')]
2428
2429 But it doesn't include merged accounts:
2430
2431@@ -780,7 +781,7 @@
2432 >>> sorted(person.name for person in vocab.search('team'))
2433 [u'hwdb-team', u'name18', u'name20', u'name21', u'no-team-memberships',
2434 u'otherteam', u'simple-team', u'testing-spanish-team',
2435- u'ubuntu-team', u'warty-gnome']
2436+ u'ubuntu-security', u'ubuntu-team', u'warty-gnome']
2437
2438 Logging in as 'owner', who is a member of myteam shows that the token
2439 lookup still omits myteam.
2440@@ -789,7 +790,7 @@
2441 >>> sorted(person.name for person in vocab.search('team'))
2442 [u'hwdb-team', u'name18', u'name20', u'name21', u'no-team-memberships',
2443 u'otherteam', u'simple-team', u'testing-spanish-team',
2444- u'ubuntu-team', u'warty-gnome']
2445+ u'ubuntu-security', u'ubuntu-team', u'warty-gnome']
2446
2447 A PRIVATE team is displayed when the logged in user is a member of the team.
2448
2449@@ -802,17 +803,19 @@
2450 ... owner=commercial,
2451 ... visibility=PersonVisibility.PRIVATE)
2452 >>> sorted(person.name for person in vocab.search('team'))
2453- [u'hwdb-team', u'name18', u'name20', u'name21', u'no-team-memberships',
2454- u'otherteam', u'private-team', u'simple-team', u'testing-spanish-team',
2455+ [u'hwdb-team', u'name18', u'name20', u'name21',
2456+ u'no-team-memberships', u'otherteam', u'private-team',
2457+ u'simple-team', u'testing-spanish-team', u'ubuntu-security',
2458 u'ubuntu-team', u'warty-gnome']
2459
2460 The PRIVATE team is also displayed for Launchpad admins.
2461
2462 >>> login('foo.bar@canonical.com')
2463 >>> sorted(person.name for person in vocab.search('team'))
2464- [u'hwdb-team', u'name18', u'name20', u'name21', u'no-team-memberships',
2465- u'otherteam', u'private-team', u'simple-team', u'testing-spanish-team',
2466- u'ubuntu-team', u'warty-gnome']
2467+ [u'hwdb-team', u'name18', u'name20', u'name21',
2468+ u'no-team-memberships', u'otherteam',
2469+ u'private-team', u'simple-team', u'testing-spanish-team',
2470+ u'ubuntu-security', u'ubuntu-team', u'warty-gnome']
2471
2472 The PRIVATE team can be looked up via getTermByToken for a member of the team.
2473
2474@@ -826,7 +829,7 @@
2475 >>> sorted(person.name for person in vocab.search('team'))
2476 [u'hwdb-team', u'name18', u'name20', u'name21', u'no-team-memberships',
2477 u'otherteam', u'simple-team', u'testing-spanish-team',
2478- u'ubuntu-team', u'warty-gnome']
2479+ u'ubuntu-security', u'ubuntu-team', u'warty-gnome']
2480
2481 The anonymous user will not see the private team either.
2482
2483@@ -834,7 +837,7 @@
2484 >>> sorted(person.name for person in vocab.search('team'))
2485 [u'hwdb-team', u'name18', u'name20', u'name21', u'no-team-memberships',
2486 u'otherteam', u'simple-team', u'testing-spanish-team',
2487- u'ubuntu-team', u'warty-gnome']
2488+ u'ubuntu-security', u'ubuntu-team', u'warty-gnome']
2489
2490 Attempting to lookup the team via getTermByToken results in a
2491 LookupError, the same as if the team didn't exist, which is really
2492@@ -854,7 +857,6 @@
2493 >>> sorted(person.name for person in vocab.search(''))
2494 [...u'private-team'...]
2495
2496-
2497 A search for 'support' will give us only the persons which have support
2498 as part of their name or displayname, or the beginning of
2499 one of its email addresses.
2500@@ -890,8 +892,9 @@
2501 >>> login(ANONYMOUS)
2502 >>> [person.displayname for person in vocab.search('team')]
2503 [u'HWDB Team', u'Hoary Gnome Team', u'No Team Memberships',
2504- u'Other Team', u'Simple Team', u'Ubuntu Gnome Team', u'Ubuntu Team',
2505- u'Warty Gnome Team', u'Warty Security Team', u'testing Spanish team']
2506+ u'Other Team', u'Simple Team', u'Ubuntu Gnome Team',
2507+ u'Ubuntu Security Team', u'Ubuntu Team', u'Warty Gnome Team',
2508+ u'Warty Security Team', u'testing Spanish team']
2509
2510 >>> login(ANONYMOUS)
2511 >>> vocab.LIMIT
2512@@ -920,9 +923,10 @@
2513 search for 'team' should give us some of them:
2514
2515 >>> sorted(person.name for person in vocab.search('team'))
2516- [u'hwdb-team', u'name18', u'name20', u'name21', u'no-team-memberships',
2517- u'otherteam', u'simple-team', u'testing-spanish-team', u'ubuntu-team',
2518- u'warty-gnome']
2519+ [u'hwdb-team', u'name18', u'name20', u'name21',
2520+ u'no-team-memberships', u'otherteam',
2521+ u'simple-team', u'testing-spanish-team', u'ubuntu-security',
2522+ u'ubuntu-team', u'warty-gnome']
2523
2524
2525 === ValidTeam ===
2526@@ -961,6 +965,7 @@
2527 (u'ShipIt Administrators', u'Mark Shuttleworth'),
2528 (u'Simple Team', u'One Membership'),
2529 (u'Ubuntu Gnome Team', u'Mark Shuttleworth'),
2530+ (u'Ubuntu Security Team', u'Colin Watson'),
2531 (u'Ubuntu Team', u'Mark Shuttleworth'),
2532 (u'Ubuntu Translators', u'Rosetta Administrators'),
2533 (u'Ubuntu branches', u'Ubuntu-branches-owner'),
2534@@ -980,6 +985,7 @@
2535 ... for team in vocab.search('spanish | ubuntu'))
2536 [(u'Mirror Administrators', u'Mark Shuttleworth'),
2537 (u'Ubuntu Gnome Team', u'Mark Shuttleworth'),
2538+ (u'Ubuntu Security Team', u'Colin Watson'),
2539 (u'Ubuntu Team', u'Mark Shuttleworth'),
2540 (u'Ubuntu Translators', u'Rosetta Administrators'),
2541 (u'Ubuntu branches', u'Ubuntu-branches-owner'),
2542@@ -992,6 +998,7 @@
2543 (u'Other Team', u'Owner'),
2544 (u'Simple Team', u'One Membership'),
2545 (u'Ubuntu Gnome Team', u'Mark Shuttleworth'),
2546+ (u'Ubuntu Security Team', u'Colin Watson'),
2547 (u'Ubuntu Team', u'Mark Shuttleworth'),
2548 (u'Warty Gnome Team', u'Mark Shuttleworth'),
2549 (u'Warty Security Team', u'Mark Shuttleworth'),
2550@@ -1008,6 +1015,7 @@
2551 (u'Private Team', u'Commercial Member'),
2552 (u'Simple Team', u'One Membership'),
2553 (u'Ubuntu Gnome Team', u'Mark Shuttleworth'),
2554+ (u'Ubuntu Security Team', u'Colin Watson'),
2555 (u'Ubuntu Team', u'Mark Shuttleworth'),
2556 (u'Warty Gnome Team', u'Mark Shuttleworth'),
2557 (u'Warty Security Team', u'Mark Shuttleworth'),
2558@@ -1060,7 +1068,7 @@
2559 >>> team in vocab
2560 False
2561 >>> [person.name for person in vocab.search('ubuntu-team')]
2562- [u'name18']
2563+ [u'name18', u'ubuntu-security']
2564
2565 'ubuntu-team' is a member of 'guadamen', so 'guadamen' can't be a member
2566 of 'ubuntu-team'.
2567@@ -1103,7 +1111,7 @@
2568 >>> team in vocab
2569 False
2570 >>> [person.name for person in vocab.search('ubuntu-team')]
2571- [u'name18']
2572+ [u'name18', u'ubuntu-security']
2573
2574 'name16' is a valid owner for 'ubuntu-team'.
2575
2576
2577=== modified file 'lib/lp/registry/interfaces/productseries.py'
2578--- lib/lp/registry/interfaces/productseries.py 2009-07-17 00:26:05 +0000
2579+++ lib/lp/registry/interfaces/productseries.py 2009-07-19 04:41:14 +0000
2580@@ -251,6 +251,15 @@
2581 "A Bazaar branch to commit translation snapshots to. "
2582 "Leave blank to disable."))
2583
2584+ translations_branch = ReferenceChoice(
2585+ title=_("Translations export branch"),
2586+ vocabulary='HostedBranchRestrictedOnOwner',
2587+ schema=IBranch,
2588+ required=False,
2589+ description=_(
2590+ "A Bazaar branch to commit translation snapshots to. "
2591+ "Leave blank to disable."))
2592+
2593 def getRelease(version):
2594 """Get the release in this series that has the specified version.
2595 Return None is there is no such release.
2596
2597=== modified file 'lib/lp/registry/stories/productseries/xx-productseries-series.txt'
2598--- lib/lp/registry/stories/productseries/xx-productseries-series.txt 2009-06-29 23:15:46 +0000
2599+++ lib/lp/registry/stories/productseries/xx-productseries-series.txt 2009-07-21 16:30:28 +0000
2600@@ -54,12 +54,12 @@
2601 >>> print series_1_0['class']
2602 unhighlighted series
2603
2604-Any user can see that obsolete series are dimmed.
2605+Any user can see that obsolete series are dimmed. Obsolete series do not
2606+show bug status counts because it is expensive to retrieve the information.
2607
2608 >>> series_xxx = find_tag_by_id(content, 'series-xxx')
2609 >>> print extract_text(series_xxx)
2610 xxx series Obsolete
2611- Bugs targeted: None
2612 Blueprints targeted: None
2613 Use true GTK UI.
2614
2615
2616=== modified file 'lib/lp/registry/templates/codeofconduct-admin.pt'
2617--- lib/lp/registry/templates/codeofconduct-admin.pt 2009-07-17 17:59:07 +0000
2618+++ lib/lp/registry/templates/codeofconduct-admin.pt 2009-07-22 14:19:22 +0000
2619@@ -10,12 +10,8 @@
2620 i18n:domain="launchpad"
2621 >
2622 <body>
2623- <metal:heading fill-slot="pageheading">
2624+ <div metal:fill-slot="main">
2625 <h1>Administer code of conduct signatures</h1>
2626- </metal:heading>
2627-
2628-<div metal:fill-slot="main">
2629-
2630 <p>
2631 As an admin, you can
2632 approve or reject code of conduct signatures,
2633@@ -90,7 +86,6 @@
2634 </p>
2635 </tal:results>
2636
2637-</div>
2638-
2639-</body>
2640+ </div>
2641+ </body>
2642 </html>
2643
2644=== modified file 'lib/lp/registry/templates/productrelease-add-from-series.pt'
2645--- lib/lp/registry/templates/productrelease-add-from-series.pt 2009-07-17 17:59:07 +0000
2646+++ lib/lp/registry/templates/productrelease-add-from-series.pt 2009-07-21 17:29:12 +0000
2647@@ -51,7 +51,7 @@
2648 var select_menu = get_by_id('field.milestone_for_release');
2649 var create_milestone_link = Y.Node.create(
2650 '<a href="+addmilestone" id="create-milestone-link" ' +
2651- 'class="add js-action">Create milestone</a>');
2652+ 'class="add js-action sprite">Create milestone</a>');
2653 select_menu.ancestor().appendChild(create_milestone_link);
2654 var config = {
2655 milestone_form_uri: milestone_form_uri,
2656
2657=== added file 'lib/lp/registry/templates/productseries-link-translations-branch.pt'
2658--- lib/lp/registry/templates/productseries-link-translations-branch.pt 1970-01-01 00:00:00 +0000
2659+++ lib/lp/registry/templates/productseries-link-translations-branch.pt 2009-07-03 09:50:12 +0000
2660@@ -0,0 +1,25 @@
2661+<tal:root
2662+ xmlns:tal="http://xml.zope.org/namespaces/tal"
2663+ omit-tag="">
2664+
2665+<html
2666+ xmlns="http://www.w3.org/1999/xhtml"
2667+ xmlns:metal="http://xml.zope.org/namespaces/metal"
2668+ xmlns:i18n="http://xml.zope.org/namespaces/i18n"
2669+ xml:lang="en"
2670+ lang="en"
2671+ dir="ltr"
2672+ metal:use-macro="view/macro:page/onecolumn"
2673+ i18n:domain="launchpad"
2674+>
2675+
2676+<body>
2677+<div metal:fill-slot="main">
2678+ <div metal:use-macro="context/@@launchpad_form/form">
2679+ <h1 metal:fill-slot="heading"
2680+ >Set translations export branch for this series</h1>
2681+ </div>
2682+</div>
2683+</body>
2684+</html>
2685+</tal:root>
2686
2687=== modified file 'lib/lp/registry/templates/productseries-status.pt'
2688--- lib/lp/registry/templates/productseries-status.pt 2009-07-17 17:59:07 +0000
2689+++ lib/lp/registry/templates/productseries-status.pt 2009-07-21 18:17:49 +0000
2690@@ -6,24 +6,27 @@
2691 tal:define="
2692 series context;
2693 is_focus context/is_development_focus;
2694- bug_count_status view/bugtask_status_counts;
2695 spec_count_status view/specification_status_counts;"
2696 >
2697 <metal:series use-macro="series/@@+macros/detailed_display">
2698 <div metal:fill-slot="extra">
2699 <div>
2700- Bugs targeted:
2701- <tal:statuses repeat="count_status bug_count_status">
2702- <span tal:attributes="class string:status${count_status/status/name}">
2703- <strong tal:content="count_status/count">2</strong>
2704- <tal:status replace="count_status/status/title" /><tal:comma
2705- condition="not: repeat/count_status/end">,</tal:comma>
2706- </span>
2707- </tal:statuses>
2708- <tal:no-statuses condition="not: bug_count_status">
2709- None
2710- </tal:no-statuses>
2711- <br />
2712+ <tal:not-obsolete
2713+ condition="not: view/is_obsolete"
2714+ define="bug_count_status view/bugtask_status_counts;">
2715+ Bugs targeted:
2716+ <tal:statuses repeat="count_status bug_count_status">
2717+ <span tal:attributes="class string:status${count_status/status/name}">
2718+ <strong tal:content="count_status/count">2</strong>
2719+ <tal:status replace="count_status/status/title" /><tal:comma
2720+ condition="not: repeat/count_status/end">,</tal:comma>
2721+ </span>
2722+ </tal:statuses>
2723+ <tal:no-statuses condition="not: bug_count_status">
2724+ None
2725+ </tal:no-statuses>
2726+ <br />
2727+ </tal:not-obsolete>
2728 Blueprints targeted:
2729 <tal:statuses repeat="count_status spec_count_status">
2730 <span tal:attributes="class string:specdelivery${count_status/status/name}">
2731
2732=== modified file 'lib/lp/registry/vocabularies.py'
2733--- lib/lp/registry/vocabularies.py 2009-07-18 00:05:49 +0000
2734+++ lib/lp/registry/vocabularies.py 2009-07-23 17:41:28 +0000
2735@@ -422,9 +422,13 @@
2736 """The storm store."""
2737 return getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
2738
2739- @property
2740- def _private_team_query(self):
2741- """Return query for private teams the logged in user belongs to."""
2742+ def _privateTeamQueryAndTables(self):
2743+ """Return query tables for private teams.
2744+
2745+ The teams are based on membership by the user.
2746+ Returns a tuple of (query, tables).
2747+ """
2748+ tables = []
2749 logged_in_user = getUtility(ILaunchBag).user
2750 if logged_in_user is not None:
2751 celebrities = getUtility(ILaunchpadCelebrities)
2752@@ -433,22 +437,22 @@
2753 # visible.
2754 private_query = AND(
2755 Not(Person.teamowner == None),
2756- Person.visibility == PersonVisibility.PRIVATE
2757- )
2758+ Person.visibility == PersonVisibility.PRIVATE)
2759 else:
2760 private_query = AND(
2761 TeamParticipation.person == logged_in_user.id,
2762 Not(Person.teamowner == None),
2763- Person.visibility == PersonVisibility.PRIVATE
2764- )
2765+ Person.visibility == PersonVisibility.PRIVATE)
2766+ tables = [Join(TeamParticipation,
2767+ TeamParticipation.teamID == Person.id)]
2768 else:
2769 private_query = False
2770- return private_query
2771+ return (private_query, tables)
2772
2773 def _doSearch(self, text=""):
2774 """Return the people/teams whose fti or email address match :text:"""
2775
2776- logged_in_user = getUtility(ILaunchBag).user
2777+ private_query, private_tables = self._privateTeamQueryAndTables()
2778 exact_match = None
2779
2780 # Short circuit if there is no search text - all valid people and
2781@@ -458,14 +462,13 @@
2782 Person,
2783 Join(self.cache_table_name,
2784 SQL("%s.id = Person.id" % self.cache_table_name)),
2785- Join(TeamParticipation,
2786- TeamParticipation.teamID == Person.id),
2787 ]
2788+ tables.extend(private_tables)
2789 result = self.store.using(*tables).find(
2790 Person,
2791 And(
2792 Or(Person.visibility == PersonVisibility.PUBLIC,
2793- self._private_team_query,
2794+ private_query,
2795 ),
2796 self.extra_clause
2797 )
2798@@ -549,11 +552,9 @@
2799 public_result.order_by()
2800
2801 # Next search for the private teams.
2802- private_tables = [
2803- Person,
2804- Join(TeamParticipation,
2805- TeamParticipation.teamID == Person.id),
2806- ]
2807+ private_query, private_tables = self._privateTeamQueryAndTables()
2808+ private_tables = [Person] + private_tables
2809+
2810 # Searching for private teams that match can be easier since we
2811 # are only interested in teams. Teams can have email addresses
2812 # but we're electing to ignore them here.
2813@@ -567,7 +568,7 @@
2814 Person,
2815 And(
2816 Person.id.is_in(private_inner_select),
2817- self._private_team_query,
2818+ private_query,
2819 )
2820 )
2821
2822@@ -642,16 +643,13 @@
2823 def _doSearch(self, text=""):
2824 """Return the teams whose fti, IRC, or email address match :text:"""
2825
2826+ private_query, private_tables = self._privateTeamQueryAndTables()
2827 base_query = Or(
2828 Person.visibility == PersonVisibility.PUBLIC,
2829- self._private_team_query,
2830+ private_query,
2831 )
2832
2833- tables = [
2834- Person,
2835- LeftJoin(TeamParticipation,
2836- TeamParticipation.teamID == Person.id),
2837- ]
2838+ tables = [Person] + private_tables
2839
2840 if not text:
2841 query = And(base_query,
2842@@ -660,19 +658,21 @@
2843 else:
2844 name_match_query = SQL("Person.fti @@ ftq(%s)" % quote(text))
2845
2846- email_match_query = And(
2847- EmailAddress.person == Person.id,
2848- StartsWith(Lower(EmailAddress.email), text),
2849- )
2850-
2851- tables.append(EmailAddress)
2852-
2853- query = And(base_query,
2854- self.extra_clause,
2855- Or(name_match_query, email_match_query),
2856- )
2857+ email_storm_query = self.store.find(
2858+ EmailAddress.personID,
2859+ StartsWith(Lower(EmailAddress.email), text))
2860+ email_subquery = Alias(email_storm_query._get_select(),
2861+ 'EmailAddress')
2862+ tables += [
2863+ LeftJoin(email_subquery, EmailAddress.person == Person.id),
2864+ ]
2865+
2866 result = self.store.using(*tables).find(
2867- Person, query)
2868+ Person,
2869+ And(base_query,
2870+ self.extra_clause,
2871+ Or(name_match_query,
2872+ EmailAddress.person != None)))
2873
2874 # XXX: BradCrittenden 2009-05-07 bug=373228: A bug in Storm prevents
2875 # setting the 'distinct' and 'limit' options in a single call to
2876
2877=== modified file 'lib/lp/services/inlinehelp/javascript/inlinehelp.js'
2878--- lib/lp/services/inlinehelp/javascript/inlinehelp.js 2009-06-30 21:06:27 +0000
2879+++ lib/lp/services/inlinehelp/javascript/inlinehelp.js 2009-07-20 23:17:14 +0000
2880@@ -24,8 +24,10 @@
2881 page elements.
2882 */
2883 // The button is inserted in the page dynamically:
2884+ // Changed from an <input type=button> to a <button> since
2885+ // IE8 doesn't handle style.css's input{visibility:inherit} correctly.
2886 $('help-close').innerHTML =
2887- '<input id="help-close-btn" type="button" value="Continue">';
2888+ '<button id="help-close-btn" value="Continue">';
2889 forEach(findHelpLinks(), setupHelpTrigger);
2890 initHelpPane();
2891 }
2892
2893=== modified file 'lib/lp/soyuz/browser/queue.py'
2894--- lib/lp/soyuz/browser/queue.py 2009-07-17 00:26:05 +0000
2895+++ lib/lp/soyuz/browser/queue.py 2009-07-19 04:41:14 +0000
2896@@ -186,7 +186,12 @@
2897 if len(uploads) == 0:
2898 return None
2899
2900- upload_ids = [upload.id for upload in uploads]
2901+        # Operate only on uploads and/or processed delayed-copies.
2902+ upload_ids = [
2903+ upload.id
2904+ for upload in uploads
2905+ if not (upload.is_delayed_copy and
2906+ upload.status != PackageUploadStatus.DONE)]
2907 binary_file_set = getUtility(IBinaryPackageFileSet)
2908 binary_files = binary_file_set.getByPackageUploadIDs(upload_ids)
2909 source_file_set = getUtility(ISourcePackageReleaseFileSet)
2910@@ -456,3 +461,22 @@
2911 self.sourcepackagerelease = self.sources[0].sourcepackagerelease
2912 else:
2913 self.sourcepackagerelease = None
2914+
2915+ @property
2916+ def pending_delayed_copy(self):
2917+ """Whether the context is a delayed-copy pending processing."""
2918+ return (
2919+ self.is_delayed_copy and self.status != PackageUploadStatus.DONE)
2920+
2921+ @property
2922+ def changesfile(self):
2923+ """Return the upload changesfile object, even for delayed-copies.
2924+
2925+ If the context `PackageUpload` is a delayed-copy, which doesn't
2926+ have '.changesfile' by design, return the changesfile originally
2927+ used to upload the contained source.
2928+ """
2929+ if self.is_delayed_copy:
2930+ return self.sources[0].sourcepackagerelease.upload_changesfile
2931+ return self.context.changesfile
2932+
2933
2934=== modified file 'lib/lp/soyuz/browser/tests/archive-views.txt'
2935--- lib/lp/soyuz/browser/tests/archive-views.txt 2009-07-17 13:11:44 +0000
2936+++ lib/lp/soyuz/browser/tests/archive-views.txt 2009-07-18 01:03:09 +0000
2937@@ -907,11 +907,11 @@
2938 WidgetInputError: ('destination_archive', u'Destination PPA', )
2939
2940
2941-=== Copy privacy mismatch ===
2942+=== Copy private files to public archives ===
2943
2944-Users are only allowed to copy private sources into private PPAs,
2945-otherwise builders won't be able to retrieve the files for
2946-building. See `testCopyFromPrivateToPublicPPAs` for more information.
2947+Users are allowed to copy private sources into private PPAs, however
2948+it happens via 'delayed-copies' not the usual direct copying method.
2949+See more information in scripts/packagecopier.py
2950
2951 First we will make Celso's PPA private.
2952
2953@@ -954,14 +954,24 @@
2954 ... 'field.actions.copy': 'Copy',
2955 ... })
2956
2957-The action cannot be performed due to the 'private mismatch'
2958-error. Nothing was copied to Ubuntu-team PPA.
2959+ >>> len(view.errors)
2960+ 0
2961+
2962+The action is performed as a delayed-copy, and the user is informed of
2963+it via a page notification.
2964
2965 >>> from canonical.launchpad.testing.pages import extract_text
2966- >>> for error in view.errors:
2967- ... print extract_text(error)
2968- The following source cannot be copied:
2969- private 1.0 in hoary (cannot copy private files into public archives)
2970-
2971- >>> ubuntu_team_ppa.getPublishedSources().count()
2972- 0
2973+ >>> for notification in view.request.response.notifications:
2974+ ... print extract_text(notification.message)
2975+ Packages copied to PPA for Ubuntu Team:
2976+ Delayed copy of private - 1.0 (source)
2977+
2978+The delayed-copy request is waiting to be processed in the ACCEPTED
2979+upload queue.
2980+
2981+ >>> from lp.soyuz.interfaces.queue import IPackageUploadSet
2982+ >>> copy = getUtility(IPackageUploadSet).findSourceUpload(
2983+ ... 'private', '1.0', ubuntu_team_ppa, ubuntu)
2984+
2985+ >>> print copy.status.name
2986+ ACCEPTED
2987
2988=== modified file 'lib/lp/soyuz/doc/archive.txt'
2989--- lib/lp/soyuz/doc/archive.txt 2009-07-11 14:46:40 +0000
2990+++ lib/lp/soyuz/doc/archive.txt 2009-07-20 19:13:31 +0000
2991@@ -2065,6 +2065,42 @@
2992 >>> cprov_archive.buildd_secret = ''
2993 >>> cprov_archive.private = False
2994
2995+Another important aspect of the upload permission for ubuntu main
2996+archives (PRIMARY, PARTNER and DEBUG) is that in addition to owners
2997+and users who were specifically granted permissions, members of the
2998+'ubuntu-security' team also have 'launchpad.Append' on them.
2999+
3000+In the sampledata, Carlos does not have permission to append contents
3001+to the Ubuntu main archives.
3002+
3003+ >>> primary, partner, debug = ubuntu.all_distro_archives
3004+
3005+ >>> login('carlos@canonical.com')
3006+ >>> check_permission('launchpad.Append', primary)
3007+ False
3008+ >>> check_permission('launchpad.Append', partner)
3009+ False
3010+ >>> check_permission('launchpad.Append', debug)
3011+ False
3012+
3013+When Carlos becomes a member of the 'ubuntu-security' team he is
3014+allowed to append to ubuntu main archives. In practice it means that
3015+Carlos can now *copy* packages directly to ubuntu.
3016+
3017+ # Make Carlos a member of the ubuntu-security team.
3018+ >>> login('foo.bar@canonical.com')
3019+ >>> ubuntu_security = getUtility(IPersonSet).getByName(
3020+ ... 'ubuntu-security')
3021+ >>> ubuntu_security.addMember(carlos, cprov)
3022+
3023+ >>> login('carlos@canonical.com')
3024+ >>> check_permission('launchpad.Append', primary)
3025+ True
3026+ >>> check_permission('launchpad.Append', partner)
3027+ True
3028+ >>> check_permission('launchpad.Append', debug)
3029+ True
3030+
3031
3032 == Rebuild archives ==
3033
3034@@ -2080,6 +2116,7 @@
3035 Creating new COPY archive without passing a name results in an
3036 AssertionError.
3037
3038+ >>> login('foo.bar@canonical.com')
3039 >>> rebuild_archive = getUtility(IArchiveSet).new(
3040 ... owner=cprov, purpose=ArchivePurpose.COPY,
3041 ... distribution=ubuntutest)
3042@@ -2227,7 +2264,6 @@
3043 ...
3044 DistroSeriesNotFound: badseries
3045
3046-
3047 We can also specify a single source to be copied with the `syncSource`
3048 call. This allows a version to be specified so older versions can be
3049 pulled.
3050@@ -2312,6 +2348,38 @@
3051 ...
3052 CannotCopy: Destination pocket must be 'release' for a PPA.
3053
3054+syncSource() will always use only the latest publication of the
3055+specific source, ignoring the previous ones. Multiple publications can
3056+be resulted from copies and/or overrides of the copy candidates in the
3057+source archive.
3058+
3059+ # Create a copy candidate (override_1.0) in ubuntu primary archive
3060+ # and override its section. Resulting in 2 publications in the
3061+ # source archive.
3062+ >>> from lp.soyuz.interfaces.section import ISectionSet
3063+ >>> source_old = test_publisher.getPubSource(
3064+ ... sourcename="overridden", version="1.0")
3065+ >>> python_section = getUtility(ISectionSet).ensure('python')
3066+ >>> copy_candidate = source_old.changeOverride(new_section=python_section)
3067+
3068+ >>> source_archive = copy_candidate.archive
3069+ >>> source_archive.getPublishedSources(name="overridden").count()
3070+ 2
3071+
3072+ >>> print copy_candidate.section.name
3073+ python
3074+
3075+When syncing 'overridden_1.0' to Mark's PPA, the latest publication,
3076+the one published in 'python' section, will be used.
3077+
3078+ >>> sabdfl.archive.syncSource(
3079+ ... source_name='overridden', version='1.0',
3080+ ... from_archive=source_archive, to_pocket='release')
3081+
3082+ >>> [copy] = sabdfl.archive.getPublishedSources(name="overridden")
3083+ >>> print copy.section.name
3084+ python
3085+
3086
3087 == Publish flag ==
3088
3089
3090=== modified file 'lib/lp/soyuz/doc/closing-bugs-from-changelogs.txt'
3091--- lib/lp/soyuz/doc/closing-bugs-from-changelogs.txt 2009-05-06 20:53:05 +0000
3092+++ lib/lp/soyuz/doc/closing-bugs-from-changelogs.txt 2009-07-16 00:00:27 +0000
3093@@ -49,18 +49,18 @@
3094 Launchpad-bugs-fixed header. This is required so that we have some data
3095 for close_bugs to operate on.
3096
3097- >>> from canonical.launchpad.interfaces import (
3098- ... PackagePublishingPocket, PackageUploadStatus)
3099-
3100+ >>> from canonical.launchpad.interfaces import PackagePublishingPocket
3101 >>> def add_package_upload(
3102 ... source_release, fixing_text,
3103 ... pocket=PackagePublishingPocket.RELEASE,
3104- ... archive=None):
3105+ ... archive=None, distroseries=None):
3106 ... """Create a PackageUpload record."""
3107 ... changes = changes_template % fixing_text
3108+ ... if distroseries is None:
3109+ ... distroseries = ubuntu_hoary
3110 ... if archive is None:
3111- ... archive = ubuntu_hoary.main_archive
3112- ... queue_item = ubuntu_hoary.createQueueEntry(
3113+ ... archive = distroseries.main_archive
3114+ ... queue_item = distroseries.createQueueEntry(
3115 ... archive=archive,
3116 ... pocket=pocket,
3117 ... changesfilename='%s.changes' % source_release.name,
3118@@ -68,7 +68,6 @@
3119 ... source_queue = queue_item.addSource(source_release)
3120 ... return queue_item
3121
3122-
3123 Throughout this document we'll now create PackageUpload records for various
3124 packages in the sample data (e.g. pmount, cdrkit) using this helper function.
3125
3126@@ -215,7 +214,7 @@
3127 >>> cdrkit_bug_id = cdrkit_ubuntu.createBug(bug_params).id
3128
3129 >>> queue_item_id = add_package_upload(
3130- ... cdrkit_release, cdrkit_bug_id,
3131+ ... cdrkit_release, cdrkit_bug_id,
3132 ... pocket=PackagePublishingPocket.PROPOSED).id
3133
3134 >>> close_bugs_and_check_status([cdrkit_bug_id], queue_item_id)
3135@@ -226,7 +225,7 @@
3136 #295621).
3137
3138 >>> queue_item_id = add_package_upload(
3139- ... cdrkit_release, cdrkit_bug_id,
3140+ ... cdrkit_release, cdrkit_bug_id,
3141 ... pocket=PackagePublishingPocket.BACKPORTS).id
3142
3143 >>> close_bugs_and_check_status([cdrkit_bug_id], queue_item_id)
3144@@ -246,6 +245,28 @@
3145 Before: NEW
3146 After: NEW
3147
3148+Delayed-copies to allowed archives and pockets will close bugs when
3149+processed.
3150+
3151+ # Create the cdrkit 'original upload' in ubuntu/breezy-autotest
3152+ # with an appropriate 'changelog_entry'.
3153+ >>> original_upload = add_package_upload(
3154+ ... cdrkit_release, cdrkit_bug_id,
3155+ ... distroseries=cdrkit_release.upload_distroseries)
3156+ >>> from zope.security.proxy import removeSecurityProxy
3157+ >>> removeSecurityProxy(cdrkit_release).changelog_entry = 'Something!'
3158+
3159+ # Create a delayed-copy for cdrkit in ubuntu/hoary.
3160+ >>> from lp.soyuz.interfaces.queue import IPackageUploadSet
3161+ >>> delayed_copy = getUtility(IPackageUploadSet).createDelayedCopy(
3162+ ... archive=ubuntu.main_archive, distroseries=ubuntu_hoary,
3163+ ... pocket=PackagePublishingPocket.RELEASE, signing_key=None)
3164+ >>> unused = delayed_copy.addSource(cdrkit_release)
3165+
3166+ >>> close_bugs_and_check_status([cdrkit_bug_id], delayed_copy.id)
3167+ Before: NEW
3168+ After: FIXRELEASED
3169+
3170 It's possible to specify more than one bug in the Launchpad-bugs-fixed
3171 header, each will be marked as Fix Released. If a nonexistent bug,
3172 '666', is specified, it's ignored.
3173
3174=== modified file 'lib/lp/soyuz/doc/distroseriesqueue.txt'
3175--- lib/lp/soyuz/doc/distroseriesqueue.txt 2009-07-16 03:31:45 +0000
3176+++ lib/lp/soyuz/doc/distroseriesqueue.txt 2009-07-16 15:06:35 +0000
3177@@ -1093,16 +1093,16 @@
3178
3179 >>> unused = delayed_copy.addSource(a_source_release)
3180
3181-IPackageUpload.acceptFromCopy() checks and accepts a delayed-copy
3182-record. It also closes related bug reports and grant karma to people
3183-related with the upload, although it doesn't send any emails.
3184+IPackageUpload.acceptFromCopy() simply checks and accepts a
3185+delayed-copy record. Bugs mentioned in the changelog are closed by
3186+`process-accepted` (transition from ACCEPTED to DONE); see
3187+closing-bugs-from-changelogs.txt for more information.
3188+
3189+ >>> print delayed_copy.status.name
3190+ NEW
3191
3192 >>> delayed_copy.acceptFromCopy()
3193
3194- >>> transaction.commit()
3195- >>> pop_notifications()
3196- []
3197-
3198 >>> print delayed_copy.status.name
3199 ACCEPTED
3200
3201
3202=== modified file 'lib/lp/soyuz/interfaces/queue.py'
3203--- lib/lp/soyuz/interfaces/queue.py 2009-07-17 00:26:05 +0000
3204+++ lib/lp/soyuz/interfaces/queue.py 2009-07-19 04:41:14 +0000
3205@@ -274,19 +274,25 @@
3206 * Publish and close bugs for 'single-source' uploads.
3207 * Skip bug-closing for PPA uploads.
3208 * Grant karma to people involved with the upload.
3209+
3210+ :raises: AssertionError if the context is a delayed-copy.
3211 """
3212
3213 def acceptFromCopy():
3214 """Perform upload acceptance for a delayed-copy record.
3215
3216 * Move the upload to accepted queue in all cases.
3217- * Close bugs for uploaded sources (skip imported ones).
3218+
3219+ :raises: AssertionError if the context is not a delayed-copy or
3220+ has no sources associated with it.
3221 """
3222
3223 def acceptFromQueue(announce_list, logger=None, dry_run=False):
3224 """Call setAccepted, do a syncUpdate, and send notification email.
3225
3226 * Grant karma to people involved with the upload.
3227+
3228+ :raises: AssertionError if the context is a delayed-copy.
3229 """
3230
3231 def rejectFromQueue(logger=None, dry_run=False):
3232
3233=== modified file 'lib/lp/soyuz/model/archive.py'
3234--- lib/lp/soyuz/model/archive.py 2009-07-18 21:19:37 +0000
3235+++ lib/lp/soyuz/model/archive.py 2009-07-20 18:05:13 +0000
3236@@ -1082,9 +1082,9 @@
3237 raise SourceNotFound(e)
3238
3239 source = from_archive.getPublishedSources(
3240- name=source_name, version=version, exact_match=True)
3241+ name=source_name, version=version, exact_match=True)[0]
3242
3243- self._copySources(source, to_pocket, to_series, include_binaries)
3244+ self._copySources([source], to_pocket, to_series, include_binaries)
3245
3246 def _copySources(self, sources, to_pocket, to_series=None,
3247 include_binaries=False):
3248
3249=== modified file 'lib/lp/soyuz/model/archivepermission.py'
3250--- lib/lp/soyuz/model/archivepermission.py 2009-07-17 00:26:05 +0000
3251+++ lib/lp/soyuz/model/archivepermission.py 2009-07-21 08:27:00 +0000
3252@@ -121,10 +121,8 @@
3253 clauses = ["""
3254 ArchivePermission.archive = %s AND
3255 ArchivePermission.permission = %s AND
3256- EXISTS (SELECT TeamParticipation.person
3257- FROM TeamParticipation
3258- WHERE TeamParticipation.person = %s AND
3259- TeamParticipation.team = ArchivePermission.person)
3260+ ArchivePermission.person = TeamParticipation.team AND
3261+ TeamParticipation.person = %s
3262 """ % sqlvalues(archive, permission, person)
3263 ]
3264
3265@@ -149,7 +147,7 @@
3266
3267 query = " AND ".join(clauses)
3268 auth = ArchivePermission.select(
3269- query, clauseTables=["TeamParticipation"], distinct=True,
3270+ query, clauseTables=["TeamParticipation"],
3271 prejoins=prejoins)
3272
3273 return auth
3274@@ -337,11 +335,11 @@
3275 SELECT ap.id
3276 FROM archivepermission ap, teamparticipation tp
3277 WHERE
3278- (ap.person = ? OR (ap.person = tp.team AND tp.person = ?))
3279+ ap.person = tp.team AND tp.person = ?
3280 AND ap.archive = ?
3281 AND ap.packageset IS NOT NULL
3282 '''
3283- query = SQL(query, (person.id, person.id, archive.id))
3284+ query = SQL(query, (person.id, archive.id))
3285 return store.find(ArchivePermission, In(ArchivePermission.id, query))
3286
3287 def uploadersForPackageset(
3288@@ -375,10 +373,10 @@
3289 SELECT ap.id
3290 FROM archivepermission ap, teamparticipation tp
3291 WHERE
3292- (ap.person = ? OR (ap.person = tp.team AND tp.person = ?))
3293+ ap.person = tp.team AND tp.person = ?
3294 AND ap.packageset = ? AND ap.archive = ?
3295 '''
3296- query = SQL(query, (person.id, person.id, packageset.id, archive.id))
3297+ query = SQL(query, (person.id, packageset.id, archive.id))
3298 permissions = list(
3299 store.find(ArchivePermission, In(ArchivePermission.id, query)))
3300 if len(permissions) > 0:
3301@@ -444,14 +442,14 @@
3302 archivepermission ap, teamparticipation tp,
3303 packagesetsources pss, flatpackagesetinclusion fpsi
3304 WHERE
3305- (ap.person = ? OR (ap.person = tp.team AND tp.person = ?))
3306+ ap.person = tp.team AND tp.person = ?
3307 AND ap.packageset = fpsi.parent
3308 AND pss.packageset = fpsi.child
3309 AND pss.sourcepackagename = ?
3310 AND ap.archive = ?
3311 '''
3312 query = SQL(
3313- query, (person.id, person.id, sourcepackagename.id, archive.id))
3314+ query, (person.id, sourcepackagename.id, archive.id))
3315 return store.find(ArchivePermission, In(ArchivePermission.id, query))
3316
3317 def packagesetsForSource(
3318@@ -491,9 +489,9 @@
3319 # Query parameters for the first WHERE clause.
3320 (archive.id, sourcepackagename.id) +
3321 # Query parameters for the second WHERE clause.
3322- (sourcepackagename.id,) + (person.id,)*2 + archive_params +
3323+ (sourcepackagename.id,) + (person.id,) + archive_params +
3324 # Query parameters for the third WHERE clause.
3325- (sourcepackagename.id,) + (person.id,)*2 + archive_params)
3326+ (sourcepackagename.id,) + (person.id,) + archive_params)
3327
3328 query = '''
3329 SELECT CASE
3330@@ -511,7 +509,7 @@
3331 teamparticipation tp
3332 WHERE
3333 pss.sourcepackagename = %s
3334- AND (ap.person = %s OR (ap.person = tp.team AND tp.person = %s))
3335+ AND ap.person = tp.team AND tp.person = %s
3336 AND pss.packageset = ap.packageset AND ap.explicit = TRUE
3337 AND ap.permission = %s AND ap.archive = %s)
3338 ELSE (
3339@@ -521,7 +519,7 @@
3340 teamparticipation tp, flatpackagesetinclusion fpsi
3341 WHERE
3342 pss.sourcepackagename = %s
3343- AND (ap.person = %s OR (ap.person = tp.team AND tp.person = %s))
3344+ AND ap.person = tp.team AND tp.person = %s
3345 AND pss.packageset = fpsi.child AND fpsi.parent = ap.packageset
3346 AND ap.permission = %s AND ap.archive = %s)
3347 END AS number_of_permitted_package_sets;
3348
3349=== modified file 'lib/lp/soyuz/model/queue.py'
3350--- lib/lp/soyuz/model/queue.py 2009-07-17 00:26:05 +0000
3351+++ lib/lp/soyuz/model/queue.py 2009-07-19 04:41:14 +0000
3352@@ -57,7 +57,8 @@
3353 PackageUploadStatus, PackageUploadCustomFormat)
3354 from lp.registry.interfaces.person import IPersonSet
3355 from lp.soyuz.interfaces.publishing import (
3356- PackagePublishingPocket, PackagePublishingStatus, pocketsuffix)
3357+ ISourcePackagePublishingHistory, PackagePublishingPocket,
3358+ PackagePublishingStatus, pocketsuffix)
3359 from lp.soyuz.interfaces.queue import (
3360 IPackageUpload, IPackageUploadBuild, IPackageUploadCustom,
3361 IPackageUploadQueue, IPackageUploadSource, IPackageUploadSet,
3362@@ -368,21 +369,8 @@
3363 assert self.is_delayed_copy, 'Can only process delayed-copies.'
3364 assert self.sources.count() == 1, (
3365 'Source is mandatory for delayed copies.')
3366-
3367 self.setAccepted()
3368
3369- # XXX cprov 2009-06-22 bug=390851: self.sourcepackagerelease
3370- # is cached, we cannot rely on it.
3371- sourcepackagerelease = self.sources[0].sourcepackagerelease
3372-
3373- # Close bugs if possible, skip imported sources.
3374- original_changesfile = sourcepackagerelease.upload_changesfile
3375- if original_changesfile is not None:
3376- changesfile_object = StringIO.StringIO(
3377- original_changesfile.read())
3378- close_bugs_for_queue_item(
3379- self, changesfile_object=changesfile_object)
3380-
3381 def rejectFromQueue(self, logger=None, dry_run=False):
3382 """See `IPackageUpload`."""
3383 self.setRejected()
3384@@ -538,6 +526,11 @@
3385 for new_file in update_files_privacy(pub_record):
3386 debug(logger,
3387 "Re-uploaded %s to librarian" % new_file.filename)
3388+ if ISourcePackagePublishingHistory.providedBy(pub_record):
3389+ pas_verify = BuildDaemonPackagesArchSpecific(
3390+ config.builddmaster.root, self.distroseries)
3391+ pub_record.createMissingBuilds(
3392+ pas_verify=pas_verify, logger=logger)
3393
3394 self.setDone()
3395
3396
3397=== modified file 'lib/lp/soyuz/model/sourcepackagerelease.py'
3398--- lib/lp/soyuz/model/sourcepackagerelease.py 2009-07-17 00:26:05 +0000
3399+++ lib/lp/soyuz/model/sourcepackagerelease.py 2009-07-20 15:05:42 +0000
3400@@ -35,7 +35,6 @@
3401 from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities
3402 from lp.translations.interfaces.translationimportqueue import (
3403 ITranslationImportQueue)
3404-from canonical.librarian.interfaces import ILibrarianClient
3405 from canonical.launchpad.webapp.interfaces import NotFoundError
3406 from lp.soyuz.interfaces.archive import (
3407 ArchivePurpose, IArchiveSet, MAIN_ARCHIVE_PURPOSES)
3408@@ -574,12 +573,9 @@
3409 return change
3410
3411 def attachTranslationFiles(self, tarball_alias, is_published,
3412- importer=None):
3413+ importer=None):
3414 """See ISourcePackageRelease."""
3415- client = getUtility(ILibrarianClient)
3416-
3417- tarball_file = client.getFileByAlias(tarball_alias.id)
3418- tarball = tarball_file.read()
3419+ tarball = tarball_alias.read()
3420
3421 if importer is None:
3422 importer = getUtility(ILaunchpadCelebrities).rosetta_experts
3423
3424=== modified file 'lib/lp/soyuz/scripts/packagecopier.py'
3425--- lib/lp/soyuz/scripts/packagecopier.py 2009-07-17 00:26:05 +0000
3426+++ lib/lp/soyuz/scripts/packagecopier.py 2009-07-19 04:41:14 +0000
3427@@ -30,11 +30,13 @@
3428 build_package_location)
3429 from lp.soyuz.interfaces.archive import (
3430 ArchivePurpose, CannotCopy)
3431-from lp.soyuz.interfaces.build import BuildSetStatus
3432+from lp.soyuz.interfaces.build import (
3433+ BuildStatus, BuildSetStatus)
3434 from lp.soyuz.interfaces.publishing import (
3435 IBinaryPackagePublishingHistory, ISourcePackagePublishingHistory,
3436 active_publishing_status)
3437-from lp.soyuz.interfaces.queue import IPackageUploadSet
3438+from lp.soyuz.interfaces.queue import (
3439+ IPackageUpload, IPackageUploadSet)
3440 from lp.soyuz.scripts.ftpmasterbase import (
3441 SoyuzScript, SoyuzScriptError)
3442 from lp.soyuz.scripts.processaccepted import (
3443@@ -74,8 +76,8 @@
3444
3445 return new_lfa
3446
3447-# XXX cprov 2009-06-12: These two functions could be incorporated in
3448-# ISPPH and BPPH. I just don't see a clear benefit in doing that right now.
3449+# XXX cprov 2009-06-12: this function should be incorporated in
3450+# IPublishing.
3451 def update_files_privacy(pub_record):
3452 """Update file privacy according the publishing detination
3453
3454@@ -133,23 +135,43 @@
3455 return re_uploaded_files
3456
3457
3458+# XXX cprov 2009-07-01: should be part of `ISourcePackagePublishingHistory`.
3459+def has_restricted_files(source):
3460+ """Whether or not a given source files has restricted files."""
3461+ for source_file in source.sourcepackagerelease.files:
3462+ if source_file.libraryfile.restricted:
3463+ return True
3464+
3465+ for binary in source.getBuiltBinaries():
3466+ for binary_file in binary.binarypackagerelease.files:
3467+ if binary_file.libraryfile.restricted:
3468+ return True
3469+
3470+ return False
3471+
3472+
3473 class CheckedCopy:
3474 """Representation of a copy that was checked and approved.
3475
3476 Decorates `ISourcePackagePublishingHistory`, tweaking
3477 `getStatusSummaryForBuilds` to return `BuildSetStatus.NEEDSBUILD`
3478 for source-only copies.
3479+
3480+ It also stores the 'delayed' boolean, which controls the way this source
3481+ should be copied to the destination archive (see `_do_delayed_copy` and
3482+ `_do_direct_copy`)
3483 """
3484 delegates(ISourcePackagePublishingHistory)
3485
3486- def __init__(self, context, include_binaries):
3487+ def __init__(self, context, include_binaries, delayed):
3488 self.context = context
3489 self.include_binaries = include_binaries
3490+ self.delayed = delayed
3491
3492 def getStatusSummaryForBuilds(self):
3493 """Always `BuildSetStatus.NEEDSBUILD` for source-only copies."""
3494 if self.include_binaries:
3495- self.context.getStatusSummaryForBuilds()
3496+ return self.context.getStatusSummaryForBuilds()
3497 else:
3498 return {'status': BuildSetStatus.NEEDSBUILD}
3499
3500@@ -160,9 +182,10 @@
3501 Allows the checker function to identify conflicting copy candidates
3502 within the copying batch.
3503 """
3504- def __init__(self, archive, include_binaries):
3505+ def __init__(self, archive, include_binaries, allow_delayed_copies=True):
3506 self.archive = archive
3507 self.include_binaries = include_binaries
3508+ self.allow_delayed_copies = allow_delayed_copies
3509 self._inventory = {}
3510
3511 def _getInventoryKey(self, candidate):
3512@@ -174,11 +197,18 @@
3513 return (
3514 candidate.source_package_name, candidate.source_package_version)
3515
3516- def addCopy(self, source):
3517+ def addCopy(self, source, delayed):
3518 """Story a copy in the inventory as a `CheckedCopy` instance."""
3519 inventory_key = self._getInventoryKey(source)
3520+ checked_copy = CheckedCopy(source, self.include_binaries, delayed)
3521 candidates = self._inventory.setdefault(inventory_key, [])
3522- candidates.append(CheckedCopy(source, self.include_binaries))
3523+ candidates.append(checked_copy)
3524+
3525+ def getCheckedCopies(self):
3526+ """Return a list of copies allowed to be performed."""
3527+ for copies in self._inventory.values():
3528+ for copy in copies:
3529+ yield copy
3530
3531 def getConflicts(self, candidate):
3532 """Conflicting `CheckedCopy` objects in the inventory.
3533@@ -356,34 +386,27 @@
3534 "version older than the %s published in %s" %
3535 (ancestry.displayname, ancestry.distroseries.name))
3536
3537-
3538-def check_privacy_mismatch(source, archive):
3539- """Whether or not source files match the archive privacy.
3540-
3541- Public source files can be copied to any archive, it does not
3542- represent a 'privacy mismatch'.
3543-
3544- On the other hand, private source files can be copied to private
3545- archives where builders will fetch it directly from the repository
3546- and not from the restricted librarian.
3547- """
3548- if archive.private:
3549- return False
3550-
3551- for source_file in source.sourcepackagerelease.files:
3552- if source_file.libraryfile.restricted:
3553- return True
3554-
3555- for binary in source.getBuiltBinaries():
3556- for binary_file in binary.binarypackagerelease.files:
3557- if binary_file.libraryfile.restricted:
3558- return True
3559-
3560- return False
3561+ delayed = (
3562+ self.allow_delayed_copies and
3563+ not self.archive.private and
3564+ has_restricted_files(source))
3565+
3566+ if delayed:
3567+ upload_conflict = getUtility(IPackageUploadSet).findSourceUpload(
3568+ name=source.sourcepackagerelease.name,
3569+ version=source.sourcepackagerelease.version,
3570+ archive=self.archive, distribution=series.distribution)
3571+ if upload_conflict is not None:
3572+ raise CannotCopy(
3573+ 'same version already uploaded and waiting in '
3574+ 'ACCEPTED queue')
3575+
3576+ # Copy is approved, update the copy inventory.
3577+ self.addCopy(source, delayed)
3578
3579
3580 def do_copy(sources, archive, series, pocket, include_binaries=False,
3581- deny_privacy_mismatch=True):
3582+ allow_delayed_copies=True):
3583 """Perform the complete copy of the given sources incrementally.
3584
3585 Verifies if each copy can be performed using `CopyChecker` and
3586@@ -402,9 +425,9 @@
3587 :param: include_binaries: optional boolean, controls whether or
3588 not the published binaries for each given source should be also
3589 copied along with the source.
3590- :param deny_privacy_mismatch: boolean indicating whether or not private
3591- sources can be copied to public archives. Defaults to True, only
3592- set as False in the UnembargoPackage context.
3593+ :param allow_delayed_copies: boolean indicating whether or not private
3594+ sources can be copied to public archives using delayed_copies.
3595+ Defaults to True, only set as False in the UnembargoPackage context.
3596
3597 :raise CannotCopy when one or more copies were not allowed. The error
3598 will contain the reason why each copy was denied.
3599@@ -415,42 +438,35 @@
3600 """
3601 copies = []
3602 errors = []
3603- copy_checker = CopyChecker(archive, include_binaries)
3604+ copy_checker = CopyChecker(
3605+ archive, include_binaries, allow_delayed_copies)
3606
3607 for source in sources:
3608 if series is None:
3609 destination_series = source.distroseries
3610 else:
3611 destination_series = series
3612-
3613 try:
3614 copy_checker.checkCopy(source, destination_series, pocket)
3615 except CannotCopy, reason:
3616 errors.append("%s (%s)" % (source.displayname, reason))
3617 continue
3618
3619- # For now, deny copies implying in file privacy mismatch.
3620- if (deny_privacy_mismatch and
3621- check_privacy_mismatch(source, archive)):
3622- errors.append(
3623- "%s (cannot copy private files into public archives)" %
3624- source.displayname)
3625- continue
3626-
3627- # Update the copy inventory.
3628- copy_checker.addCopy(source)
3629-
3630 if len(errors) != 0:
3631 raise CannotCopy("\n".join(errors))
3632
3633- for source in sources:
3634+ for source in copy_checker.getCheckedCopies():
3635 if series is None:
3636 destination_series = source.distroseries
3637 else:
3638 destination_series = series
3639-
3640- sub_copies = _do_direct_copy(
3641- source, archive, destination_series, pocket, include_binaries)
3642+ if source.delayed:
3643+ delayed_copy = _do_delayed_copy(
3644+ source, archive, destination_series, pocket, include_binaries)
3645+ sub_copies = [delayed_copy]
3646+ else:
3647+ sub_copies = _do_direct_copy(
3648+ source, archive, destination_series, pocket, include_binaries)
3649
3650 copies.extend(sub_copies)
3651
3652@@ -534,6 +550,21 @@
3653 return copies
3654
3655
3656+class DelayedCopy:
3657+ """Decorates `IPackageUpload` with a more descriptive 'displayname'."""
3658+
3659+ delegates(IPackageUpload)
3660+
3661+ def __init__(self, context):
3662+ self.context = context
3663+
3664+ @property
3665+ def displayname(self):
3666+ return 'Delayed copy of %s (%s)' % (
3667+ self.context.sourcepackagerelease.title,
3668+ self.context.displayarchs)
3669+
3670+
3671 def _do_delayed_copy(source, archive, series, pocket, include_binaries):
3672 """Schedule the given source for copy.
3673
3674@@ -571,6 +602,8 @@
3675 # If binaries are included in the copy we include binary custom files.
3676 if include_binaries:
3677 for build in source.getBuilds():
3678+ if build.buildstate != BuildStatus.FULLYBUILT:
3679+ continue
3680 delayed_copy.addBuild(build)
3681 original_build_upload = build.package_upload
3682 for custom in original_build_upload.customfiles:
3683@@ -586,11 +619,7 @@
3684 # the destination context.
3685 delayed_copy.acceptFromCopy()
3686
3687- # XXX cprov 2009-06-22 bug=390845: `IPackageUpload.displayname`
3688- # implementation is very poor, if we can't fix in place we should
3689- # build a decorated object implemented a more complete 'displayname'
3690- # property.
3691- return delayed_copy
3692+ return DelayedCopy(delayed_copy)
3693
3694
3695 class PackageCopier(SoyuzScript):
3696@@ -609,7 +638,7 @@
3697
3698 usage = '%prog -s warty mozilla-firefox --to-suite hoary'
3699 description = 'MOVE or COPY a published package to another suite.'
3700- deny_privacy_mismatch = True
3701+ allow_delayed_copies = True
3702
3703 def add_my_options(self):
3704
3705@@ -707,11 +736,15 @@
3706 copies = do_copy(
3707 sources, self.destination.archive,
3708 self.destination.distroseries, self.destination.pocket,
3709- self.options.include_binaries, self.deny_privacy_mismatch)
3710+ self.options.include_binaries, self.allow_delayed_copies)
3711 except CannotCopy, error:
3712 self.logger.error(str(error))
3713 return []
3714
3715+ self.logger.info("Copied:")
3716+ for copy in copies:
3717+ self.logger.info('\t%s' % copy.displayname)
3718+
3719 if len(copies) == 1:
3720 self.logger.info(
3721 "%s package successfully copied." % len(copies))
3722@@ -772,7 +805,7 @@
3723 description = ("Unembargo packages in a private PPA by copying to the "
3724 "specified location and re-uploading any files to the "
3725 "unrestricted librarian.")
3726- deny_privacy_mismatch = False
3727+ allow_delayed_copies = False
3728
3729 def add_my_options(self):
3730 """Add -d, -s, dry-run and confirmation options."""
3731
3732=== modified file 'lib/lp/soyuz/scripts/processaccepted.py'
3733--- lib/lp/soyuz/scripts/processaccepted.py 2009-06-25 04:06:00 +0000
3734+++ lib/lp/soyuz/scripts/processaccepted.py 2009-07-19 04:41:14 +0000
3735@@ -20,6 +20,8 @@
3736 from lp.soyuz.interfaces.archive import ArchivePurpose
3737 from lp.soyuz.interfaces.publishing import PackagePublishingPocket
3738 from lp.soyuz.interfaces.queue import IPackageUploadSet
3739+
3740+
3741 def get_bugs_from_changes_file(changes_file):
3742 """Parse the changes file and return a list of bugs referenced by it.
3743
3744@@ -54,6 +56,7 @@
3745 queue_item = getUtility(IPackageUploadSet).get(queue_id)
3746 close_bugs_for_queue_item(queue_item)
3747
3748+
3749 def can_close_bugs(target):
3750 """Whether or not bugs should be closed in the given target.
3751
3752@@ -74,6 +77,7 @@
3753
3754 return True
3755
3756+
3757 def close_bugs_for_queue_item(queue_item, changesfile_object=None):
3758 """Close bugs for a given queue item.
3759
3760@@ -96,12 +100,17 @@
3761 return
3762
3763 if changesfile_object is None:
3764- changesfile_object = queue_item.changesfile
3765+ if queue_item.is_delayed_copy:
3766+ sourcepackagerelease = queue_item.sources[0].sourcepackagerelease
3767+ changesfile_object = sourcepackagerelease.upload_changesfile
3768+ else:
3769+ changesfile_object = queue_item.changesfile
3770
3771 for source_queue_item in queue_item.sources:
3772 close_bugs_for_sourcepackagerelease(
3773 source_queue_item.sourcepackagerelease, changesfile_object)
3774
3775+
3776 def close_bugs_for_sourcepublication(source_publication):
3777 """Close bugs for a given sourcepublication.
3778
3779@@ -121,6 +130,7 @@
3780 close_bugs_for_sourcepackagerelease(
3781 sourcepackagerelease, changesfile_object)
3782
3783+
3784 def close_bugs_for_sourcepackagerelease(source_release, changesfile_object):
3785 """Close bugs for a given source.
3786
3787
3788=== modified file 'lib/lp/soyuz/scripts/tests/test_copypackage.py'
3789--- lib/lp/soyuz/scripts/tests/test_copypackage.py 2009-07-17 00:26:05 +0000
3790+++ lib/lp/soyuz/scripts/tests/test_copypackage.py 2009-07-19 04:41:14 +0000
3791@@ -28,7 +28,8 @@
3792 from lp.soyuz.adapters.packagelocation import PackageLocationError
3793 from lp.soyuz.interfaces.archive import (
3794 ArchivePurpose, CannotCopy)
3795-from lp.soyuz.interfaces.build import BuildStatus
3796+from lp.soyuz.interfaces.build import (
3797+ BuildSetStatus, BuildStatus)
3798 from lp.soyuz.interfaces.component import IComponentSet
3799 from lp.soyuz.interfaces.publishing import (
3800 IBinaryPackagePublishingHistory, ISourcePackagePublishingHistory,
3801@@ -42,7 +43,7 @@
3802 from lp.soyuz.model.processor import ProcessorFamily
3803 from lp.soyuz.scripts.ftpmasterbase import SoyuzScriptError
3804 from lp.soyuz.scripts.packagecopier import (
3805- CopyChecker, _do_delayed_copy, _do_direct_copy, PackageCopier,
3806+ CopyChecker, do_copy, _do_delayed_copy, _do_direct_copy, PackageCopier,
3807 re_upload_file, UnembargoSecurityPackage, update_files_privacy)
3808 from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
3809 from lp.testing import (
3810@@ -387,33 +388,81 @@
3811 class CopyCheckerHarness:
3812 """Basic checks common for all scenarios."""
3813
3814- def assertCanCopySourceOnly(self):
3815- """checkCopy() for source-only copy returns None."""
3816+ def assertCanCopySourceOnly(self, delayed=False):
3817+ """Source-only copy is allowed.
3818+
3819+ Initialise a `CopyChecker` and assert a `checkCopy` call returns
3820+ None (more importantly, doesn't raise `CannotCopy`) in the test
3821+ suite context.
3822+
3823+ Also assert that:
3824+ * 1 'CheckedCopy' was allowed and stored as such.
3825+ * Since it was source-only, the `CheckedCopy` object is in
3826+ NEEDSBUILD state.
3827+ * Finally check whether it is a delayed-copy or not according to the
3828+ given state.
3829+ """
3830 copy_checker = CopyChecker(self.archive, include_binaries=False)
3831 self.assertIs(
3832 None,
3833 copy_checker.checkCopy(self.source, self.series, self.pocket))
3834-
3835- def assertCanCopyBinaries(self):
3836- """checkCopy() for copy including binaries returns None."""
3837+ checked_copies = list(copy_checker.getCheckedCopies())
3838+ self.assertEquals(1, len(checked_copies))
3839+ [checked_copy] = checked_copies
3840+ self.assertEquals(
3841+ BuildSetStatus.NEEDSBUILD,
3842+ checked_copy.getStatusSummaryForBuilds()['status'])
3843+ self.assertEquals(delayed, checked_copy.delayed)
3844+
3845+ def assertCanCopyBinaries(self, delayed=False):
3846+ """Source and binary copy is allowed.
3847+
3848+ Initialise a `CopyChecker` and assert a `checkCopy` call returns
3849+ None (more importantly, doesn't raise `CannotCopy`) in the test
3850+ suite context.
3851+
3852+ Also assert that:
3853+ * 1 'CheckedCopy' was allowed and stored as such.
3854+ * The `CheckedCopy` object is in FULLYBUILT_PENDING or FULLYBUILT
3855+ status, so there are binaries to be copied.
3856+ * Finally check whether it is a delayed-copy or not according to the
3857+ given state.
3858+ """
3859 copy_checker = CopyChecker(self.archive, include_binaries=True)
3860 self.assertIs(
3861 None,
3862 copy_checker.checkCopy(self.source, self.series, self.pocket))
3863+ checked_copies = list(copy_checker.getCheckedCopies())
3864+ self.assertEquals(1, len(checked_copies))
3865+ [checked_copy] = checked_copies
3866+ self.assertTrue(
3867+ checked_copy.getStatusSummaryForBuilds()['status'] >=
3868+ BuildSetStatus.FULLYBUILT_PENDING)
3869+ self.assertEquals(delayed, checked_copy.delayed)
3870
3871 def assertCannotCopySourceOnly(self, msg):
3872- """checkCopy() for source-only copy raises CannotCopy."""
3873+ """`CopyChecker.checkCopy()` for source-only copy raises CannotCopy.
3874+
3875+ No `CheckedCopy` is stored.
3876+ """
3877 copy_checker = CopyChecker(self.archive, include_binaries=False)
3878 self.assertRaisesWithContent(
3879 CannotCopy, msg,
3880 copy_checker.checkCopy, self.source, self.series, self.pocket)
3881+ checked_copies = list(copy_checker.getCheckedCopies())
3882+ self.assertEquals(0, len(checked_copies))
3883
3884 def assertCannotCopyBinaries(self, msg):
3885- """checkCopy() for copy including binaries raises CannotCopy."""
3886+ """`CopyChecker.checkCopy()` including binaries raises CannotCopy.
3887+
3888+ No `CheckedCopy` is stored.
3889+ """
3890 copy_checker = CopyChecker(self.archive, include_binaries=True)
3891 self.assertRaisesWithContent(
3892 CannotCopy, msg,
3893 copy_checker.checkCopy, self.source, self.series, self.pocket)
3894+ checked_copies = list(copy_checker.getCheckedCopies())
3895+ self.assertEquals(0, len(checked_copies))
3896
3897 def test_cannot_copy_binaries_from_building(self):
3898 [build] = self.source.createMissingBuilds()
3899@@ -549,6 +598,32 @@
3900 status=PackagePublishingStatus.PUBLISHED)
3901 self.assertCanCopySourceOnly()
3902
3903+ def switchToAPrivateSource(self):
3904+ """Override the probing source with a private one."""
3905+ private_archive = self.factory.makeArchive(
3906+ distribution=self.test_publisher.ubuntutest,
3907+ purpose=ArchivePurpose.PPA)
3908+ private_archive.buildd_secret = 'x'
3909+ private_archive.private = True
3910+
3911+ self.source = self.test_publisher.getPubSource(
3912+ archive=private_archive)
3913+
3914+ def test_can_copy_only_source_from_private_archives(self):
3915+ # Source-only copies from private archives to public ones
3916+ # are allowed and result in a delayed-copy.
3917+ self.switchToAPrivateSource()
3918+ self.assertCanCopySourceOnly(delayed=True)
3919+
3920+ def test_can_copy_binaries_from_private_archives(self):
3921+ # Source and binary copies from private archives to public ones
3922+ # are allowed and result in a delayed-copy.
3923+ self.switchToAPrivateSource()
3924+ self.test_publisher.getPubBinaries(
3925+ pub_source=self.source,
3926+ status=PackagePublishingStatus.PUBLISHED)
3927+ self.assertCanCopyBinaries(delayed=True)
3928+
3929
3930 class CopyCheckerTestCase(TestCaseWithFactory):
3931
3932@@ -658,7 +733,6 @@
3933 None,
3934 copy_checker.checkCopy(
3935 source, source.distroseries, source.pocket))
3936- copy_checker.addCopy(source)
3937
3938 # The second source-only copy, for hoary-test, fails, since it
3939 # conflicts with the just-approved copy.
3940@@ -669,6 +743,73 @@
3941 copy_checker.checkCopy,
3942 copied_source, copied_source.distroseries, copied_source.pocket)
3943
3944+ def test_checkCopy_identifies_delayed_copies_conflicts(self):
3945+ # checkCopy() detects copy conflicts in the upload queue for
3946+ # delayed-copies. This is mostly caused by previous delayed-copies
3947+ # that are waiting to be processed.
3948+
3949+ # Create a private archive with a restricted source publication.
3950+ private_archive = self.factory.makeArchive(
3951+ distribution=self.test_publisher.ubuntutest,
3952+ purpose=ArchivePurpose.PPA)
3953+ private_archive.buildd_secret = 'x'
3954+ private_archive.private = True
3955+ source = self.test_publisher.getPubSource(archive=private_archive)
3956+
3957+ archive = self.test_publisher.ubuntutest.main_archive
3958+ series = source.distroseries
3959+ pocket = source.pocket
3960+
3961+ # Commit so the just-created files are accessible and perform
3962+ # the delayed-copy.
3963+ self.layer.txn.commit()
3964+ do_copy([source], archive, series, pocket, include_binaries=False)
3965+
3966+ # Repeating the copy is denied.
3967+ copy_checker = CopyChecker(archive, include_binaries=False)
3968+ self.assertRaisesWithContent(
3969+ CannotCopy,
3970+ 'same version already uploaded and waiting in ACCEPTED queue',
3971+ copy_checker.checkCopy, source, series, pocket)
3972+
3973+ def test_checkCopy_suppressing_delayed_copies(self):
3974+ # `CopyChecker` by default will request delayed-copies when
3975+ # applicable (restricted files being copied to public archives).
3976+ # However this feature can be turned off, and the operation can
3977+ # be performed as a direct-copy by passing 'allow_delayed_copies'
3978+ # as False when initialising `CopyChecker`.
3979+ # This aspect is currently only used in `UnembargoSecurityPackage`
3980+ # script class, because it performs the file privacy fixes in
3981+ # place.
3982+
3983+ # Create a private archive with a restricted source publication.
3984+ private_archive = self.factory.makeArchive(
3985+ distribution=self.test_publisher.ubuntutest,
3986+ purpose=ArchivePurpose.PPA)
3987+ private_archive.buildd_secret = 'x'
3988+ private_archive.private = True
3989+ source = self.test_publisher.getPubSource(archive=private_archive)
3990+
3991+ archive = self.test_publisher.ubuntutest.main_archive
3992+ series = source.distroseries
3993+ pocket = source.pocket
3994+
3995+ # Normally `CopyChecker` would store a delayed-copy representing
3996+ # this operation, since restricted files are being copied to
3997+ # public archives.
3998+ copy_checker = CopyChecker(archive, include_binaries=False)
3999+ copy_checker.checkCopy(source, series, pocket)
4000+ [checked_copy] = list(copy_checker.getCheckedCopies())
4001+ self.assertTrue(checked_copy.delayed)
4002+
4003+ # When 'allow_delayed_copies' is off, a direct-copy will be
4004+ # scheduled.
4005+ copy_checker = CopyChecker(
4006+ archive, include_binaries=False, allow_delayed_copies=False)
4007+ copy_checker.checkCopy(source, series, pocket)
4008+ [checked_copy] = list(copy_checker.getCheckedCopies())
4009+ self.assertFalse(checked_copy.delayed)
4010+
4011
4012 class DoDirectCopyTestCase(TestCaseWithFactory):
4013
4014@@ -722,6 +863,7 @@
4015 class DoDelayedCopyTestCase(TestCaseWithFactory):
4016
4017 layer = LaunchpadZopelessLayer
4018+ dbuser = config.archivepublisher.dbuser
4019
4020 def setUp(self):
4021 super(DoDelayedCopyTestCase, self).setUp()
4022@@ -763,22 +905,32 @@
4023 self.test_publisher.breezy_autotest.status = (
4024 DistroSeriesStatus.CURRENT)
4025
4026+ # Setup and execute the delayed copy procedure.
4027+ copy_archive = self.test_publisher.ubuntutest.main_archive
4028+ copy_series = source.distroseries
4029+ copy_pocket = PackagePublishingPocket.SECURITY
4030+
4031 # Commit for making the just-create library files available.
4032 self.layer.txn.commit()
4033-
4034- # Setup and execute the delayed copy procedure.
4035- copy_archive = self.test_publisher.ubuntutest.main_archive
4036- copy_series = source.distroseries
4037- copy_pocket = PackagePublishingPocket.SECURITY
4038+ self.layer.switchDbUser(self.dbuser)
4039
4040 delayed_copy = _do_delayed_copy(
4041 source, copy_archive, copy_series, copy_pocket, True)
4042
4043+ self.layer.txn.commit()
4044+ self.layer.switchDbUser('launchpad')
4045+
4046 # A delayed-copy `IPackageUpload` record is returned.
4047 self.assertTrue(delayed_copy.is_delayed_copy)
4048 self.assertEquals(
4049 PackageUploadStatus.ACCEPTED, delayed_copy.status)
4050
4051+ # The returned object has a more descriptive 'displayname'
4052+ # attribute than plain `IPackageUpload` instances.
4053+ self.assertEquals(
4054+ 'Delayed copy of foo - 666 (source, i386, raw-dist-upgrader)',
4055+ delayed_copy.displayname)
4056+
4057 # It is targeted to the right publishing context.
4058 self.assertEquals(copy_archive, delayed_copy.archive)
4059 self.assertEquals(copy_series, delayed_copy.distroseries)
4060@@ -801,6 +953,68 @@
4061 [custom_file],
4062 [custom.libraryfilealias for custom in delayed_copy.customfiles])
4063
4064+ def createPartiallyBuiltDelayedCopyContext(self):
4065+ """Allow tests on delayed-copies of partially built sources.
4066+
4067+ Create an architecture-specific source publication in a private PPA
4068+ capable of building for i386 and hppa architectures.
4069+
4070+ Upload and publish only the i386 binary, letting the hppa build
4071+ in pending status.
4072+ """
4073+ self.test_publisher.prepareBreezyAutotest()
4074+
4075+ ppa = self.factory.makeArchive(
4076+ distribution=self.test_publisher.ubuntutest,
4077+ purpose=ArchivePurpose.PPA)
4078+ ppa.buildd_secret = 'x'
4079+ ppa.private = True
4080+ ppa.require_virtualized = False
4081+
4082+ source = self.test_publisher.getPubSource(
4083+ archive=ppa, architecturehintlist='any')
4084+
4085+ [build_hppa, build_i386] = source.createMissingBuilds()
4086+ lazy_bin = self.test_publisher.uploadBinaryForBuild(
4087+ build_i386, 'lazy-bin')
4088+ self.test_publisher.publishBinaryInArchive(lazy_bin, source.archive)
4089+ changes_file_name = '%s_%s_%s.changes' % (
4090+ lazy_bin.name, lazy_bin.version, build_i386.arch_tag)
4091+ package_upload = self.test_publisher.addPackageUpload(
4092+ ppa, build_i386.distroarchseries.distroseries,
4093+ build_i386.pocket, changes_file_content='anything',
4094+ changes_file_name=changes_file_name)
4095+ package_upload.addBuild(build_i386)
4096+
4097+ return source
4098+
4099+ def test_do_delayed_copy_of_partially_built_sources(self):
4100+ # delayed-copies of partially built sources are allowed and only
4101+ # the FULLYBUILT builds are copied.
4102+ source = self.createPartiallyBuiltDelayedCopyContext()
4103+
4104+ # Setup and execute the delayed copy procedure.
4105+ copy_archive = self.test_publisher.ubuntutest.main_archive
4106+ copy_series = source.distroseries
4107+ copy_pocket = PackagePublishingPocket.RELEASE
4108+
4109+ # Make new libraryfiles available by committing the transaction.
4110+ self.layer.txn.commit()
4111+
4112+ # Perform the delayed-copy including binaries.
4113+ delayed_copy = _do_delayed_copy(
4114+ source, copy_archive, copy_series, copy_pocket, True)
4115+
4116+ # Only the i386 build is included in the delayed-copy.
4117+ # For the record, later on, when the delayed-copy gets processed,
4118+ # a new hppa build record will be created in the destination
4119+ # archive context. Also after this point, the same delayed-copy
4120+ # request will be denied by `CopyChecker`.
4121+ [build_hppa, build_i386] = source.getBuilds()
4122+ self.assertEquals(
4123+ [build_i386],
4124+ [pub.build for pub in delayed_copy.builds])
4125+
4126
4127 class CopyPackageScriptTestCase(unittest.TestCase):
4128 """Test the copy-package.py script."""
4129@@ -1772,15 +1986,7 @@
4130 copy_helper.mainTask)
4131
4132 def testCopyFromPrivateToPublicPPAs(self):
4133- """Check if copying private sources into public archives is denied.
4134-
4135- Private source files can only be published in private archives,
4136- because builders do not have access to the restricted librarian.
4137-
4138- Builders only fetch the sources files from the repository itself
4139- for private PPAs. If we copy a restricted file into a public PPA
4140- builders will not be able to fetch it.
4141- """
4142+ """Copies from private to public archives are allowed."""
4143 # Set up a private PPA.
4144 cprov = getUtility(IPersonSet).getByName("cprov")
4145 cprov.archive.buildd_secret = "secret"
4146@@ -1794,6 +2000,7 @@
4147 archive=cprov.archive, version='1.0', distroseries=hoary)
4148 ppa_binaries = test_publisher.getPubBinaries(
4149 pub_source=ppa_source, distroseries=hoary)
4150+ self.layer.txn.commit()
4151
4152 # Run the copy package script storing the logged information.
4153 copy_helper = self.getCopier(
4154@@ -1801,12 +2008,20 @@
4155 from_suite='hoary', to_suite='hoary')
4156 copied = copy_helper.mainTask()
4157
4158- # Nothing was copied and an error message was printed explaining why.
4159- self.assertEqual(len(copied), 0)
4160+ # The private files are copied via a delayed-copy request.
4161+ self.assertEqual(len(copied), 1)
4162 self.assertEqual(
4163- copy_helper.logger.buffer.getvalue().splitlines()[-1],
4164- 'ERROR: foo 1.0 in hoary '
4165- '(cannot copy private files into public archives)')
4166+ ['INFO: FROM: cprov: hoary-RELEASE',
4167+ 'INFO: TO: Primary Archive for Ubuntu Linux: hoary-RELEASE',
4168+ 'INFO: Copy candidates:',
4169+ 'INFO: \tfoo 1.0 in hoary',
4170+ 'INFO: \tfoo-bin 1.0 in hoary hppa',
4171+ 'INFO: \tfoo-bin 1.0 in hoary i386',
4172+ 'INFO: Copied:',
4173+ 'INFO: \tDelayed copy of foo - 1.0 (source, i386)',
4174+ 'INFO: 1 package successfully copied.',
4175+ ],
4176+ copy_helper.logger.buffer.getvalue().splitlines())
4177
4178 def testUnembargoing(self):
4179 """Test UnembargoSecurityPackage, which wraps PackagerCopier."""
4180
4181=== added file 'lib/lp/soyuz/stories/soyuz/xx-queue-pages-delayed-copies.txt'
4182--- lib/lp/soyuz/stories/soyuz/xx-queue-pages-delayed-copies.txt 1970-01-01 00:00:00 +0000
4183+++ lib/lp/soyuz/stories/soyuz/xx-queue-pages-delayed-copies.txt 2009-07-16 18:07:13 +0000
4184@@ -0,0 +1,128 @@
4185+Displaying delayed-copies
4186+=========================
4187+
4188+Delayed copies can be browsed in the UI as if they were normal uploads.
4189+
4190+We will create a testing delayed-copy for Ubuntu/breezy-autotest.
4191+
4192+ # Create a delayed-copy in ubuntu/breezy-autotest.
4193+ >>> from zope.component import getUtility
4194+ >>> from lp.registry.interfaces.distribution import IDistributionSet
4195+ >>> from lp.registry.interfaces.person import IPersonSet
4196+ >>> from lp.soyuz.interfaces.publishing import PackagePublishingPocket
4197+ >>> from lp.soyuz.interfaces.queue import IPackageUploadSet
4198+ >>> from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
4199+ >>> login('foo.bar@canonical.com')
4200+ >>> ubuntu = getUtility(IDistributionSet).getByName('ubuntu')
4201+ >>> cprov = getUtility(IPersonSet).getByName('cprov')
4202+ >>> cprov.archive.buildd_secret = 'x'
4203+ >>> cprov.archive.private = True
4204+ >>> cprov.archive.require_virtualized = False
4205+ >>> stp = SoyuzTestPublisher()
4206+ >>> stp.prepareBreezyAutotest()
4207+ >>> [bin_hppa, bin_i386] = stp.getPubBinaries(archive=cprov.archive)
4208+ >>> build = bin_hppa.binarypackagerelease.build
4209+ >>> breezy_autotest = ubuntu.getSeries('breezy-autotest')
4210+ >>> delayed_copy = getUtility(IPackageUploadSet).createDelayedCopy(
4211+ ... archive=ubuntu.main_archive, distroseries=breezy_autotest,
4212+ ... pocket=PackagePublishingPocket.RELEASE, signing_key=None)
4213+ >>> unused = delayed_copy.addSource(build.sourcepackagerelease)
4214+ >>> unused = delayed_copy.addBuild(build)
4215+ >>> transaction.commit()
4216+ >>> delayed_copy.acceptFromCopy()
4217+ >>> logout()
4218+
4219+Any user accessing the breezy-autotest ACCEPTED queue will notice the
4220+delayed-copy. They show up in the ACCEPTED distro series queue while
4221+they are pending processing.
4222+
4223+ >>> anon_browser.open(
4224+ ... "http://launchpad.dev/ubuntu/breezy-autotest/+queue")
4225+ >>> anon_browser.getControl(
4226+ ... name="queue_state", index=0).displayValue = ['Accepted']
4227+ >>> anon_browser.getControl("Update").click()
4228+
4229+It is listed as a normal upload; however, there is no link to the
4230+'changesfile'.
4231+
4232+ >>> for row in find_tags_by_class(anon_browser.contents, "queue-row"):
4233+ ... print extract_text(row)
4234+ Package Version Component Section Priority Pocket When
4235+ foo, foo (source, i386) 666 main base low Release ...
4236+
4237+ >>> anon_browser.getLink('foo, foo')
4238+ Traceback (most recent call last):
4239+ ...
4240+ LinkNotFoundError
4241+
4242+In the corresponding expandable area, below the row, there is no file
4243+information, since the delayed-copy is still pending processing. A
4244+user can simply view where the delayed copy came from.
4245+
4246+ >>> print extract_text(
4247+ ... first_tag_by_class(anon_browser.contents,
4248+ ... 'queue-%s' % delayed_copy.id))
4249+ Copied from PPA for Celso Providelo
4250+
4251+The delayed-copy source archive is not linked, since the requester has
4252+no permission to view it.
4253+
4254+ >>> anon_browser.getLink('PPA for Celso Providelo')
4255+ Traceback (most recent call last):
4256+ ...
4257+ LinkNotFoundError
4258+
4259+While the delayed-copy is still in ACCEPTED state, i.e. not processed,
4260+authenticated users with permission to view the archive from where the
4261+delayed-copy was issued can additionally access a link to its original
4262+archive, nothing else.
4263+
4264+ >>> cprov_browser = setupBrowser(
4265+ ... auth="Basic celso.providelo@canonical.com:cprov")
4266+ >>> cprov_browser.open(anon_browser.url)
4267+
4268+ >>> for row in find_tags_by_class(cprov_browser.contents, "queue-row"):
4269+ ... print extract_text(row)
4270+ Package Version Component Section Priority Pocket When
4271+ foo, foo (source, i386) 666 main base low Release ...
4272+
4273+ >>> anon_browser.getLink('foo, foo')
4274+ Traceback (most recent call last):
4275+ ...
4276+ LinkNotFoundError
4277+
4278+ >>> print extract_text(
4279+ ... first_tag_by_class(cprov_browser.contents,
4280+ ... 'queue-%s' % delayed_copy.id))
4281+ Copied from PPA for Celso Providelo
4282+
4283+ >>> print cprov_browser.getLink('PPA for Celso Providelo').url
4284+ http://launchpad.dev/~cprov/+archive/ppa
4285+
4286+When the delayed-copy is processed (moved to the DONE queue) its contents
4287+become available to everyone.
4288+
4289+ # Process the delayed-copy.
4290+ >>> login('foo.bar@canonical.com')
4291+ >>> stp.addFakeChroots(breezy_autotest)
4292+ >>> unused = delayed_copy.realiseUpload()
4293+ >>> transaction.commit()
4294+ >>> logout()
4295+
4296+Any user can access the DONE queue, view the delayed-copy
4297+'changesfile' and see its files in the expandable area.
4298+
4299+ >>> anon_browser.getControl(
4300+ ... name="queue_state", index=0).displayValue = ['Done']
4301+ >>> anon_browser.getControl("Update").click()
4302+
4303+ >>> print anon_browser.getLink('foo, foo').url
4304+ http://localhost:58000/.../foo_666_source.changes
4305+
4306+ >>> extra_information = find_tags_by_class(
4307+ ... anon_browser.contents, 'queue-%s' % delayed_copy.id)
4308+ >>> for info in extra_information:
4309+ ... print extract_text(info)
4310+ foo_666.dsc (28 bytes)
4311+ foo-bin_666_all.deb (18 bytes) 666 main base standard
4312+
4313
4314=== modified file 'lib/lp/soyuz/stories/soyuz/xx-queue-pages.txt'
4315--- lib/lp/soyuz/stories/soyuz/xx-queue-pages.txt 2009-07-01 13:16:44 +0000
4316+++ lib/lp/soyuz/stories/soyuz/xx-queue-pages.txt 2009-07-16 00:31:36 +0000
4317@@ -135,15 +135,15 @@
4318 >>> anon_browser.getControl(name="queue_text").value = ''
4319 >>> anon_browser.getControl("Update").click()
4320
4321- >>> print find_tag_by_id(anon_browser.contents, 'queue-alsa-utils-4-arrow')
4322+ >>> print find_tag_by_id(anon_browser.contents, 'queue-4-arrow')
4323 <img width="14" height="14" src="/@@/treeCollapsed" alt="view files"
4324- id="queue-alsa-utils-4-arrow" />
4325+ id="queue-4-arrow" />
4326
4327 The 'filelist' is expanded as one or more table rows, right below the
4328 clicked item:
4329
4330 >>> filelist = find_tags_by_class(
4331- ... anon_browser.contents, 'queue-alsa-utils-4')
4332+ ... anon_browser.contents, 'queue-4')
4333
4334 It contains a list of files related to the queue item clicked, followed
4335 by its size, one file per line:
4336@@ -164,7 +164,7 @@
4337 candidates). The binary items will also individually show their
4338 version, component, section and priority.
4339
4340- >>> [filelist] = find_tags_by_class(anon_browser.contents, 'queue-pmount-2')
4341+ >>> [filelist] = find_tags_by_class(anon_browser.contents, 'queue-2')
4342 >>> print extract_text(filelist)
4343 pmount_1.0-1_all.deb (18 bytes) NEW 0.1-1 main base important
4344
4345@@ -409,7 +409,7 @@
4346 values:
4347
4348 >>> filelist = find_tags_by_class(
4349- ... anon_browser.contents, 'queue-pmount-2')
4350+ ... anon_browser.contents, 'queue-2')
4351 >>> for row in filelist:
4352 ... print extract_text(row)
4353 pmount_1.0-1_all.deb (18 bytes) NEW 0.1-1 restricted admin extra
4354
4355=== modified file 'lib/lp/soyuz/stories/webservice/xx-archive.txt'
4356--- lib/lp/soyuz/stories/webservice/xx-archive.txt 2009-07-17 18:25:27 +0000
4357+++ lib/lp/soyuz/stories/webservice/xx-archive.txt 2009-07-18 11:51:59 +0000
4358@@ -834,33 +834,47 @@
4359 <BLANKLINE>
4360
4361
4362-=== Copy privacy mismatch ===
4363-
4364-A CannotCopy error, giving the reason "Cannot copy private source into
4365-public archives." is raised if such operation is requested.
4366-
4367-When we try to copy the private source to the primary archive, which
4368-is public, the 'privacy mismatch' error is raised. The behaviour is
4369-the same for `syncSource` or `syncSources` operations.
4370+=== Copying private file to public archives ===
4371+
4372+Copying private sources to public archives works fine with
4373+`syncSource` or `syncSources` operations.
4374+
4375+We use `syncSource` to copy 'private - 1.0' source from Celso's
4376+private PPA to the ubuntu primary archive.
4377
4378 >>> print cprov_webservice.named_post(
4379 ... ubuntu['main_archive_link'], 'syncSource', {},
4380- ... source_name='private', version="1.0", to_pocket='release',
4381+ ... source_name='private', version='1.0', to_pocket='release',
4382 ... from_archive=cprov_archive['self_link'],
4383 ... to_series="hoary")
4384- HTTP/1.1 400 Bad Request
4385+ HTTP/1.1 200 Ok
4386 ...
4387- CannotCopy: private 1.0 in hoary
4388- (cannot copy private files into public archives)
4389- <BLANKLINE>
4390+
4391+In the same way we can use 'syncSources' for syncing a subsequent
4392+version.
4393+
4394+ >>> login('foo.bar@canonical.com')
4395+ >>> unused = test_publisher.getPubSource(
4396+ ... sourcename="private", version="1.1", archive=cprov.archive)
4397+ >>> logout()
4398
4399 >>> print cprov_webservice.named_post(
4400 ... ubuntu['main_archive_link'], 'syncSources', {},
4401 ... source_names=['private'], to_pocket='release',
4402 ... from_archive=cprov_archive['self_link'],
4403 ... to_series="hoary")
4404+ HTTP/1.1 200 Ok
4405+ ...
4406+
4407+However, if we try to copy an old version by repeating the copy, an
4408+error is returned.
4409+
4410+ >>> print cprov_webservice.named_post(
4411+ ... ubuntu['main_archive_link'], 'syncSource', {},
4412+ ... source_name='private', version='1.1', to_pocket='release',
4413+ ... from_archive=cprov_archive['self_link'],
4414+ ... to_series="hoary")
4415 HTTP/1.1 400 Bad Request
4416 ...
4417- CannotCopy: private 1.0 in hoary
4418- (cannot copy private files into public archives)
4419- <BLANKLINE>
4420+ CannotCopy: private 1.1 in hoary
4421+ (same version already uploaded and waiting in ACCEPTED queue)
4422
4423=== modified file 'lib/lp/soyuz/templates/distroseries-queue.pt'
4424--- lib/lp/soyuz/templates/distroseries-queue.pt 2009-07-17 17:59:07 +0000
4425+++ lib/lp/soyuz/templates/distroseries-queue.pt 2009-07-19 04:41:14 +0000
4426@@ -72,8 +72,7 @@
4427 </thead>
4428 <tbody class="lesser">
4429 <tal:batch repeat="packageupload batch">
4430- <tal:block
4431- define="filelist_class string:queue-${packageupload/displayname}-${packageupload/id}">
4432+ <tal:block define="filelist_class string:queue-${packageupload/id}">
4433 <tr class="queue-row">
4434 <tal:comment condition="nothing">
4435 Every column is top-padded apart from the checkbox
4436@@ -203,6 +202,28 @@
4437 :packageupload: A PackageUpload record for which we display files.
4438 </tal:comment>
4439
4440+ <tal:copy condition="packageupload/pending_delayed_copy">
4441+ <tr tal:attributes="class string:${filelist_class}"
4442+ tal:define="archive
4443+ packageupload/sourcepackagerelease/upload_archive"
4444+ style="display:none">
4445+ <td />
4446+ <td tal:condition="view/availableActions" />
4447+ <td>Copied from
4448+ <tal:linked condition="archive/required:launchpad.View">
4449+ <a tal:attributes="href archive/fmt:url"
4450+ tal:content="archive/displayname" />
4451+ </tal:linked>
4452+ <tal:not_linked
4453+ condition="not: archive/required:launchpad.View"
4454+ replace="archive/displayname">
4455+ </tal:not_linked>
4456+ </td>
4457+ <td colspan="6" />
4458+ </tr>
4459+ </tal:copy>
4460+
4461+ <tal:upload condition="not: packageupload/pending_delayed_copy">
4462 <tr tal:repeat="file packageupload/source_files"
4463 tal:attributes="class string:${filelist_class}"
4464 style="display:none">
4465@@ -230,7 +251,7 @@
4466 <td tal:condition="view/availableActions"/>
4467 <td>
4468 <a tal:attributes="href file/libraryfile/http_url">
4469- <tal:filename content="file/libraryfile/filename"/>
4470+ <tal:filename replace="file/libraryfile/filename"/>
4471 </a>
4472 (<tal:size replace="file/libraryfile/content/filesize/fmt:bytes" />)
4473 <span style="color: red" tal:condition="is_new">NEW</span>
4474@@ -260,6 +281,7 @@
4475 <td colspan="6"/>
4476 </tr>
4477 </tal:custom>
4478+ </tal:upload>
4479
4480 </metal:macro>
4481
4482@@ -287,12 +309,16 @@
4483 alt="[Debian Description Translation Project Indexes]"
4484 src="/@@/ubuntu-icon"
4485 title="Debian Description Translation Project Indexes"/>
4486- <a tal:attributes="href packageupload/changesfile/http_url;
4487- title string:Changes file for ${packageupload/displayname}">
4488- <tal:name replace="string: ${packageupload/displayname}"/>
4489+ <a tal:condition="not: packageupload/pending_delayed_copy"
4490+ tal:content="packageupload/displayname"
4491+ tal:attributes="
4492+ href packageupload/changesfile/http_url;
4493+ title string:Changes file for ${packageupload/displayname};">
4494 </a>
4495- <tal:version replace="
4496- string: (${packageupload/displayarchs})"/>
4497+ <tal:pending_delayed_copy_title
4498+ condition="packageupload/pending_delayed_copy"
4499+ replace="packageupload/displayname" />
4500+ <tal:arches replace="string: (${packageupload/displayarchs})"/>
4501 </div>
4502 </metal:macro>
4503
4504
4505=== modified file 'lib/lp/soyuz/tests/test_packageupload.py'
4506--- lib/lp/soyuz/tests/test_packageupload.py 2009-07-17 02:25:09 +0000
4507+++ lib/lp/soyuz/tests/test_packageupload.py 2009-07-19 04:41:14 +0000
4508@@ -16,6 +16,7 @@
4509 from lp.registry.interfaces.distribution import IDistributionSet
4510 from lp.registry.interfaces.distroseries import DistroSeriesStatus
4511 from lp.soyuz.interfaces.archive import ArchivePurpose
4512+from lp.soyuz.interfaces.build import BuildStatus
4513 from lp.soyuz.interfaces.publishing import (
4514 PackagePublishingPocket, PackagePublishingStatus)
4515 from lp.soyuz.interfaces.queue import (
4516@@ -24,13 +25,13 @@
4517 from lp.testing import TestCaseWithFactory
4518
4519
4520-class TestPackageUpload(TestCaseWithFactory):
4521+class PackageUploadTestCase(TestCaseWithFactory):
4522
4523 layer = LaunchpadZopelessLayer
4524 dbuser = config.uploadqueue.dbuser
4525
4526 def setUp(self):
4527- super(TestPackageUpload, self).setUp()
4528+ super(PackageUploadTestCase, self).setUp()
4529 self.test_publisher = SoyuzTestPublisher()
4530
4531 def createEmptyDelayedCopy(self):
4532@@ -65,11 +66,15 @@
4533 'Source is mandatory for delayed copies.',
4534 delayed_copy.acceptFromCopy)
4535
4536- def createDelayedCopy(self):
4537+ def createDelayedCopy(self, source_only=False):
4538 """Return a delayed-copy targeted to ubuntutest/breezy-autotest.
4539
4540- The delayed-copy is target to the SECURITY pocket with:
4541+ The delayed-copy is targeted to the SECURITY pocket with:
4542+
4543 * source foo - 1.1
4544+
4545+ If 'source_only' is False (the default behavior), also attach:
4546+
4547 * binaries foo - 1.1 in i386 and hppa
4548 * a DIST_UPGRADER custom file
4549
4550@@ -83,16 +88,6 @@
4551 ppa.private = True
4552
4553 source = self.test_publisher.getPubSource(archive=ppa, version='1.1')
4554- self.test_publisher.getPubBinaries(pub_source=source)
4555- custom_path = datadir(
4556- 'dist-upgrader/dist-upgrader_20060302.0120_all.tar.gz')
4557- custom_file = self.factory.makeLibraryFileAlias(
4558- filename='dist-upgrader_20060302.0120_all.tar.gz',
4559- content=open(custom_path).read(), restricted=True)
4560- [build] = source.getBuilds()
4561- build.package_upload.addCustom(
4562- custom_file, PackageUploadCustomFormat.DIST_UPGRADER)
4563-
4564 delayed_copy = getUtility(IPackageUploadSet).createDelayedCopy(
4565 self.test_publisher.ubuntutest.main_archive,
4566 self.test_publisher.breezy_autotest,
4567@@ -100,11 +95,21 @@
4568 self.test_publisher.person.gpgkeys[0])
4569
4570 delayed_copy.addSource(source.sourcepackagerelease)
4571- for build in source.getBuilds():
4572- delayed_copy.addBuild(build)
4573- for custom in build.package_upload.customfiles:
4574- delayed_copy.addCustom(
4575- custom.libraryfilealias, custom.customformat)
4576+ if not source_only:
4577+ self.test_publisher.getPubBinaries(pub_source=source)
4578+ custom_path = datadir(
4579+ 'dist-upgrader/dist-upgrader_20060302.0120_all.tar.gz')
4580+ custom_file = self.factory.makeLibraryFileAlias(
4581+ filename='dist-upgrader_20060302.0120_all.tar.gz',
4582+ content=open(custom_path).read(), restricted=True)
4583+ [build] = source.getBuilds()
4584+ build.package_upload.addCustom(
4585+ custom_file, PackageUploadCustomFormat.DIST_UPGRADER)
4586+ for build in source.getBuilds():
4587+ delayed_copy.addBuild(build)
4588+ for custom in build.package_upload.customfiles:
4589+ delayed_copy.addCustom(
4590+ custom.libraryfilealias, custom.customformat)
4591
4592 # Commit for using just-created library files.
4593 self.layer.txn.commit()
4594@@ -136,6 +141,10 @@
4595 # and has their files privacy adjusted according test destination
4596 # context.
4597
4598+ # Add a cleanup for removing the repository where the custom upload
4599+ # was published.
4600+ self.addCleanup(self.removeRepository)
4601+
4602 # Create the default delayed-copy context.
4603 delayed_copy = self.createDelayedCopy()
4604
4605@@ -158,23 +167,27 @@
4606 self.test_publisher.getPubBinaries(
4607 pub_source=ancestry_source,
4608 status=PackagePublishingStatus.PUBLISHED)
4609+ package_diff = ancestry_source.sourcepackagerelease.requestDiffTo(
4610+ requester=self.test_publisher.person,
4611+ to_sourcepackagerelease=delayed_copy.sourcepackagerelease)
4612+ package_diff.diff_content = self.factory.makeLibraryFileAlias(
4613+ restricted=True)
4614
4615 # Accept and publish the delayed-copy.
4616 delayed_copy.acceptFromCopy()
4617 self.assertEquals(
4618 PackageUploadStatus.ACCEPTED, delayed_copy.status)
4619
4620+ self.layer.txn.commit()
4621+ self.layer.switchDbUser(self.dbuser)
4622+
4623 logger = BufferLogger()
4624 pub_records = delayed_copy.realiseUpload(logger=logger)
4625 self.assertEquals(
4626 PackageUploadStatus.DONE, delayed_copy.status)
4627
4628- # Commit for comparing objects correctly.
4629 self.layer.txn.commit()
4630-
4631- # Add a cleanup for removing the repository where the custom upload
4632- # was published.
4633- self.addCleanup(self.removeRepository)
4634+ self.layer.switchDbUser('launchpad')
4635
4636 # One source and 2 binaries are pending publication. They all were
4637 # overridden to multiverse and had their files moved to the public
4638@@ -192,6 +205,9 @@
4639 pub_record, delayed_copy.archive, delayed_copy.pocket,
4640 ancestry_source.component, False)
4641
4642+ # The package diff file is now public.
4643+ self.assertFalse(package_diff.diff_content.restricted)
4644+
4645 # The custom file was also published.
4646 custom_path = os.path.join(
4647 config.archivepublisher.root,
4648@@ -200,6 +216,30 @@
4649 self.assertEquals(
4650 ['20060302.0120', 'current'], sorted(os.listdir(custom_path)))
4651
4652+ def test_realiseUpload_for_source_only_delayed_copies(self):
4653+ # Source-only delayed-copies result in the source published
4654+ # in the destination archive and its corresponding build
4655+ # records ready to be dispatched.
4656+
4657+ # Create the default delayed-copy context.
4658+ delayed_copy = self.createDelayedCopy(source_only=True)
4659+ self.test_publisher.breezy_autotest.status = (
4660+ DistroSeriesStatus.CURRENT)
4661+ self.layer.txn.commit()
4662+
4663+ # Accept and publish the delayed-copy.
4664+ delayed_copy.acceptFromCopy()
4665+ logger = BufferLogger()
4666+ pub_records = delayed_copy.realiseUpload(logger=logger)
4667+
4668+ # Only the source is published and the needed builds are created
4669+ # in the destination archive.
4670+ self.assertEquals(1, len(pub_records))
4671+ [pub_record] = pub_records
4672+ [build] = pub_record.getBuilds()
4673+ self.assertEquals(
4674+ BuildStatus.NEEDSBUILD, build.buildstate)
4675+
4676
4677 def test_suite():
4678 return unittest.TestLoader().loadTestsFromName(__name__)
4679
4680=== modified file 'lib/lp/translations/doc/sourcepackagerelease-translations.txt'
4681--- lib/lp/translations/doc/sourcepackagerelease-translations.txt 2009-07-02 17:16:50 +0000
4682+++ lib/lp/translations/doc/sourcepackagerelease-translations.txt 2009-07-20 15:05:42 +0000
4683@@ -2,66 +2,85 @@
4684
4685 It's time to check the translation upload function.
4686
4687+We need a test tarball uploaded into librarian to run this test. We
4688+will upload the same sampledata tarball twice, as one public and one
4689+restricted `LibraryFileAlias` object.
4690+
4691 >>> import os.path
4692- >>> import transaction
4693- >>> from canonical.launchpad.database import SourcePackageRelease
4694- >>> from lp.translations.interfaces.translationimportqueue import (
4695- ... ITranslationImportQueue)
4696- >>> from canonical.librarian.interfaces import ILibrarianClient
4697- >>> translation_import_queue = getUtility(ITranslationImportQueue)
4698- >>> client = getUtility(ILibrarianClient)
4699-
4700-We need a test tarball uploaded into librarian to run this test.
4701-
4702 >>> import lp.translations
4703- >>> test_file_name = os.path.join(
4704+ >>> tarball_path = os.path.join(
4705 ... os.path.dirname(lp.translations.__file__),
4706 ... 'doc/sourcepackagerelease-translations.tar.gz')
4707- >>> file = open(test_file_name)
4708- >>> size = len(file.read())
4709- >>> file.seek(0)
4710- >>> alias = client.addFile(
4711+ >>> tarball = open(tarball_path)
4712+ >>> tarball_size = len(tarball.read())
4713+ >>> tarball.seek(0)
4714+
4715+ >>> from canonical.launchpad.interfaces.librarian import (
4716+ ... ILibraryFileAliasSet)
4717+ >>> public_translation = getUtility(ILibraryFileAliasSet).create(
4718 ... name='test.tar.gz',
4719- ... size=size,
4720- ... file=file,
4721+ ... size=tarball_size,
4722+ ... file=tarball,
4723 ... contentType='application/x-gtar')
4724
4725-We need the commit to see the upload.
4726+ >>> tarball.seek(0)
4727+ >>> restricted_translation = getUtility(ILibraryFileAliasSet).create(
4728+ ... name='test.tar.gz',
4729+ ... size=tarball_size,
4730+ ... file=tarball,
4731+ ... contentType='application/x-gtar',
4732+ ... restricted=True)
4733+
4734+Commit, so uploaded contents are available in the current test.
4735
4736 >>> transaction.commit()
4737
4738-Now we do the upload. It's necessary to retrive an ILibraryFileAlias
4739-correspondent to the alias (long) we already have.
4740-
4741- >>> from canonical.launchpad.interfaces import ILibraryFileAliasSet
4742- >>> file_alias = getUtility(ILibraryFileAliasSet)[alias]
4743-
4744+We will use an arbitrary source package release from the sampledata.
4745+
4746+ >>> from canonical.launchpad.database import SourcePackageRelease
4747 >>> spr_test = SourcePackageRelease.get(20)
4748- >>> spr_test.name
4749- u'pmount'
4750-
4751-Before the final upload, the queue should be empty.
4752-
4753+ >>> print spr_test.title
4754+ pmount - 0.1-1
4755+
4756+And the 'katie' celebrity as the user responsible for the translation.
4757+
4758+ >>> from canonical.launchpad.interfaces import ILaunchpadCelebrities
4759+ >>> katie = getUtility(ILaunchpadCelebrities).katie
4760+
4761+Before the final upload, we can see that the translation queue for the
4762+testing source package is empty.
4763+
4764+ >>> from lp.translations.interfaces.translationimportqueue import (
4765+ ... ITranslationImportQueue)
4766+ >>> translation_import_queue = getUtility(ITranslationImportQueue)
4767 >>> translation_import_queue.getAllEntries(
4768 ... target=spr_test.sourcepackage).count()
4769 0
4770
4771- >>> from canonical.launchpad.interfaces import ILaunchpadCelebrities
4772- >>> katie = getUtility(ILaunchpadCelebrities).katie
4773- >>> spr_test.attachTranslationFiles(file_alias, True, katie)
4774-
4775-The commit is needed to see the new entries
4776-
4777- >>> transaction.commit()
4778-
4779-And the queue should have a new entry.
4780-
4781- >>> for entry in translation_import_queue.getAllEntries(
4782- ... target=spr_test.sourcepackage):
4783+Now we bind both uploaded translations, the public and the restricted
4784+ones, to the testing source package.
4785+
4786+ >>> spr_test.attachTranslationFiles(public_translation, True, katie)
4787+
4788+ >>> spr_test.attachTranslationFiles(restricted_translation, True, katie)
4789+
4790+And the queue should have 2 entries, with exactly the same contents.
4791+
4792+ >>> queue_entries = translation_import_queue.getAllEntries(
4793+ ... target=spr_test.sourcepackage)
4794+
4795+ >>> queue_entries.count()
4796+ 2
4797+
4798+ >>> for entry in queue_entries:
4799 ... print entry.path, entry.importer.name
4800 something/en-US.xpi katie
4801 po/es.po katie
4802
4803+Commit, so the uploaded translations become available to the scripts.
4804+
4805+ >>> transaction.commit()
4806+
4807 Now, we need to do the final import. It's done as a two steps procedure.
4808
4809 The first one, approves the import.
4810
4811=== modified file 'lib/lp/translations/interfaces/potmsgset.py'
4812--- lib/lp/translations/interfaces/potmsgset.py 2009-07-17 00:26:05 +0000
4813+++ lib/lp/translations/interfaces/potmsgset.py 2009-07-19 04:41:14 +0000
4814@@ -93,10 +93,16 @@
4815 """
4816
4817 def getCurrentTranslationMessage(potemplate, language, variant=None):
4818- """Returns a TranslationMessage marked as being currently used."""
4819+ """Returns a TranslationMessage marked as being currently used.
4820+
4821+ Diverged messages are preferred.
4822+ """
4823
4824 def getImportedTranslationMessage(potemplate, language, variant=None):
4825- """Returns a TranslationMessage as imported from the package."""
4826+ """Returns a TranslationMessage as imported from the package.
4827+
4828+ Diverged messages are preferred.
4829+ """
4830
4831 def getSharedTranslationMessage(language, variant=None):
4832 """Returns a shared TranslationMessage."""
4833
4834=== modified file 'lib/lp/translations/model/potmsgset.py'
4835--- lib/lp/translations/model/potmsgset.py 2009-07-17 00:26:05 +0000
4836+++ lib/lp/translations/model/potmsgset.py 2009-07-19 04:41:14 +0000
4837@@ -210,10 +210,13 @@
4838 'There is already a translation message in our database.')
4839 return DummyTranslationMessage(pofile, self)
4840
4841- def _getUsedTranslationMessage(
4842- self, potemplate, language, variant, current=True):
4843+ def _getUsedTranslationMessage(self, potemplate, language, variant,
4844+ current=True):
4845 """Get a translation message which is either used in
4846- Launchpad (current=True) or in an import (current=False)."""
4847+ Launchpad (current=True) or in an import (current=False).
4848+
4849+ Prefers a diverged message if present.
4850+ """
4851 # Change 'is_current IS TRUE' and 'is_imported IS TRUE' conditions
4852 # carefully: they need to match condition specified in indexes,
4853 # or Postgres may not pick them up (in complicated queries,
4854@@ -239,17 +242,12 @@
4855 clauses.append(
4856 'TranslationMessage.variant=%s' % sqlvalues(variant))
4857
4858- # This returns at most two messages:
4859- # 1. a current translation for this particular potemplate.
4860- # 2. a shared current translation for this.
4861- messages = list(TranslationMessage.select(
4862- ' AND '.join(clauses),
4863- orderBy=['-COALESCE(potemplate, -1)']))
4864- if len(messages) > 0:
4865- return messages[0]
4866- else:
4867- return None
4868+ order_by = '-COALESCE(potemplate, -1)'
4869
4870+ # This should find at most two messages: zero or one shared
4871+ # message, and zero or one diverged one.
4872+ return TranslationMessage.selectFirst(
4873+ ' AND '.join(clauses), orderBy=[order_by])
4874
4875 def getCurrentTranslationMessage(self, potemplate,
4876 language, variant=None):
4877@@ -481,12 +479,8 @@
4878 translations[pluralform] is not None):
4879 translation = translations[pluralform]
4880 # Find or create a POTranslation for the specified text
4881- try:
4882- potranslations[pluralform] = (
4883- POTranslation.byTranslation(translation))
4884- except SQLObjectNotFound:
4885- potranslations[pluralform] = (
4886- POTranslation(translation=translation))
4887+ potranslations[pluralform] = (
4888+ POTranslation.getOrCreateTranslation(translation))
4889 else:
4890 potranslations[pluralform] = None
4891 return potranslations
4892
4893=== modified file 'lib/lp/translations/model/translationmessage.py'
4894--- lib/lp/translations/model/translationmessage.py 2009-07-17 00:26:05 +0000
4895+++ lib/lp/translations/model/translationmessage.py 2009-07-19 04:41:14 +0000
4896@@ -14,6 +14,7 @@
4897 import pytz
4898
4899 from sqlobject import BoolCol, ForeignKey, SQLObjectNotFound, StringCol
4900+from storm.expr import And
4901 from storm.locals import SQL
4902 from storm.store import Store
4903 from zope.interface import implements
4904@@ -85,7 +86,7 @@
4905 return self.potmsgset.makeHTMLID('_'.join(elements))
4906
4907 def setPOFile(self, pofile):
4908- """See `ITransationMessage`."""
4909+ """See `ITranslationMessage`."""
4910 self.browser_pofile = pofile
4911
4912
4913@@ -449,6 +450,28 @@
4914 # suggestions will always be shared.
4915 self.destroySelf()
4916
4917+ def findIdenticalMessage(self, target_potmsgset, target_potemplate):
4918+ """See `ITranslationMessage`."""
4919+ store = Store.of(self)
4920+
4921+ forms_match = (TranslationMessage.msgstr0 == self.msgstr0)
4922+ for form in xrange(1, TranslationConstants.MAX_PLURAL_FORMS):
4923+ form_name = 'msgstr%d' % form
4924+ form_value = getattr(self, form_name)
4925+ forms_match = And(
4926+ forms_match,
4927+ getattr(TranslationMessage, form_name) == form_value)
4928+
4929+ twins = store.find(TranslationMessage, And(
4930+ TranslationMessage.potmsgset == target_potmsgset,
4931+ TranslationMessage.potemplate == target_potemplate,
4932+ TranslationMessage.language == self.language,
4933+ TranslationMessage.variant == self.variant,
4934+ TranslationMessage.id != self.id,
4935+ forms_match))
4936+
4937+ return twins.order_by(TranslationMessage.id).first()
4938+
4939
4940 class TranslationMessageSet:
4941 """See `ITranslationMessageSet`."""
4942
4943=== modified file 'lib/lp/translations/scripts/message_sharing_migration.py'
4944--- lib/lp/translations/scripts/message_sharing_migration.py 2009-07-17 00:26:05 +0000
4945+++ lib/lp/translations/scripts/message_sharing_migration.py 2009-07-19 04:41:14 +0000
4946@@ -2,11 +2,7 @@
4947 # GNU Affero General Public License version 3 (see the file LICENSE).
4948
4949 __metaclass__ = type
4950-__all__ = [
4951- 'MessageSharingMerge',
4952- 'merge_potmsgsets',
4953- 'merge_translationmessages',
4954- ]
4955+__all__ = [ 'MessageSharingMerge' ]
4956
4957
4958 from zope.component import getUtility
4959@@ -90,106 +86,62 @@
4960 merge_pofiletranslators(item.potmsgset, representative_template)
4961
4962
4963-def merge_potmsgsets(potemplates):
4964- """Merge POTMsgSets for given sequence of sharing templates."""
4965-
4966- # Map each POTMsgSet key (context, msgid, plural) to its
4967- # representative POTMsgSet.
4968- representatives = {}
4969-
4970- # Map each representative POTMsgSet to a list of subordinate
4971- # POTMsgSets it represents.
4972- subordinates = {}
4973-
4974- # Map each representative POTMsgSet to its representative
4975- # POTemplate.
4976- representative_templates = {}
4977-
4978- # Figure out representative potmsgsets and their subordinates. Go
4979- # through the templates, starting at the most representative and
4980- # moving towards the least representative. For any unique potmsgset
4981- # key we find, the first POTMsgSet is the representative one.
4982- order_check = OrderingCheck(
4983- cmp=getUtility(IPOTemplateSet).compareSharingPrecedence)
4984- for template in potemplates:
4985- order_check.check(template)
4986- for potmsgset in template.getPOTMsgSets(False):
4987- key = get_potmsgset_key(potmsgset)
4988- if key not in representatives:
4989- representatives[key] = potmsgset
4990- representative_templates[potmsgset] = template
4991- representative = representatives[key]
4992- if representative in subordinates:
4993- subordinates[representative].append(potmsgset)
4994- else:
4995- subordinates[representative] = []
4996-
4997- for representative, potmsgsets in subordinates.iteritems():
4998- # Merge each subordinate POTMsgSet into its representative.
4999- seen_potmsgsets = set([representative])
5000- for subordinate in potmsgsets:
The diff has been truncated for viewing.