Merge lp:~julian-edwards/launchpad/publish-copy-archives-bug-520520-publish-distro into lp:launchpad/db-devel

Proposed by Julian Edwards
Status: Rejected
Rejected by: Julian Edwards
Proposed branch: lp:~julian-edwards/launchpad/publish-copy-archives-bug-520520-publish-distro
Merge into: lp:launchpad/db-devel
Diff against target: 1553 lines (+343/-460)
33 files modified
lib/canonical/launchpad/doc/product-update-remote-product.txt (+1/-6)
lib/lp/bugs/doc/bugzilla-import.txt (+2/-6)
lib/lp/bugs/doc/externalbugtracker.txt (+3/-27)
lib/lp/code/browser/branch.py (+3/-3)
lib/lp/code/doc/xmlrpc-branch-puller.txt (+1/-16)
lib/lp/code/enums.py (+1/-1)
lib/lp/code/interfaces/branchpuller.py (+0/-6)
lib/lp/code/interfaces/codehosting.py (+0/-14)
lib/lp/code/model/branchpuller.py (+0/-22)
lib/lp/code/model/tests/test_branchjob.py (+5/-24)
lib/lp/code/model/tests/test_branchpuller.py (+0/-82)
lib/lp/code/model/tests/test_codeimport.py (+9/-0)
lib/lp/code/model/tests/test_codeimportjob.py (+2/-1)
lib/lp/code/xmlrpc/codehosting.py (+1/-43)
lib/lp/code/xmlrpc/tests/test_codehosting.py (+0/-100)
lib/lp/codehosting/codeimport/tests/test_worker.py (+25/-0)
lib/lp/codehosting/codeimport/worker.py (+15/-5)
lib/lp/codehosting/inmemory.py (+0/-23)
lib/lp/soyuz/scripts/publishdistro.py (+19/-6)
lib/lp/soyuz/scripts/tests/test_publishdistro.py (+52/-1)
lib/lp/testing/faketransaction.py (+35/-0)
lib/lp/translations/browser/poexportrequest.py (+35/-0)
lib/lp/translations/browser/tests/test_baseexportview.py (+68/-2)
lib/lp/translations/doc/distroseries-translations-copy.txt (+2/-7)
lib/lp/translations/doc/gettext-check-messages.txt (+14/-22)
lib/lp/translations/doc/poexport-queue.txt (+6/-4)
lib/lp/translations/doc/poexport-request-productseries.txt (+2/-5)
lib/lp/translations/doc/poexport-request.txt (+3/-6)
lib/lp/translations/doc/poimport.txt (+2/-10)
lib/lp/translations/interfaces/poexportrequest.py (+7/-1)
lib/lp/translations/model/poexportrequest.py (+23/-7)
lib/lp/translations/scripts/tests/test_copy_distroseries_translations.py (+2/-10)
lib/lp/translations/templates/translations-export.pt (+5/-0)
To merge this branch: bzr merge lp:~julian-edwards/launchpad/publish-copy-archives-bug-520520-publish-distro
Reviewer: Canonical Launchpad Engineering
Status: Pending
Review via email: mp+19987@code.launchpad.net

Commit message

Add support for publishing copy archives to the publish-distro script.

Julian Edwards (julian-edwards) wrote:

This is a simple branch that adds support for publishing copy archives to
the publish-distro script.

It adds a --copy-archive option, which causes all archives with
ArchivePurpose.COPY to be published, provided their "publish" flag is set.

A simple test case is also added.
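
For illustration only, here is a minimal, self-contained sketch (plain
optparse, not Launchpad's script setup) of the mutual-exclusion check this
branch extends. The flag names and the error message mirror the diff below;
the parse_archive_mode helper is purely illustrative and not part of the
branch.

    from optparse import OptionParser

    EXCLUSIVE_FLAGS = [
        ("--partner", "partner"),
        ("--ppa", "ppa"),
        ("--private-ppa", "private_ppa"),
        ("--primary-debug", "primary_debug"),
        ("--copy-archive", "copy_archive"),
    ]

    def parse_archive_mode(argv):
        """Return the requested archive mode, enforcing mutual exclusion."""
        parser = OptionParser()
        for flag, dest in EXCLUSIVE_FLAGS:
            parser.add_option(
                flag, action="store_true", dest=dest, default=False)
        options, _ = parser.parse_args(argv)
        selected = [
            dest for _, dest in EXCLUSIVE_FLAGS if getattr(options, dest)]
        if len(selected) > 1:
            raise SystemExit(
                "Can only specify one of partner, ppa, private-ppa, "
                "copy-archive and primary-debug.")
        return selected[0] if selected else "primary"

    # parse_archive_mode(["--copy-archive"])          -> "copy_archive"
    # parse_archive_mode([])                          -> "primary"
    # parse_archive_mode(["--ppa", "--copy-archive"]) -> SystemExit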

Preview Diff

1=== modified file 'lib/canonical/launchpad/doc/product-update-remote-product.txt'
2--- lib/canonical/launchpad/doc/product-update-remote-product.txt 2009-06-12 16:36:02 +0000
3+++ lib/canonical/launchpad/doc/product-update-remote-product.txt 2010-02-24 10:59:32 +0000
4@@ -17,12 +17,7 @@
5 ... FakeLogger, QuietFakeLogger)
6 >>> from canonical.launchpad.scripts.updateremoteproduct import (
7 ... RemoteProductUpdater)
8- >>> class FakeTransaction:
9- ... def __init__(self, log_calls=False):
10- ... self.log_calls = log_calls
11- ... def commit(self):
12- ... if self.log_calls:
13- ... print "COMMIT"
14+ >>> from lp.testing.faketransaction import FakeTransaction
15 >>> updater = RemoteProductUpdater(FakeTransaction(), QuietFakeLogger())
16
17
18
19=== modified file 'lib/lp/bugs/doc/bugzilla-import.txt'
20--- lib/lp/bugs/doc/bugzilla-import.txt 2009-06-12 16:36:02 +0000
21+++ lib/lp/bugs/doc/bugzilla-import.txt 2010-02-24 10:59:32 +0000
22@@ -522,12 +522,8 @@
23 The Bugzilla duplicate bugs table can be used to mark the
24 corresponding Launchpad bugs as duplicates too:
25
26- >>> class FakeTransactionManager:
27- ... def begin(self):
28- ... pass
29- ... abort = commit = begin
30- ...
31- >>> bz.processDuplicates(FakeTransactionManager())
32+ >>> from lp.testing.faketransaction import FakeTransaction
33+ >>> bz.processDuplicates(FakeTransaction())
34
35 Now check that the bugs have been marked duplicate:
36
37
38=== modified file 'lib/lp/bugs/doc/externalbugtracker.txt'
39--- lib/lp/bugs/doc/externalbugtracker.txt 2010-02-19 12:05:10 +0000
40+++ lib/lp/bugs/doc/externalbugtracker.txt 2010-02-24 10:59:32 +0000
41@@ -27,17 +27,9 @@
42 ... print "initializeRemoteBugDB() called: %r" % (
43 ... remote_bug_ids, )
44
45- >>> class FakeTransaction:
46- ... """Transaction class to track transaction boundaries."""
47- ... def commit(self):
48- ... print "COMMIT"
49- ... def abort(self):
50- ... print "ABORT"
51- ... def begin(self):
52- ... print "BEGIN"
53-
54+ >>> from lp.testing.faketransaction import FakeTransaction
55 >>> from lp.bugs.scripts.checkwatches import BugWatchUpdater
56- >>> bug_watch_updater = BugWatchUpdater(FakeTransaction())
57+ >>> bug_watch_updater = BugWatchUpdater(FakeTransaction(log_calls=True))
58 >>> bug_watch_updater.updateBugWatches(
59 ... InitializingExternalBugTracker(), [])
60 COMMIT
61@@ -305,7 +297,7 @@
62 and the remote system is never asked about product information.
63
64 >>> bug_watch_updater = BugWatchUpdater(
65- ... FakeTransaction(), syncable_gnome_products=[])
66+ ... FakeTransaction(log_calls=True), syncable_gnome_products=[])
67
68 >>> trackers_and_watches = get_trackers_and_watches(
69 ... gnome_bugzilla, bug_watches)
70@@ -506,16 +498,12 @@
71 ... external_bugtracker, bug_watches)
72 COMMIT
73 initializeRemoteBugDB() called: [u'1', u'2', u'3', u'4']
74- BEGIN
75 getRemoteStatus() called: u'1'
76 COMMIT
77- BEGIN
78 getRemoteStatus() called: u'2'
79 COMMIT
80- BEGIN
81 getRemoteStatus() called: u'3'
82 COMMIT
83- BEGIN
84 getRemoteStatus() called: u'4'
85 COMMIT
86
87@@ -549,14 +537,10 @@
88 last_checked: 2007-03-17 15:...:...
89 getModifiedRemoteBugs() called: [u'1', u'2', u'3', u'4']
90 initializeRemoteBugDB() called: [u'1', u'4']
91- BEGIN
92 getRemoteStatus() called: u'1'
93 COMMIT
94- BEGIN
95 getRemoteStatus() called: u'4'
96 COMMIT
97- BEGIN
98- BEGIN
99
100 The bug watches that are deemed as not being modified are still marked
101 as being checked.
102@@ -604,16 +588,12 @@
103 last_checked: 2007-03-16 15:...:...
104 getModifiedRemoteBugs() called: [u'1', u'4']
105 initializeRemoteBugDB() called: [u'1', u'2', u'3', u'4']
106- BEGIN
107 getRemoteStatus() called: u'1'
108 COMMIT
109- BEGIN
110 getRemoteStatus() called: u'2'
111 COMMIT
112- BEGIN
113 getRemoteStatus() called: u'3'
114 COMMIT
115- BEGIN
116 getRemoteStatus() called: u'4'
117 COMMIT
118
119@@ -631,16 +611,12 @@
120 ... TimeUnknownExternalBugTracker(), bug_watches)
121 COMMIT
122 initializeRemoteBugDB() called: [u'1', u'2', u'3', u'4']
123- BEGIN
124 getRemoteStatus() called: u'1'
125 COMMIT
126- BEGIN
127 getRemoteStatus() called: u'2'
128 COMMIT
129- BEGIN
130 getRemoteStatus() called: u'3'
131 COMMIT
132- BEGIN
133 getRemoteStatus() called: u'4'
134 COMMIT
135
136
137=== modified file 'lib/lp/code/browser/branch.py'
138--- lib/lp/code/browser/branch.py 2010-02-24 10:18:16 +0000
139+++ lib/lp/code/browser/branch.py 2010-02-24 10:59:32 +0000
140@@ -514,10 +514,10 @@
141
142 def iconForCodeImportResultStatus(self, status):
143 """The icon to represent the `CodeImportResultStatus` `status`."""
144- if status in CodeImportResultStatus.successes:
145+ if status == CodeImportResultStatus.SUCCESS_PARTIAL:
146+ return "/@@/yes-gray"
147+ elif status in CodeImportResultStatus.successes:
148 return "/@@/yes"
149- elif status == CodeImportResultStatus.SUCCESS_PARTIAL:
150- return "/@@/yes-gray"
151 else:
152 return "/@@/no"
153
154
155=== modified file 'lib/lp/code/doc/xmlrpc-branch-puller.txt'
156--- lib/lp/code/doc/xmlrpc-branch-puller.txt 2009-10-22 11:55:51 +0000
157+++ lib/lp/code/doc/xmlrpc-branch-puller.txt 2010-02-24 10:59:32 +0000
158@@ -28,19 +28,4 @@
159 True
160
161 The IBranchPuller interface defines some methods, for which see the unit
162-tests. To allow a minimal test here, we call getBranchPullQueue,
163-which will return an empty list.
164-
165- >>> from lp.code.enums import BranchType
166- >>> branch_puller.getBranchPullQueue(BranchType.HOSTED.name)
167- []
168-
169-This remains true when it is accessed over XMLRPC.
170-
171- >>> import xmlrpclib
172- >>> from canonical.functional import XMLRPCTestTransport
173- >>> puller = xmlrpclib.ServerProxy(
174- ... 'http://xmlrpc-private.launchpad.dev:8087/branch_puller',
175- ... transport=XMLRPCTestTransport())
176- >>> puller.getBranchPullQueue(BranchType.HOSTED.name)
177- []
178+tests.
179
180=== modified file 'lib/lp/code/enums.py'
181--- lib/lp/code/enums.py 2010-02-17 04:28:48 +0000
182+++ lib/lp/code/enums.py 2010-02-24 10:59:32 +0000
183@@ -871,7 +871,7 @@
184 job, or the deletion of a CodeImport which had a running job.
185 """)
186
187- successes = [SUCCESS, SUCCESS_NOCHANGE]
188+ successes = [SUCCESS, SUCCESS_NOCHANGE, SUCCESS_PARTIAL]
189
190
191 class CodeReviewVote(DBEnumeratedType):
192
193=== modified file 'lib/lp/code/interfaces/branchpuller.py'
194--- lib/lp/code/interfaces/branchpuller.py 2009-06-30 16:56:07 +0000
195+++ lib/lp/code/interfaces/branchpuller.py 2010-02-24 10:59:32 +0000
196@@ -23,12 +23,6 @@
197 MIRROR_TIME_INCREMENT = Attribute(
198 "How frequently we mirror branches.")
199
200- def getPullQueue(branch_type):
201- """Return a queue of branches to mirror using the puller.
202-
203- :param branch_type: A value from the `BranchType` enum.
204- """
205-
206 def acquireBranchToPull():
207 """Return a Branch to pull and mark it as mirror-started.
208
209
210=== modified file 'lib/lp/code/interfaces/codehosting.py'
211--- lib/lp/code/interfaces/codehosting.py 2009-06-25 04:06:00 +0000
212+++ lib/lp/code/interfaces/codehosting.py 2010-02-24 10:59:32 +0000
213@@ -58,20 +58,6 @@
214 Published at 'branch_puller' on the private XML-RPC server.
215 """
216
217- def getBranchPullQueue(branch_type):
218- """Get the list of branches to be mirrored.
219-
220- :param branch_type: One of 'HOSTED', 'MIRRORED', or 'IMPORTED'.
221-
222- :raise UnknownBranchTypeError: if the branch type is unrecognized.
223-
224- :returns: a list of (branch_id, pull_url, unique_name, default_branch)
225- 4-tuples. branch_id is the database id of the branch, pull_url is
226- where to pull from, unique_name is the unique_name of the branch
227- and default_branch is the default stacked on branch for the
228- branch's target.
229- """
230-
231 def acquireBranchToPull():
232 """Return a Branch to pull and mark it as mirror-started.
233
234
235=== modified file 'lib/lp/code/model/branchpuller.py'
236--- lib/lp/code/model/branchpuller.py 2009-08-04 05:14:32 +0000
237+++ lib/lp/code/model/branchpuller.py 2010-02-24 10:59:32 +0000
238@@ -9,17 +9,13 @@
239
240 from datetime import timedelta
241
242-from storm.expr import LeftJoin, Join
243 from zope.component import getUtility
244 from zope.interface import implements
245
246 from canonical.database.constants import UTC_NOW
247 from lp.code.enums import BranchType
248 from lp.code.model.branch import Branch
249-from lp.code.interfaces.branch import BranchTypeError
250 from lp.code.interfaces.branchpuller import IBranchPuller
251-from lp.registry.model.person import Owner
252-from lp.registry.model.product import Product
253 from canonical.launchpad.webapp.interfaces import (
254 IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR)
255
256@@ -32,24 +28,6 @@
257 MAXIMUM_MIRROR_FAILURES = 5
258 MIRROR_TIME_INCREMENT = timedelta(hours=6)
259
260- def getPullQueue(self, branch_type):
261- """See `IBranchPuller`."""
262- if branch_type == BranchType.REMOTE:
263- raise BranchTypeError("No pull queue for REMOTE branches.")
264- store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
265- # Prejoin on owner and product to preserve existing behaviour.
266- # XXX: JonathanLange 2009-03-22 spec=package-branches: This prejoin is
267- # inappropriate in the face of package branches.
268- prejoin = store.using(
269- Branch,
270- LeftJoin(Product, Branch.product == Product.id),
271- Join(Owner, Branch.owner == Owner.id))
272- return prejoin.find(
273- Branch,
274- Branch.branch_type == branch_type,
275- Branch.next_mirror_time <= UTC_NOW).order_by(
276- Branch.next_mirror_time)
277-
278 def acquireBranchToPull(self):
279 """See `IBranchPuller`."""
280 store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
281
282=== modified file 'lib/lp/code/model/tests/test_branchjob.py'
283--- lib/lp/code/model/tests/test_branchjob.py 2010-02-22 12:26:18 +0000
284+++ lib/lp/code/model/tests/test_branchjob.py 2010-02-24 10:59:32 +0000
285@@ -867,22 +867,6 @@
286 self.assertFalse(job.generateDiffs())
287
288
289-def all_dirs(directory):
290- """Generate all parent directories and the directory itself.
291-
292- Passing 'a/b/c/d' produces ['a', 'a/b', 'a/b/c', 'a/b/c/d'].
293- """
294- if directory == '':
295- return []
296- dirs = [directory]
297- while(1):
298- head, tail = os.path.split(directory)
299- if head == '':
300- return reversed(dirs)
301- directory = head
302- dirs.append(directory)
303-
304-
305 class TestRosettaUploadJob(TestCaseWithFactory):
306 """Tests for RosettaUploadJob."""
307
308@@ -937,21 +921,18 @@
309 seen_dirs = set()
310 for file_pair in files:
311 file_name = file_pair[0]
312- dname, fname = os.path.split(file_name)
313- for adir in all_dirs(dname):
314- if adir in seen_dirs:
315- continue
316- self.tree.bzrdir.root_transport.mkdir(adir)
317- self.tree.add(adir)
318- seen_dirs.add(adir)
319 try:
320 file_content = file_pair[1]
321 if file_content is None:
322 raise IndexError # Same as if missing.
323 except IndexError:
324 file_content = self.factory.getUniqueString()
325+ dname = os.path.dirname(file_name)
326+ self.tree.bzrdir.root_transport.clone(dname).create_prefix()
327 self.tree.bzrdir.root_transport.put_bytes(file_name, file_content)
328- self.tree.add(file_name)
329+ if len(files) > 0:
330+ self.tree.smart_add(
331+ [self.tree.abspath(file_pair[0]) for file_pair in files])
332 if commit_message is None:
333 commit_message = self.factory.getUniqueString('commit')
334 revision_id = self.tree.commit(commit_message)
335
336=== modified file 'lib/lp/code/model/tests/test_branchpuller.py'
337--- lib/lp/code/model/tests/test_branchpuller.py 2009-09-28 23:51:54 +0000
338+++ lib/lp/code/model/tests/test_branchpuller.py 2010-02-24 10:59:32 +0000
339@@ -17,7 +17,6 @@
340 from canonical.database.constants import UTC_NOW
341 from canonical.testing.layers import DatabaseFunctionalLayer
342 from lp.code.enums import BranchType
343-from lp.code.interfaces.branch import BranchTypeError
344 from lp.code.interfaces.branchpuller import IBranchPuller
345 from lp.testing import TestCaseWithFactory, login_person
346
347@@ -78,37 +77,6 @@
348 branch.requestMirror()
349 self.assertEqual(UTC_NOW, branch.next_mirror_time)
350
351- def test_requestMirrorDuringPull(self):
352- """Branches can have mirrors requested while they are being mirrored.
353- If so, they should not be removed from the pull queue when the mirror
354- is complete.
355- """
356- # We run these in separate transactions so as to have the times set to
357- # different values. This is closer to what happens in production.
358- branch = self.makeAnyBranch()
359- branch.startMirroring()
360- self.assertEqual(
361- [], list(self.branch_puller.getPullQueue(branch.branch_type)))
362- branch.requestMirror()
363- self.assertEqual(
364- [branch],
365- list(self.branch_puller.getPullQueue(branch.branch_type)))
366- branch.mirrorComplete('rev1')
367- self.assertEqual(
368- [branch],
369- list(self.branch_puller.getPullQueue(branch.branch_type)))
370-
371- def test_startMirroringRemovesFromPullQueue(self):
372- # Starting a mirror removes the branch from the pull queue.
373- branch = self.makeAnyBranch()
374- branch.requestMirror()
375- self.assertEqual(
376- set([branch]),
377- set(self.branch_puller.getPullQueue(branch.branch_type)))
378- branch.startMirroring()
379- self.assertEqual(
380- set(), set(self.branch_puller.getPullQueue(branch.branch_type)))
381-
382 def test_mirroringResetsMirrorRequest(self):
383 """Mirroring branches resets their mirror request times."""
384 branch = self.makeAnyBranch()
385@@ -129,44 +97,6 @@
386 self.assertEqual(1, branch.mirror_failures)
387 self.assertEqual(None, branch.next_mirror_time)
388
389- def test_pullQueueEmpty(self):
390- """Branches with no next_mirror_time are not in the pull queue."""
391- branch = self.makeAnyBranch()
392- self.assertIs(None, branch.next_mirror_time)
393- self.assertEqual(
394- [], list(self.branch_puller.getPullQueue(self.branch_type)))
395-
396- def test_pastNextMirrorTimeInQueue(self):
397- """Branches with next_mirror_time in the past are mirrored."""
398- transaction.begin()
399- branch = self.makeAnyBranch()
400- branch.requestMirror()
401- queue = self.branch_puller.getPullQueue(branch.branch_type)
402- self.assertEqual([branch], list(queue))
403-
404- def test_futureNextMirrorTimeInQueue(self):
405- """Branches with next_mirror_time in the future are not mirrored."""
406- transaction.begin()
407- branch = removeSecurityProxy(self.makeAnyBranch())
408- tomorrow = self.getNow() + timedelta(1)
409- branch.next_mirror_time = tomorrow
410- branch.syncUpdate()
411- transaction.commit()
412- self.assertEqual(
413- [], list(self.branch_puller.getPullQueue(branch.branch_type)))
414-
415- def test_pullQueueOrder(self):
416- """Pull queue has the oldest mirror request times first."""
417- branches = []
418- for i in range(3):
419- branch = removeSecurityProxy(self.makeAnyBranch())
420- branch.next_mirror_time = self.getNow() - timedelta(hours=i+1)
421- branch.sync()
422- branches.append(branch)
423- self.assertEqual(
424- list(reversed(branches)),
425- list(self.branch_puller.getPullQueue(self.branch_type)))
426-
427
428 class TestMirroringForMirroredBranches(TestMirroringForHostedBranches):
429
430@@ -231,18 +161,6 @@
431 branch_type = BranchType.IMPORTED
432
433
434-class TestRemoteBranches(TestCaseWithFactory):
435-
436- layer = DatabaseFunctionalLayer
437-
438- def test_raises_branch_type_error(self):
439- # getPullQueue raises `BranchTypeError` if passed BranchType.REMOTE.
440- # It's impossible to mirror remote branches, so we shouldn't even try.
441- puller = getUtility(IBranchPuller)
442- self.assertRaises(
443- BranchTypeError, puller.getPullQueue, BranchType.REMOTE)
444-
445-
446 class AcquireBranchToPullTests:
447 """Tests for acquiring branches to pull.
448
449
450=== modified file 'lib/lp/code/model/tests/test_codeimport.py'
451--- lib/lp/code/model/tests/test_codeimport.py 2010-02-22 10:31:50 +0000
452+++ lib/lp/code/model/tests/test_codeimport.py 2010-02-24 10:59:32 +0000
453@@ -432,6 +432,15 @@
454 code_import, CodeImportResultStatus.SUCCESS_NOCHANGE)
455 self.assertEqual(0, code_import.consecutive_failure_count)
456
457+ def test_consecutive_failure_count_succeed_succeed_partial(self):
458+ # A code import that has succeeded then succeeded with no changes has
459+ # a consecutive_failure_count of 0.
460+ code_import = self.factory.makeCodeImport()
461+ self.succeedImport(code_import)
462+ self.succeedImport(
463+ code_import, CodeImportResultStatus.SUCCESS_NOCHANGE)
464+ self.assertEqual(0, code_import.consecutive_failure_count)
465+
466 def test_consecutive_failure_count_fail_fail(self):
467 # A code import that has failed twice has a consecutive_failure_count
468 # of 2.
469
470=== modified file 'lib/lp/code/model/tests/test_codeimportjob.py'
471--- lib/lp/code/model/tests/test_codeimportjob.py 2010-02-24 10:18:16 +0000
472+++ lib/lp/code/model/tests/test_codeimportjob.py 2010-02-24 10:59:32 +0000
473@@ -946,7 +946,8 @@
474 code_import = job.code_import
475 self.assertTrue(code_import.date_last_successful is None)
476 getUtility(ICodeImportJobWorkflow).finishJob(job, status, None)
477- if status in CodeImportResultStatus.successes:
478+ if status in [CodeImportResultStatus.SUCCESS,
479+ CodeImportResultStatus.SUCCESS_NOCHANGE]:
480 self.assertTrue(code_import.date_last_successful is not None)
481 else:
482 self.assertTrue(code_import.date_last_successful is None)
483
484=== modified file 'lib/lp/code/xmlrpc/codehosting.py'
485--- lib/lp/code/xmlrpc/codehosting.py 2009-11-23 22:39:21 +0000
486+++ lib/lp/code/xmlrpc/codehosting.py 2010-02-24 10:59:32 +0000
487@@ -12,7 +12,6 @@
488
489
490 import datetime
491-import urllib
492
493 import pytz
494
495@@ -25,8 +24,7 @@
496
497 from canonical.launchpad.ftests import login_person, logout
498 from lp.code.enums import BranchType
499-from lp.code.interfaces.branch import (
500- BranchCreationException, UnknownBranchTypeError)
501+from lp.code.interfaces.branch import BranchCreationException
502 from lp.code.interfaces.branchlookup import IBranchLookup
503 from lp.code.interfaces.branchnamespace import (
504 InvalidNamespace, lookup_branch_namespace, split_unique_name)
505@@ -56,46 +54,6 @@
506
507 implements(IBranchPuller)
508
509- def _getBranchPullInfo(self, branch):
510- """Return information the branch puller needs to pull this branch.
511-
512- This is outside of the IBranch interface so that the authserver can
513- access the information without logging in as a particular user.
514-
515- :return: (id, url, unique_name, default_stacked_on_url), where 'id'
516- is the branch database ID, 'url' is the URL to pull from,
517- 'unique_name' is the `unique_name` property and
518- 'default_stacked_on_url' is the URL of the branch to stack on by
519- default (normally of the form '/~foo/bar/baz'). If there is no
520- default stacked-on branch, then it's ''.
521- """
522- branch = removeSecurityProxy(branch)
523- if branch.branch_type == BranchType.REMOTE:
524- raise AssertionError(
525- 'Remote branches should never be in the pull queue.')
526- default_branch = branch.target.default_stacked_on_branch
527- if default_branch is None:
528- default_branch = ''
529- elif (branch.branch_type == BranchType.MIRRORED
530- and default_branch.private):
531- default_branch = ''
532- else:
533- default_branch = '/' + default_branch.unique_name
534- return (
535- branch.id, branch.getPullURL(), branch.unique_name,
536- default_branch)
537-
538- def getBranchPullQueue(self, branch_type):
539- """See `IBranchPuller`."""
540- try:
541- branch_type = BranchType.items[branch_type]
542- except KeyError:
543- raise UnknownBranchTypeError(
544- 'Unknown branch type: %r' % (branch_type,))
545- branches = getUtility(branchpuller.IBranchPuller).getPullQueue(
546- branch_type)
547- return [self._getBranchPullInfo(branch) for branch in branches]
548-
549 def acquireBranchToPull(self):
550 """See `IBranchPuller`."""
551 branch = getUtility(branchpuller.IBranchPuller).acquireBranchToPull()
552
553=== modified file 'lib/lp/code/xmlrpc/tests/test_codehosting.py'
554--- lib/lp/code/xmlrpc/tests/test_codehosting.py 2010-02-18 03:11:03 +0000
555+++ lib/lp/code/xmlrpc/tests/test_codehosting.py 2010-02-24 10:59:32 +0000
556@@ -399,105 +399,6 @@
557 self.assertFaultEqual(faults.NoBranchWithID(branch_id), fault)
558
559
560-class BranchPullQueueTest(TestCaseWithFactory):
561- """Tests for the pull queue methods of `IBranchPuller`."""
562-
563- def setUp(self):
564- super(BranchPullQueueTest, self).setUp()
565- frontend = self.frontend()
566- self.storage = frontend.getPullerEndpoint()
567- self.factory = frontend.getLaunchpadObjectFactory()
568-
569- def assertBranchQueues(self, hosted, mirrored, imported):
570- expected_hosted = [
571- self.storage._getBranchPullInfo(branch) for branch in hosted]
572- expected_mirrored = [
573- self.storage._getBranchPullInfo(branch) for branch in mirrored]
574- expected_imported = [
575- self.storage._getBranchPullInfo(branch) for branch in imported]
576- self.assertEqual(
577- expected_hosted, self.storage.getBranchPullQueue('HOSTED'))
578- self.assertEqual(
579- expected_mirrored, self.storage.getBranchPullQueue('MIRRORED'))
580- self.assertEqual(
581- expected_imported, self.storage.getBranchPullQueue('IMPORTED'))
582-
583- def test_pullQueuesEmpty(self):
584- """getBranchPullQueue returns an empty list when there are no branches
585- to pull.
586- """
587- self.assertBranchQueues([], [], [])
588-
589- def makeBranchAndRequestMirror(self, branch_type):
590- """Make a branch of the given type and call requestMirror on it."""
591- branch = self.factory.makeAnyBranch(branch_type=branch_type)
592- branch.requestMirror()
593- # The pull queues contain branches that have next_mirror_time strictly
594- # in the past, but requestMirror sets this field to UTC_NOW, so we
595- # push the time back slightly here to get the branch to show up in the
596- # queue.
597- naked_branch = removeSecurityProxy(branch)
598- naked_branch.next_mirror_time -= datetime.timedelta(seconds=1)
599- return branch
600-
601- def test_getBranchPullInfo_no_default_stacked_branch(self):
602- # If there's no default stacked branch for the project that a branch
603- # is on, then _getBranchPullInfo returns (id, url, unique_name, '').
604- branch = self.factory.makeAnyBranch()
605- info = self.storage._getBranchPullInfo(branch)
606- self.assertEqual(
607- (branch.id, branch.getPullURL(), branch.unique_name, ''), info)
608-
609- def test_getBranchPullInfo_default_stacked_branch(self):
610- # If there's a default stacked branch for the project that a branch is
611- # on, then _getBranchPullInfo returns (id, url, unique_name,
612- # default_branch_unique_name).
613- product = self.factory.makeProduct()
614- default_branch = self.factory.enableDefaultStackingForProduct(product)
615- branch = self.factory.makeProductBranch(product=product)
616- info = self.storage._getBranchPullInfo(branch)
617- self.assertEqual(
618- (branch.id, branch.getPullURL(), branch.unique_name,
619- '/' + default_branch.unique_name), info)
620-
621- def test_getBranchPullInfo_private_branch(self):
622- # We don't want to stack mirrored branches onto private branches:
623- # mirrored branches are public by their nature. Thus, if the default
624- # stacked-on branch for the project is private and the branch is
625- # MIRRORED then we don't include the default stacked-on branch's
626- # details in the tuple.
627- product = self.factory.makeProduct()
628- default_branch = self.factory.makeProductBranch(
629- product=product, private=True)
630- self.factory.enableDefaultStackingForProduct(product, default_branch)
631- mirrored_branch = self.factory.makeProductBranch(
632- branch_type=BranchType.MIRRORED, product=product)
633- info = self.storage._getBranchPullInfo(mirrored_branch)
634- self.assertEqual(
635- (mirrored_branch.id, mirrored_branch.getPullURL(),
636- mirrored_branch.unique_name, ''), info)
637-
638- def test_getBranchPullInfo_junk(self):
639- # _getBranchPullInfo returns (id, url, unique_name, '') for junk
640- # branches.
641- branch = self.factory.makePersonalBranch()
642- info = self.storage._getBranchPullInfo(branch)
643- self.assertEqual(
644- (branch.id, branch.getPullURL(), branch.unique_name, ''), info)
645-
646- def test_requestMirrorPutsBranchInQueue_hosted(self):
647- branch = self.makeBranchAndRequestMirror(BranchType.HOSTED)
648- self.assertBranchQueues([branch], [], [])
649-
650- def test_requestMirrorPutsBranchInQueue_mirrored(self):
651- branch = self.makeBranchAndRequestMirror(BranchType.MIRRORED)
652- self.assertBranchQueues([], [branch], [])
653-
654- def test_requestMirrorPutsBranchInQueue_imported(self):
655- branch = self.makeBranchAndRequestMirror(BranchType.IMPORTED)
656- self.assertBranchQueues([], [], [branch])
657-
658-
659 class AcquireBranchToPullTestsViaEndpoint(TestCaseWithFactory,
660 AcquireBranchToPullTests):
661 """Tests for `acquireBranchToPull` method of `IBranchPuller`."""
662@@ -1175,7 +1076,6 @@
663 suite = unittest.TestSuite()
664 puller_tests = unittest.TestSuite(
665 [loader.loadTestsFromTestCase(BranchPullerTest),
666- loader.loadTestsFromTestCase(BranchPullQueueTest),
667 loader.loadTestsFromTestCase(AcquireBranchToPullTestsViaEndpoint),
668 loader.loadTestsFromTestCase(BranchFileSystemTest),
669 ])
670
671=== modified file 'lib/lp/codehosting/codeimport/tests/test_worker.py'
672--- lib/lp/codehosting/codeimport/tests/test_worker.py 2010-02-22 05:37:36 +0000
673+++ lib/lp/codehosting/codeimport/tests/test_worker.py 2010-02-24 10:59:32 +0000
674@@ -254,6 +254,31 @@
675 store._getMirrorURL(self.arbitrary_branch_id),
676 sftp_prefix_noslash + '/' + '%08x' % self.arbitrary_branch_id)
677
678+ def test_all_revisions_saved(self):
679+ # All revisions in the branch's repo are transferred, not just those
680+ # in the ancestry of the tip.
681+ # Consider a branch with two heads in its repo:
682+ # revid
683+ # / \
684+ # revid1 revid2 <- branch tip
685+ # A naive push/pull would just store 'revid' and 'revid2' in the
686+ # branch store -- we need to make sure all three revisions are stored
687+ # and retrieved.
688+ builder = self.make_branch_builder('tree')
689+ revid = builder.build_snapshot(
690+ None, None, [('add', ('', 'root-id', 'directory', ''))])
691+ revid1 = builder.build_snapshot(None, [revid], [])
692+ revid2 = builder.build_snapshot(None, [revid], [])
693+ branch = builder.get_branch()
694+ source_tree = branch.bzrdir.create_workingtree()
695+ store = self.makeBranchStore()
696+ store.push(self.arbitrary_branch_id, source_tree, default_format)
697+ retrieved_tree = store.pull(
698+ self.arbitrary_branch_id, 'pulled', default_format)
699+ self.assertEqual(
700+ set([revid, revid1, revid2]),
701+ set(retrieved_tree.branch.repository.all_revision_ids()))
702+
703
704 class TestImportDataStore(WorkerTest):
705 """Tests for `ImportDataStore`."""
706
707=== modified file 'lib/lp/codehosting/codeimport/worker.py'
708--- lib/lp/codehosting/codeimport/worker.py 2010-02-19 03:32:39 +0000
709+++ lib/lp/codehosting/codeimport/worker.py 2010-02-24 10:59:32 +0000
710@@ -71,20 +71,26 @@
711 """
712 remote_url = self._getMirrorURL(db_branch_id)
713 try:
714- bzr_dir = BzrDir.open(remote_url)
715+ remote_bzr_dir = BzrDir.open(remote_url)
716 except NotBranchError:
717 return BzrDir.create_standalone_workingtree(
718 target_path, required_format)
719 # XXX Tim Penhey 2009-09-18 bug 432217 Automatic upgrade of import
720 # branches disabled. Need an orderly upgrade process.
721- if False and bzr_dir.needs_format_conversion(format=required_format):
722+ if False and remote_bzr_dir.needs_format_conversion(
723+ format=required_format):
724 try:
725- bzr_dir.root_transport.delete_tree('backup.bzr')
726+ remote_bzr_dir.root_transport.delete_tree('backup.bzr')
727 except NoSuchFile:
728 pass
729 upgrade(remote_url, required_format)
730- bzr_dir.sprout(target_path)
731- return BzrDir.open(target_path).open_workingtree()
732+ local_bzr_dir = remote_bzr_dir.sprout(target_path)
733+ # Because of the way we do incremental imports, there may be revisions
734+ # in the branch's repo that are not in the ancestry of the branch tip.
735+ # We need to transfer them too.
736+ local_bzr_dir.open_repository().fetch(
737+ remote_bzr_dir.open_repository())
738+ return local_bzr_dir.open_workingtree()
739
740 def push(self, db_branch_id, bzr_tree, required_format):
741 """Push up `bzr_tree` as the Bazaar branch for `code_import`.
742@@ -101,6 +107,10 @@
743 branch_to = BzrDir.create_branch_and_repo(
744 target_url, format=required_format)
745 pull_result = branch_to.pull(branch_from, overwrite=True)
746+ # Because of the way we do incremental imports, there may be revisions
747+ # in the branch's repo that are not in the ancestry of the branch tip.
748+ # We need to transfer them too.
749+ branch_to.repository.fetch(branch_from.repository)
750 return pull_result.old_revid != pull_result.new_revid
751
752
753
754=== modified file 'lib/lp/codehosting/inmemory.py'
755--- lib/lp/codehosting/inmemory.py 2010-02-19 03:06:12 +0000
756+++ lib/lp/codehosting/inmemory.py 2010-02-24 10:59:32 +0000
757@@ -442,29 +442,6 @@
758 self._branch_set = branch_set
759 self._script_activity_set = script_activity_set
760
761- def _getBranchPullInfo(self, branch):
762- default_branch = ''
763- if branch.product is not None:
764- series = branch.product.development_focus
765- user_branch = series.branch
766- if (user_branch is not None
767- and not (
768- user_branch.private
769- and branch.branch_type == BranchType.MIRRORED)):
770- default_branch = '/' + user_branch.unique_name
771- return (
772- branch.id, branch.getPullURL(), branch.unique_name,
773- default_branch)
774-
775- def getBranchPullQueue(self, branch_type):
776- queue = []
777- branch_type = BranchType.items[branch_type]
778- for branch in self._branch_set:
779- if (branch.branch_type == branch_type
780- and branch.next_mirror_time < UTC_NOW):
781- queue.append(self._getBranchPullInfo(branch))
782- return queue
783-
784 def acquireBranchToPull(self):
785 branches = sorted(
786 [branch for branch in self._branch_set
787
788=== modified file 'lib/lp/soyuz/scripts/publishdistro.py'
789--- lib/lp/soyuz/scripts/publishdistro.py 2009-06-25 04:06:00 +0000
790+++ lib/lp/soyuz/scripts/publishdistro.py 2010-02-24 10:59:32 +0000
791@@ -69,6 +69,11 @@
792 dest="partner", metavar="PARTNER", default=False,
793 help="Run only over the partner archive.")
794
795+ parser.add_option("--copy-archive", action="store_true",
796+ dest="copy_archive", metavar="COPYARCHIVE",
797+ default=False,
798+ help="Run only over the copy archives.")
799+
800 parser.add_option(
801 "--primary-debug", action="store_true", default=False,
802 dest="primary_debug", metavar="PRIMARYDEBUG",
803@@ -103,12 +108,13 @@
804
805 exclusive_options = (
806 options.partner, options.ppa, options.private_ppa,
807- options.primary_debug)
808+ options.primary_debug, options.copy_archive)
809+
810 num_exclusive = [flag for flag in exclusive_options if flag]
811 if len(num_exclusive) > 1:
812 raise LaunchpadScriptFailure(
813- "Can only specify one of partner, ppa, private-ppa and "
814- "primary-debug.")
815+ "Can only specify one of partner, ppa, private-ppa, copy-archive"
816+ " and primary-debug.")
817
818 log.debug(" Distribution: %s" % options.distribution)
819 log.debug(" Publishing: %s" % careful_msg(options.careful_publishing))
820@@ -161,6 +167,13 @@
821 raise LaunchpadScriptFailure(
822 "Could not find DEBUG archive for %s" % distribution.name)
823 archives = [debug_archive]
824+ elif options.copy_archive:
825+ archives = getUtility(IArchiveSet).getArchivesForDistribution(
826+ distribution, purposes=[ArchivePurpose.COPY])
827+ # Fix this to use bool when Storm fixes __nonzero__ on sqlobj
828+ # result sets.
829+ if archives.count() == 0:
830+ raise LaunchpadScriptFailure("Could not find any COPY archives")
831 else:
832 archives = [distribution.main_archive]
833
834@@ -185,9 +198,9 @@
835 try_and_commit("dominating", publisher.B_dominate,
836 options.careful or options.careful_domination)
837
838- # The primary archive uses apt-ftparchive to generate the indexes,
839- # everything else uses the newer internal LP code.
840- if archive.purpose == ArchivePurpose.PRIMARY:
841+ # The primary and copy archives use apt-ftparchive to generate the
842+ # indexes, everything else uses the newer internal LP code.
843+ if archive.purpose in (ArchivePurpose.PRIMARY, ArchivePurpose.COPY):
844 try_and_commit("doing apt-ftparchive", publisher.C_doFTPArchive,
845 options.careful or options.careful_apt)
846 else:
847
848=== modified file 'lib/lp/soyuz/scripts/tests/test_publishdistro.py'
849--- lib/lp/soyuz/scripts/tests/test_publishdistro.py 2010-01-11 05:01:32 +0000
850+++ lib/lp/soyuz/scripts/tests/test_publishdistro.py 2010-02-24 10:59:32 +0000
851@@ -308,6 +308,47 @@
852 self.assertEqual(
853 open(debug_index_path).readlines()[0], 'Package: foo-bin\n')
854
855+ def testPublishCopyArchive(self):
856+ """Run publish-distro in copy archive mode.
857+
858+ It should only publish copy archives.
859+ """
860+ ubuntutest = getUtility(IDistributionSet)['ubuntutest']
861+ cprov = getUtility(IPersonSet).getByName('cprov')
862+ copy_archive_name = 'test-copy-publish'
863+
864+ # The COPY repository path is not created yet.
865+ repo_path = os.path.join(
866+ config.archivepublisher.root,
867+ ubuntutest.name + '-' + copy_archive_name)
868+ self.assertNotExists(repo_path)
869+
870+ copy_archive = getUtility(IArchiveSet).new(
871+ distribution=ubuntutest, owner=cprov, name=copy_archive_name,
872+ purpose=ArchivePurpose.COPY, enabled=True)
873+ # Save some test CPU cycles by avoiding logging in as the user
874+ # necessary to alter the publish flag.
875+ removeSecurityProxy(copy_archive).publish = True
876+
877+ # Publish something.
878+ pub_source = self.getPubSource(
879+ sourcename='baz', filecontent='baz', archive=copy_archive)
880+
881+ # Try a plain PPA run, to ensure the copy archive is not published.
882+ self.runPublishDistro(['--ppa'])
883+
884+ self.assertEqual(pub_source.status, PackagePublishingStatus.PENDING)
885+
886+ # Now publish the copy archives and make sure they are really
887+ # published.
888+ self.runPublishDistro(['--copy-archive'])
889+
890+ self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED)
891+
892+ # Make sure that the files were published in the right place.
893+ pool_path = os.path.join(repo_path, 'pool/main/b/baz/baz_666.dsc')
894+ self.assertExists(pool_path)
895+
896 def testRunWithEmptySuites(self):
897 """Try a publish-distro run on empty suites in careful_apt mode
898
899@@ -347,7 +388,8 @@
900 """Test that some command line options are mutually exclusive."""
901 self.assertRaises(
902 LaunchpadScriptFailure,
903- self.runPublishDistro, ['--ppa', '--partner', '--primary-debug'])
904+ self.runPublishDistro,
905+ ['--ppa', '--partner', '--primary-debug', '--copy-archive'])
906 self.assertRaises(
907 LaunchpadScriptFailure,
908 self.runPublishDistro, ['--ppa', '--partner'])
909@@ -359,10 +401,19 @@
910 self.runPublishDistro, ['--ppa', '--primary-debug'])
911 self.assertRaises(
912 LaunchpadScriptFailure,
913+ self.runPublishDistro, ['--ppa', '--copy-archive'])
914+ self.assertRaises(
915+ LaunchpadScriptFailure,
916 self.runPublishDistro, ['--partner', '--private-ppa'])
917 self.assertRaises(
918 LaunchpadScriptFailure,
919 self.runPublishDistro, ['--partner', '--primary-debug'])
920+ self.assertRaises(
921+ LaunchpadScriptFailure,
922+ self.runPublishDistro, ['--partner', '--copy-archive'])
923+ self.assertRaises(
924+ LaunchpadScriptFailure,
925+ self.runPublishDistro, ['--primary-debug', '--copy-archive'])
926
927
928 def test_suite():
929
930=== added file 'lib/lp/testing/faketransaction.py'
931--- lib/lp/testing/faketransaction.py 1970-01-01 00:00:00 +0000
932+++ lib/lp/testing/faketransaction.py 2010-02-24 10:59:32 +0000
933@@ -0,0 +1,35 @@
934+# Copyright 2010 Canonical Ltd. This software is licensed under the
935+# GNU Affero General Public License version 3 (see the file LICENSE).
936+
937+"""Fake transaction manager."""
938+
939+__metaclass__ = type
940+__all__ = ['FakeTransaction']
941+
942+
943+class FakeTransaction:
944+ """Fake transaction manager.
945+
946+ Use this instead of `transaction` (or the old Zopeless transaction
947+ manager) in tests if you don't really want to commit anything.
948+
949+ Set `log_calls` to True to enable printing of commits and aborts.
950+ """
951+ def __init__(self, log_calls=False):
952+ self.log_calls = log_calls
953+
954+ def _log(self, call):
955+ """Print calls that are being made, if desired."""
956+ if self.log_calls:
957+ print call
958+
959+ def begin(self):
960+ """Pretend to begin a transaction. Does not log."""
961+
962+ def commit(self):
963+ """Pretend to commit."""
964+ self._log("COMMIT")
965+
966+ def abort(self):
967+ """Pretend to roll back."""
968+ self._log("ABORT")
969
970=== modified file 'lib/lp/translations/browser/poexportrequest.py'
971--- lib/lp/translations/browser/poexportrequest.py 2009-07-17 00:26:05 +0000
972+++ lib/lp/translations/browser/poexportrequest.py 2010-02-24 10:59:32 +0000
973@@ -7,10 +7,13 @@
974 __all__ = ['BaseExportView']
975
976
977+from datetime import timedelta
978+
979 from zope.component import getUtility
980
981 from canonical.cachedproperty import cachedproperty
982 from canonical.launchpad import _
983+from canonical.launchpad.webapp.tales import DurationFormatterAPI
984 from lp.translations.interfaces.poexportrequest import (
985 IPOExportRequestSet)
986 from lp.translations.interfaces.potemplate import (
987@@ -29,6 +32,38 @@
988 def uses_translations(self):
989 return self.context.has_current_translation_templates
990
991+ @property
992+ def export_queue_status(self):
993+ """Summary of queue status."""
994+ queue_size = self.request_set.entry_count
995+ estimated_backlog = self.request_set.estimateBacklog()
996+
997+ size_text = self.describeQueueSize(queue_size)
998+ backlog_text = self.describeBacklog(estimated_backlog)
999+
1000+ return " ".join((size_text, backlog_text))
1001+
1002+ def describeQueueSize(self, queue_size):
1003+ """Return string describing the given queue size."""
1004+ if queue_size == 0:
1005+ return "The export queue is currently empty."
1006+ elif queue_size == 1:
1007+ return "There is 1 file request on the export queue."
1008+ else:
1009+ return (
1010+ "There are %d file requests on the export queue."
1011+ % queue_size)
1012+
1013+ def describeBacklog(self, estimated_backlog):
1014+ """Return string describing the current export backlog."""
1015+ threshold = timedelta(minutes=10)
1016+ if estimated_backlog is None or estimated_backlog < threshold:
1017+ return ""
1018+
1019+ formatter = DurationFormatterAPI(estimated_backlog)
1020+ time_string = formatter.approximateduration()
1021+ return "The backlog is approximately %s." % time_string
1022+
1023 def getDefaultFormat(self):
1024 """Overridable: return default file format to use for the export."""
1025 if not IHasTranslationTemplates.providedBy(self.context):
1026
1027=== modified file 'lib/lp/translations/browser/tests/test_baseexportview.py'
1028--- lib/lp/translations/browser/tests/test_baseexportview.py 2009-07-17 02:25:09 +0000
1029+++ lib/lp/translations/browser/tests/test_baseexportview.py 2010-02-24 10:59:32 +0000
1030@@ -1,8 +1,10 @@
1031-# Copyright 2009 Canonical Ltd. This software is licensed under the
1032+# Copyright 2009-2010 Canonical Ltd. This software is licensed under the
1033 # GNU Affero General Public License version 3 (see the file LICENSE).
1034
1035 __metaclass__ = type
1036
1037+from datetime import timedelta
1038+import transaction
1039 import unittest
1040
1041 from canonical.launchpad.webapp.servers import LaunchpadTestRequest
1042@@ -16,6 +18,12 @@
1043 from lp.testing import TestCaseWithFactory
1044
1045
1046+def wipe_queue(queue):
1047+ """Erase all export queue entries."""
1048+ while queue.entry_count > 0:
1049+ queue.popRequest()
1050+
1051+
1052 class BaseExportViewMixin(TestCaseWithFactory):
1053 """Test behaviour of objects subclassing BaseExportView."""
1054
1055@@ -122,7 +130,6 @@
1056 [pofile_sr.id, pofile_es.id, pofile_sr2.id],
1057 translations)
1058
1059-
1060 class TestProductSeries(BaseExportViewMixin):
1061 """Test implementation of BaseExportView on ProductSeries."""
1062
1063@@ -158,9 +165,68 @@
1064 self.container, LaunchpadTestRequest())
1065
1066
1067+class TestPOExportQueueStatusDescriptions(TestCaseWithFactory):
1068+
1069+ layer = ZopelessDatabaseLayer
1070+
1071+ def setUp(self):
1072+ super(TestPOExportQueueStatusDescriptions, self).setUp()
1073+ self.container = self.factory.makeProductSeries()
1074+ self.container.product.official_rosetta = True
1075+ self.view = ProductSeriesTranslationsExportView(
1076+ self.container, LaunchpadTestRequest())
1077+
1078+ def test_describeQueueSize(self):
1079+ self.assertEqual(
1080+ "The export queue is currently empty.",
1081+ self.view.describeQueueSize(0))
1082+
1083+ self.assertEqual(
1084+ "There is 1 file request on the export queue.",
1085+ self.view.describeQueueSize(1))
1086+
1087+ self.assertEqual(
1088+ "There are 2 file requests on the export queue.",
1089+ self.view.describeQueueSize(2))
1090+
1091+ def test_describeBacklog(self):
1092+ backlog = None
1093+ self.assertEqual("", self.view.describeBacklog(backlog).strip())
1094+
1095+ backlog = timedelta(hours=2)
1096+ self.assertEqual(
1097+ "The backlog is approximately two hours.",
1098+ self.view.describeBacklog(backlog).strip())
1099+
1100+ def test_export_queue_status(self):
1101+ self.view.initialize()
1102+ queue = self.view.request_set
1103+ wipe_queue(queue)
1104+
1105+ requester = self.factory.makePerson()
1106+
1107+ size = self.view.describeQueueSize(0)
1108+ backlog = self.view.describeBacklog(None)
1109+ status = "%s %s" % (size, backlog)
1110+ self.assertEqual(
1111+ status.strip(), self.view.export_queue_status.strip())
1112+
1113+ potemplate = self.factory.makePOTemplate()
1114+ queue.addRequest(requester, potemplates=[potemplate])
1115+ transaction.commit()
1116+
1117+ size = self.view.describeQueueSize(1)
1118+ backlog = self.view.describeBacklog(queue.estimateBacklog())
1119+ status = "%s %s" % (size, backlog)
1120+ self.assertEqual(
1121+ status.strip(), self.view.export_queue_status.strip())
1122+
1123+
1124 def test_suite():
1125 suite = unittest.TestSuite()
1126 loader = unittest.TestLoader()
1127 suite.addTest(loader.loadTestsFromTestCase(TestProductSeries))
1128 suite.addTest(loader.loadTestsFromTestCase(TestSourcePackage))
1129+ suite.addTest(loader.loadTestsFromTestCase(
1130+ TestPOExportQueueStatusDescriptions))
1131 return suite
1132
1133=== modified file 'lib/lp/translations/doc/distroseries-translations-copy.txt'
1134--- lib/lp/translations/doc/distroseries-translations-copy.txt 2009-07-03 17:01:24 +0000
1135+++ lib/lp/translations/doc/distroseries-translations-copy.txt 2010-02-24 10:59:32 +0000
1136@@ -65,13 +65,8 @@
1137
1138 We need a transaction manager (in this case a fake one) to make the copy work.
1139
1140- >>> class FakeTransactionManager:
1141- ... """Mock transaction manager for test."""
1142- ... def begin(self):
1143- ... pass
1144- ... def commit(self):
1145- ... pass
1146- >>> transaction_stub = FakeTransactionManager()
1147+ >>> from lp.testing.faketransaction import FakeTransaction
1148+ >>> transaction_stub = FakeTransaction()
1149
1150
1151 == Preconditions for migrating translations between distro series ==
1152
1153=== modified file 'lib/lp/translations/doc/gettext-check-messages.txt'
1154--- lib/lp/translations/doc/gettext-check-messages.txt 2009-07-01 20:45:39 +0000
1155+++ lib/lp/translations/doc/gettext-check-messages.txt 2010-02-24 10:59:32 +0000
1156@@ -29,15 +29,7 @@
1157 >>> from lp.translations.scripts.gettext_check_messages import (
1158 ... GettextCheckMessages)
1159 >>> from canonical.launchpad.scripts.logger import FakeLogger
1160-
1161- >>> class MockTransactionManager:
1162- ... """"Print out commits and aborts, ignore them otherwise."""
1163- ... def begin(self):
1164- ... pass
1165- ... def commit(self):
1166- ... print "Committing."
1167- ... def abort(self):
1168- ... print "Aborting."
1169+ >>> from lp.testing.faketransaction import FakeTransaction
1170
1171 >>> class InstrumentedGettextCheckMessages(GettextCheckMessages):
1172 ... _commit_interval = 3
1173@@ -49,7 +41,7 @@
1174 ... checker = InstrumentedGettextCheckMessages(
1175 ... 'gettext-check-messages-test', test_args=options)
1176 ... checker.logger = FakeLogger()
1177- ... checker.txn = MockTransactionManager()
1178+ ... checker.txn = FakeTransaction(log_calls=True)
1179 ... if commit_interval is not None:
1180 ... checker._commit_interval = commit_interval
1181 ... checker.main()
1182@@ -101,9 +93,9 @@
1183
1184 >>> run_checker(['-vv', "-w id=%s" % quote(current_message.id)])
1185 DEBUG Checking messages matching: id=...
1186- DEBUG Checking message ....
1187+ DEBUG Checking message ...
1188 DEBUG Commit point.
1189- Committing.
1190+ COMMIT
1191 INFO Done.
1192 INFO Messages checked: 1
1193 INFO Validation errors: 0
1194@@ -128,7 +120,7 @@
1195 INFO ... (current): format specifications ... are not the same
1196 INFO ...: unmasked ...
1197 DEBUG Commit point.
1198- Committing.
1199+ COMMIT
1200 INFO Done.
1201 INFO Messages checked: 1
1202 INFO Validation errors: 1
1203@@ -160,9 +152,9 @@
1204 DEBUG Checking message ...
1205 INFO ... (current): format specifications ... are not the same
1206 DEBUG Commit point.
1207- Committing.
1208+ COMMIT
1209 DEBUG Commit point.
1210- Committing.
1211+ COMMIT
1212 INFO Done.
1213 INFO Messages checked: 1
1214 INFO Validation errors: 2
1215@@ -189,11 +181,11 @@
1216 DEBUG Checking message ...
1217 INFO ... (unused): format specifications ... are not the same
1218 DEBUG Commit point.
1219- Committing.
1220+ COMMIT
1221 DEBUG Checking message ...
1222 INFO ... (imported): number of format specifications ... does not match...
1223 DEBUG Commit point.
1224- Committing.
1225+ COMMIT
1226 INFO Done.
1227 INFO Messages checked: 2
1228 INFO Validation errors: 3
1229@@ -226,9 +218,9 @@
1230 DEBUG Checking message ...
1231 INFO ... (current): format specifications ... are not the same
1232 DEBUG Commit point.
1233- Aborting.
1234+ ABORT
1235 DEBUG Commit point.
1236- Aborting.
1237+ ABORT
1238 INFO Done.
1239 INFO Messages checked: 1
1240 INFO Validation errors: 2
1241@@ -250,13 +242,13 @@
1242 DEBUG Checking message ...
1243 INFO ... (...): number of format specifications ...
1244 DEBUG Commit point.
1245- Committing.
1246+ COMMIT
1247 DEBUG Checking message ...
1248 INFO ... (...): format specifications ... are not the same
1249 DEBUG Commit point.
1250- Committing.
1251+ COMMIT
1252 DEBUG Commit point.
1253- Committing.
1254+ COMMIT
1255 INFO Done.
1256 INFO Messages checked: 2
1257 INFO Validation errors: 3
1258
1259=== modified file 'lib/lp/translations/doc/poexport-queue.txt'
1260--- lib/lp/translations/doc/poexport-queue.txt 2009-08-17 16:54:40 +0000
1261+++ lib/lp/translations/doc/poexport-queue.txt 2010-02-24 10:59:32 +0000
1262@@ -12,10 +12,12 @@
1263 >>> import transaction
1264 >>> from zope.component import getUtility
1265 >>> from canonical.launchpad.interfaces import IPersonSet
1266+ >>> from lp.testing.faketransaction import FakeTransaction
1267 >>> from lp.testing.mail_helpers import pop_notifications, print_emails
1268 >>> from lp.translations.scripts.po_export_queue import ExportResult
1269 >>> import logging
1270 >>> logger = logging.getLogger()
1271+ >>> fake_transaction = FakeTransaction()
1272
1273 When there is an error, the system will notify it.
1274
1275@@ -273,7 +275,7 @@
1276
1277 Once the queue is processed, the queue is empty again.
1278
1279- >>> process_queue(transaction, logging.getLogger())
1280+ >>> process_queue(fake_transaction, logging.getLogger())
1281 INFO:...Stored file at http://.../po_evolution-2.2.pot
1282
1283 >>> export_request_set.entry_count
1284@@ -331,7 +333,7 @@
1285
1286 >>> export_request_set.addRequest(
1287 ... carlos, pofiles=[pofile], format=TranslationFileFormat.PO)
1288- >>> process_queue(transaction, logging.getLogger())
1289+ >>> process_queue(fake_transaction, logging.getLogger())
1290 INFO:root:Stored file at http://...eo.po
1291
1292 >>> transaction.commit()
1293@@ -352,7 +354,7 @@
1294
1295 >>> export_request_set.addRequest(
1296 ... carlos, pofiles=[pofile], format=TranslationFileFormat.POCHANGED)
1297- >>> process_queue(transaction, logging.getLogger())
1298+ >>> process_queue(fake_transaction, logging.getLogger())
1299 INFO:root:Stored file at http://...eo.po
1300
1301 >>> transaction.commit()
1302@@ -372,6 +374,6 @@
1303 Finally, if we try to do an export with an empty queue, we don't do
1304 anything:
1305
1306- >>> process_queue(transaction, logging.getLogger())
1307+ >>> process_queue(fake_transaction, logging.getLogger())
1308 >>> len(pop_notifications())
1309 0
1310
1311=== modified file 'lib/lp/translations/doc/poexport-request-productseries.txt'
1312--- lib/lp/translations/doc/poexport-request-productseries.txt 2009-08-17 13:42:00 +0000
1313+++ lib/lp/translations/doc/poexport-request-productseries.txt 2010-02-24 10:59:32 +0000
1314@@ -34,15 +34,12 @@
1315
1316 Now we request that the queue be processed.
1317
1318- >>> class MockTransactionManager:
1319- ... def commit(self):
1320- ... pass
1321-
1322 >>> import logging
1323+ >>> from lp.testing.faketransaction import FakeTransaction
1324 >>> from lp.translations.scripts.po_export_queue import process_queue
1325 >>> logger = MockLogger()
1326 >>> logger.setLevel(logging.DEBUG)
1327- >>> process_queue(MockTransactionManager(), logger)
1328+ >>> process_queue(FakeTransaction(), logger)
1329 log> Exporting objects for ..., related to template evolution-2.2 in
1330 Evolution trunk
1331 log> Exporting objects for ..., related to template evolution-2.2-test in
1332
1333=== modified file 'lib/lp/translations/doc/poexport-request.txt'
1334--- lib/lp/translations/doc/poexport-request.txt 2009-08-17 23:37:19 +0000
1335+++ lib/lp/translations/doc/poexport-request.txt 2010-02-24 10:59:32 +0000
1336@@ -53,12 +53,9 @@
1337
1338 Now we request that the queue be processed.
1339
1340- >>> class MockTransactionManager:
1341- ... def commit(self):
1342- ... pass
1343-
1344+ >>> from lp.testing.faketransaction import FakeTransaction
1345 >>> from lp.translations.scripts.po_export_queue import process_queue
1346- >>> process_queue(MockTransactionManager(), MockLogger())
1347+ >>> process_queue(FakeTransaction(), MockLogger())
1348 log> Exporting objects for Happy Downloader, related to template pmount
1349 in Ubuntu Hoary package "pmount"
1350 log> Stored file at http://.../launchpad-export.tar.gz
1351@@ -188,7 +185,7 @@
1352 >>> from lp.translations.interfaces.translationfileformat import (
1353 ... TranslationFileFormat)
1354 >>> request_set.addRequest(person, None, [cs], TranslationFileFormat.MO)
1355- >>> process_queue(MockTransactionManager(), MockLogger())
1356+ >>> process_queue(FakeTransaction(), MockLogger())
1357 log> Exporting objects for Happy Downloader, related to template pmount
1358 in Ubuntu Hoary package "pmount"
1359 log> Stored file at http://.../cs_LC_MESSAGES_pmount.mo
1360
1361=== modified file 'lib/lp/translations/doc/poimport.txt'
1362--- lib/lp/translations/doc/poimport.txt 2009-11-17 09:51:40 +0000
1363+++ lib/lp/translations/doc/poimport.txt 2010-02-24 10:59:32 +0000
1364@@ -103,14 +103,7 @@
1365 To prevent this, the importer now does intermediate commits while
1366 recomputing statistics.
1367
1368- >>> class FakeTransactionManager:
1369- ... """Pretend to manage a transaction, log what happens."""
1370- ... def begin(self):
1371- ... pass
1372- ... def commit(self):
1373- ... print "Committing."
1374- ... def abort(self):
1375- ... print "Aborting."
1376+ >>> from lp.testing.faketransaction import FakeTransaction
1377
1378 Attach the import to the translations import queue:
1379
1380@@ -136,8 +129,7 @@
1381 Now, we tell the PO template to import from the file data it has.
1382
1383 >>> (subject, body) = potemplate.importFromQueue(
1384- ... entry, FakeLogger(), txn=FakeTransactionManager())
1385- Committing.
1386+ ... entry, FakeLogger(), txn=FakeTransaction())
1387
1388 Our request has now been serviced.
1389
1390
1391=== modified file 'lib/lp/translations/interfaces/poexportrequest.py'
1392--- lib/lp/translations/interfaces/poexportrequest.py 2009-07-17 00:26:05 +0000
1393+++ lib/lp/translations/interfaces/poexportrequest.py 2010-02-24 10:59:32 +0000
1394@@ -1,4 +1,4 @@
1395-# Copyright 2009 Canonical Ltd. This software is licensed under the
1396+# Copyright 2009-2010 Canonical Ltd. This software is licensed under the
1397 # GNU Affero General Public License version 3 (see the file LICENSE).
1398
1399 # pylint: disable-msg=E0211,E0213
1400@@ -18,11 +18,16 @@
1401 from lp.translations.interfaces.potemplate import IPOTemplate
1402 from lp.translations.interfaces.translationfileformat import (
1403 TranslationFileFormat)
1404+
1405+
1406 class IPOExportRequestSet(Interface):
1407 entry_count = Int(
1408 title=u'Number of entries waiting in the queue.',
1409 required=True, readonly=True)
1410
1411+ def estimateBacklog():
1412+ """Return approximate age of oldest request on the export queue."""
1413+
1414 def addRequest(person, potemplates=None, pofiles=None,
1415 format=TranslationFileFormat.PO):
1416 """Add a request to export a set of files.
1417@@ -40,6 +45,7 @@
1418 objects to export.
1419 """
1420
1421+
1422 class IPOExportRequest(Interface):
1423 person = Object(
1424 title=u'The person who made the request.',
1425
1426=== modified file 'lib/lp/translations/model/poexportrequest.py'
1427--- lib/lp/translations/model/poexportrequest.py 2009-07-17 00:26:05 +0000
1428+++ lib/lp/translations/model/poexportrequest.py 2010-02-24 10:59:32 +0000
1429@@ -1,17 +1,21 @@
1430-# Copyright 2009 Canonical Ltd. This software is licensed under the
1431+# Copyright 2009-2010 Canonical Ltd. This software is licensed under the
1432 # GNU Affero General Public License version 3 (see the file LICENSE).
1433
1434 # pylint: disable-msg=E0611,W0212
1435
1436 __metaclass__ = type
1437
1438-__all__ = ('POExportRequestSet', 'POExportRequest')
1439+__all__ = [
1440+ 'POExportRequest',
1441+ 'POExportRequestSet',
1442+ ]
1443
1444 from sqlobject import ForeignKey
1445
1446+from zope.component import getUtility
1447 from zope.interface import implements
1448
1449-from canonical.database.sqlbase import cursor, quote, SQLBase, sqlvalues
1450+from canonical.database.sqlbase import quote, SQLBase, sqlvalues
1451 from canonical.database.enumcol import EnumCol
1452
1453 from lp.translations.interfaces.poexportrequest import (
1454@@ -19,6 +23,8 @@
1455 from lp.translations.interfaces.potemplate import IPOTemplate
1456 from lp.translations.interfaces.translationfileformat import (
1457 TranslationFileFormat)
1458+from canonical.launchpad.webapp.interfaces import (
1459+ DEFAULT_FLAVOR, IStoreSelector, MAIN_STORE, MASTER_FLAVOR)
1460 from lp.registry.interfaces.person import validate_public_person
1461
1462
1463@@ -28,7 +34,17 @@
1464 @property
1465 def entry_count(self):
1466 """See `IPOExportRequestSet`."""
1467- return POExportRequest.select().count()
1468+ store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
1469+ return store.find(POExportRequest, True).count()
1470+
1471+ def estimateBacklog(self):
1472+ store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
1473+ row = store.execute(
1474+ "SELECT now() - min(date_created) FROM POExportRequest").get_one()
1475+ if row is None:
1476+ return None
1477+ else:
1478+ return row[0]
1479
1480 def addRequest(self, person, potemplates=None, pofiles=None,
1481 format=TranslationFileFormat.PO):
1482@@ -58,13 +74,13 @@
1483 'pofiles': pofile_ids,
1484 }
1485
1486- cur = cursor()
1487+ store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR)
1488
1489 if potemplates:
1490 # Create requests for all these templates, insofar as the same
1491 # user doesn't already have requests pending for them in the same
1492 # format.
1493- cur.execute("""
1494+ store.execute("""
1495 INSERT INTO POExportRequest(person, potemplate, format)
1496 SELECT %(person)s, template.id, %(format)s
1497 FROM POTemplate AS template
1498@@ -81,7 +97,7 @@
1499 if pofiles:
1500 # Create requests for all these translations, insofar as the same
1501 # user doesn't already have identical requests pending.
1502- cur.execute("""
1503+ store.execute("""
1504 INSERT INTO POExportRequest(
1505 person, potemplate, pofile, format)
1506 SELECT %(person)s, template.id, pofile.id, %(format)s
1507
1508=== modified file 'lib/lp/translations/scripts/tests/test_copy_distroseries_translations.py'
1509--- lib/lp/translations/scripts/tests/test_copy_distroseries_translations.py 2009-07-17 00:26:05 +0000
1510+++ lib/lp/translations/scripts/tests/test_copy_distroseries_translations.py 2010-02-24 10:59:32 +0000
1511@@ -13,24 +13,16 @@
1512
1513 from canonical.launchpad.ftests import syncUpdate
1514 from lp.registry.interfaces.distroseries import IDistroSeriesSet
1515+from lp.testing.faketransaction import FakeTransaction
1516 from lp.translations.scripts.copy_distroseries_translations import (
1517 copy_distroseries_translations)
1518
1519 from canonical.testing import LaunchpadZopelessLayer
1520
1521
1522-class MockTransactionManager:
1523- def begin(self):
1524- pass
1525- def commit(self):
1526- pass
1527- def abort(self):
1528- pass
1529-
1530-
1531 class TestCopying(TestCase):
1532 layer = LaunchpadZopelessLayer
1533- txn = MockTransactionManager()
1534+ txn = FakeTransaction()
1535
1536 def test_flagsHandling(self):
1537 """Flags are correctly restored, no matter what their values."""
1538
1539=== modified file 'lib/lp/translations/templates/translations-export.pt'
1540--- lib/lp/translations/templates/translations-export.pt 2009-09-17 21:07:36 +0000
1541+++ lib/lp/translations/templates/translations-export.pt 2010-02-24 10:59:32 +0000
1542@@ -38,6 +38,11 @@
1543 This message will tell you where you can download your file.
1544 </p>
1545
1546+ <p tal:content="view/export_queue_status">
1547+ There are 201 file reequests on the export queue. The backlog is
1548+ approximately 25 minutes.
1549+ </p>
1550+
1551 <div class="actions">
1552 <p>
1553 <input type="submit" value="Request Download" />
