Merge lp:~julian-edwards/launchpad/publish-copy-archives-bug-520520-publish-distro into lp:launchpad/db-devel
- publish-copy-archives-bug-520520-publish-distro
- Merge into db-devel
Proposed by: Julian Edwards
Status: Rejected
Rejected by: Julian Edwards
Proposed branch: lp:~julian-edwards/launchpad/publish-copy-archives-bug-520520-publish-distro
Merge into: lp:launchpad/db-devel
Diff against target: 1553 lines (+343/-460), 33 files modified:
lib/canonical/launchpad/doc/product-update-remote-product.txt (+1/-6) lib/lp/bugs/doc/bugzilla-import.txt (+2/-6) lib/lp/bugs/doc/externalbugtracker.txt (+3/-27) lib/lp/code/browser/branch.py (+3/-3) lib/lp/code/doc/xmlrpc-branch-puller.txt (+1/-16) lib/lp/code/enums.py (+1/-1) lib/lp/code/interfaces/branchpuller.py (+0/-6) lib/lp/code/interfaces/codehosting.py (+0/-14) lib/lp/code/model/branchpuller.py (+0/-22) lib/lp/code/model/tests/test_branchjob.py (+5/-24) lib/lp/code/model/tests/test_branchpuller.py (+0/-82) lib/lp/code/model/tests/test_codeimport.py (+9/-0) lib/lp/code/model/tests/test_codeimportjob.py (+2/-1) lib/lp/code/xmlrpc/codehosting.py (+1/-43) lib/lp/code/xmlrpc/tests/test_codehosting.py (+0/-100) lib/lp/codehosting/codeimport/tests/test_worker.py (+25/-0) lib/lp/codehosting/codeimport/worker.py (+15/-5) lib/lp/codehosting/inmemory.py (+0/-23) lib/lp/soyuz/scripts/publishdistro.py (+19/-6) lib/lp/soyuz/scripts/tests/test_publishdistro.py (+52/-1) lib/lp/testing/faketransaction.py (+35/-0) lib/lp/translations/browser/poexportrequest.py (+35/-0) lib/lp/translations/browser/tests/test_baseexportview.py (+68/-2) lib/lp/translations/doc/distroseries-translations-copy.txt (+2/-7) lib/lp/translations/doc/gettext-check-messages.txt (+14/-22) lib/lp/translations/doc/poexport-queue.txt (+6/-4) lib/lp/translations/doc/poexport-request-productseries.txt (+2/-5) lib/lp/translations/doc/poexport-request.txt (+3/-6) lib/lp/translations/doc/poimport.txt (+2/-10) lib/lp/translations/interfaces/poexportrequest.py (+7/-1) lib/lp/translations/model/poexportrequest.py (+23/-7) lib/lp/translations/scripts/tests/test_copy_distroseries_translations.py (+2/-10) lib/lp/translations/templates/translations-export.pt (+5/-0) |
To merge this branch: bzr merge lp:~julian-edwards/launchpad/publish-copy-archives-bug-520520-publish-distro
Related bugs: (none listed)

| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Canonical Launchpad Engineering | | | Pending |

Review via email: mp+19987@code.launchpad.net
Commit message
Add support for publishing of copy archives to the publish-distro script.
Description of the change
To post a comment you must log in.
Revision history for this message
Julian Edwards (julian-edwards) wrote:
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'lib/canonical/launchpad/doc/product-update-remote-product.txt' | |||
2 | --- lib/canonical/launchpad/doc/product-update-remote-product.txt 2009-06-12 16:36:02 +0000 | |||
3 | +++ lib/canonical/launchpad/doc/product-update-remote-product.txt 2010-02-24 10:59:32 +0000 | |||
4 | @@ -17,12 +17,7 @@ | |||
5 | 17 | ... FakeLogger, QuietFakeLogger) | 17 | ... FakeLogger, QuietFakeLogger) |
6 | 18 | >>> from canonical.launchpad.scripts.updateremoteproduct import ( | 18 | >>> from canonical.launchpad.scripts.updateremoteproduct import ( |
7 | 19 | ... RemoteProductUpdater) | 19 | ... RemoteProductUpdater) |
14 | 20 | >>> class FakeTransaction: | 20 | >>> from lp.testing.faketransaction import FakeTransaction |
9 | 21 | ... def __init__(self, log_calls=False): | ||
10 | 22 | ... self.log_calls = log_calls | ||
11 | 23 | ... def commit(self): | ||
12 | 24 | ... if self.log_calls: | ||
13 | 25 | ... print "COMMIT" | ||
15 | 26 | >>> updater = RemoteProductUpdater(FakeTransaction(), QuietFakeLogger()) | 21 | >>> updater = RemoteProductUpdater(FakeTransaction(), QuietFakeLogger()) |
16 | 27 | 22 | ||
17 | 28 | 23 | ||
18 | 29 | 24 | ||
19 | === modified file 'lib/lp/bugs/doc/bugzilla-import.txt' | |||
20 | --- lib/lp/bugs/doc/bugzilla-import.txt 2009-06-12 16:36:02 +0000 | |||
21 | +++ lib/lp/bugs/doc/bugzilla-import.txt 2010-02-24 10:59:32 +0000 | |||
22 | @@ -522,12 +522,8 @@ | |||
23 | 522 | The Bugzilla duplicate bugs table can be used to mark the | 522 | The Bugzilla duplicate bugs table can be used to mark the |
24 | 523 | corresponding Launchpad bugs as duplicates too: | 523 | corresponding Launchpad bugs as duplicates too: |
25 | 524 | 524 | ||
32 | 525 | >>> class FakeTransactionManager: | 525 | >>> from lp.testing.faketransaction import FakeTransaction |
33 | 526 | ... def begin(self): | 526 | >>> bz.processDuplicates(FakeTransaction()) |
28 | 527 | ... pass | ||
29 | 528 | ... abort = commit = begin | ||
30 | 529 | ... | ||
31 | 530 | >>> bz.processDuplicates(FakeTransactionManager()) | ||
34 | 531 | 527 | ||
35 | 532 | Now check that the bugs have been marked duplicate: | 528 | Now check that the bugs have been marked duplicate: |
36 | 533 | 529 | ||
37 | 534 | 530 | ||
38 | === modified file 'lib/lp/bugs/doc/externalbugtracker.txt' | |||
39 | --- lib/lp/bugs/doc/externalbugtracker.txt 2010-02-19 12:05:10 +0000 | |||
40 | +++ lib/lp/bugs/doc/externalbugtracker.txt 2010-02-24 10:59:32 +0000 | |||
41 | @@ -27,17 +27,9 @@ | |||
42 | 27 | ... print "initializeRemoteBugDB() called: %r" % ( | 27 | ... print "initializeRemoteBugDB() called: %r" % ( |
43 | 28 | ... remote_bug_ids, ) | 28 | ... remote_bug_ids, ) |
44 | 29 | 29 | ||
54 | 30 | >>> class FakeTransaction: | 30 | >>> from lp.testing.faketransaction import FakeTransaction |
46 | 31 | ... """Transaction class to track transaction boundaries.""" | ||
47 | 32 | ... def commit(self): | ||
48 | 33 | ... print "COMMIT" | ||
49 | 34 | ... def abort(self): | ||
50 | 35 | ... print "ABORT" | ||
51 | 36 | ... def begin(self): | ||
52 | 37 | ... print "BEGIN" | ||
53 | 38 | |||
55 | 39 | >>> from lp.bugs.scripts.checkwatches import BugWatchUpdater | 31 | >>> from lp.bugs.scripts.checkwatches import BugWatchUpdater |
57 | 40 | >>> bug_watch_updater = BugWatchUpdater(FakeTransaction()) | 32 | >>> bug_watch_updater = BugWatchUpdater(FakeTransaction(log_calls=True)) |
58 | 41 | >>> bug_watch_updater.updateBugWatches( | 33 | >>> bug_watch_updater.updateBugWatches( |
59 | 42 | ... InitializingExternalBugTracker(), []) | 34 | ... InitializingExternalBugTracker(), []) |
60 | 43 | COMMIT | 35 | COMMIT |
61 | @@ -305,7 +297,7 @@ | |||
62 | 305 | and the remote system is never asked about product information. | 297 | and the remote system is never asked about product information. |
63 | 306 | 298 | ||
64 | 307 | >>> bug_watch_updater = BugWatchUpdater( | 299 | >>> bug_watch_updater = BugWatchUpdater( |
66 | 308 | ... FakeTransaction(), syncable_gnome_products=[]) | 300 | ... FakeTransaction(log_calls=True), syncable_gnome_products=[]) |
67 | 309 | 301 | ||
68 | 310 | >>> trackers_and_watches = get_trackers_and_watches( | 302 | >>> trackers_and_watches = get_trackers_and_watches( |
69 | 311 | ... gnome_bugzilla, bug_watches) | 303 | ... gnome_bugzilla, bug_watches) |
70 | @@ -506,16 +498,12 @@ | |||
71 | 506 | ... external_bugtracker, bug_watches) | 498 | ... external_bugtracker, bug_watches) |
72 | 507 | COMMIT | 499 | COMMIT |
73 | 508 | initializeRemoteBugDB() called: [u'1', u'2', u'3', u'4'] | 500 | initializeRemoteBugDB() called: [u'1', u'2', u'3', u'4'] |
74 | 509 | BEGIN | ||
75 | 510 | getRemoteStatus() called: u'1' | 501 | getRemoteStatus() called: u'1' |
76 | 511 | COMMIT | 502 | COMMIT |
77 | 512 | BEGIN | ||
78 | 513 | getRemoteStatus() called: u'2' | 503 | getRemoteStatus() called: u'2' |
79 | 514 | COMMIT | 504 | COMMIT |
80 | 515 | BEGIN | ||
81 | 516 | getRemoteStatus() called: u'3' | 505 | getRemoteStatus() called: u'3' |
82 | 517 | COMMIT | 506 | COMMIT |
83 | 518 | BEGIN | ||
84 | 519 | getRemoteStatus() called: u'4' | 507 | getRemoteStatus() called: u'4' |
85 | 520 | COMMIT | 508 | COMMIT |
86 | 521 | 509 | ||
87 | @@ -549,14 +537,10 @@ | |||
88 | 549 | last_checked: 2007-03-17 15:...:... | 537 | last_checked: 2007-03-17 15:...:... |
89 | 550 | getModifiedRemoteBugs() called: [u'1', u'2', u'3', u'4'] | 538 | getModifiedRemoteBugs() called: [u'1', u'2', u'3', u'4'] |
90 | 551 | initializeRemoteBugDB() called: [u'1', u'4'] | 539 | initializeRemoteBugDB() called: [u'1', u'4'] |
91 | 552 | BEGIN | ||
92 | 553 | getRemoteStatus() called: u'1' | 540 | getRemoteStatus() called: u'1' |
93 | 554 | COMMIT | 541 | COMMIT |
94 | 555 | BEGIN | ||
95 | 556 | getRemoteStatus() called: u'4' | 542 | getRemoteStatus() called: u'4' |
96 | 557 | COMMIT | 543 | COMMIT |
97 | 558 | BEGIN | ||
98 | 559 | BEGIN | ||
99 | 560 | 544 | ||
100 | 561 | The bug watches that are deemed as not being modified are still marked | 545 | The bug watches that are deemed as not being modified are still marked |
101 | 562 | as being checked. | 546 | as being checked. |
102 | @@ -604,16 +588,12 @@ | |||
103 | 604 | last_checked: 2007-03-16 15:...:... | 588 | last_checked: 2007-03-16 15:...:... |
104 | 605 | getModifiedRemoteBugs() called: [u'1', u'4'] | 589 | getModifiedRemoteBugs() called: [u'1', u'4'] |
105 | 606 | initializeRemoteBugDB() called: [u'1', u'2', u'3', u'4'] | 590 | initializeRemoteBugDB() called: [u'1', u'2', u'3', u'4'] |
106 | 607 | BEGIN | ||
107 | 608 | getRemoteStatus() called: u'1' | 591 | getRemoteStatus() called: u'1' |
108 | 609 | COMMIT | 592 | COMMIT |
109 | 610 | BEGIN | ||
110 | 611 | getRemoteStatus() called: u'2' | 593 | getRemoteStatus() called: u'2' |
111 | 612 | COMMIT | 594 | COMMIT |
112 | 613 | BEGIN | ||
113 | 614 | getRemoteStatus() called: u'3' | 595 | getRemoteStatus() called: u'3' |
114 | 615 | COMMIT | 596 | COMMIT |
115 | 616 | BEGIN | ||
116 | 617 | getRemoteStatus() called: u'4' | 597 | getRemoteStatus() called: u'4' |
117 | 618 | COMMIT | 598 | COMMIT |
118 | 619 | 599 | ||
119 | @@ -631,16 +611,12 @@ | |||
120 | 631 | ... TimeUnknownExternalBugTracker(), bug_watches) | 611 | ... TimeUnknownExternalBugTracker(), bug_watches) |
121 | 632 | COMMIT | 612 | COMMIT |
122 | 633 | initializeRemoteBugDB() called: [u'1', u'2', u'3', u'4'] | 613 | initializeRemoteBugDB() called: [u'1', u'2', u'3', u'4'] |
123 | 634 | BEGIN | ||
124 | 635 | getRemoteStatus() called: u'1' | 614 | getRemoteStatus() called: u'1' |
125 | 636 | COMMIT | 615 | COMMIT |
126 | 637 | BEGIN | ||
127 | 638 | getRemoteStatus() called: u'2' | 616 | getRemoteStatus() called: u'2' |
128 | 639 | COMMIT | 617 | COMMIT |
129 | 640 | BEGIN | ||
130 | 641 | getRemoteStatus() called: u'3' | 618 | getRemoteStatus() called: u'3' |
131 | 642 | COMMIT | 619 | COMMIT |
132 | 643 | BEGIN | ||
133 | 644 | getRemoteStatus() called: u'4' | 620 | getRemoteStatus() called: u'4' |
134 | 645 | COMMIT | 621 | COMMIT |
135 | 646 | 622 | ||
136 | 647 | 623 | ||
137 | === modified file 'lib/lp/code/browser/branch.py' | |||
138 | --- lib/lp/code/browser/branch.py 2010-02-24 10:18:16 +0000 | |||
139 | +++ lib/lp/code/browser/branch.py 2010-02-24 10:59:32 +0000 | |||
140 | @@ -514,10 +514,10 @@ | |||
141 | 514 | 514 | ||
142 | 515 | def iconForCodeImportResultStatus(self, status): | 515 | def iconForCodeImportResultStatus(self, status): |
143 | 516 | """The icon to represent the `CodeImportResultStatus` `status`.""" | 516 | """The icon to represent the `CodeImportResultStatus` `status`.""" |
145 | 517 | if status in CodeImportResultStatus.successes: | 517 | if status == CodeImportResultStatus.SUCCESS_PARTIAL: |
146 | 518 | return "/@@/yes-gray" | ||
147 | 519 | elif status in CodeImportResultStatus.successes: | ||
148 | 518 | return "/@@/yes" | 520 | return "/@@/yes" |
149 | 519 | elif status == CodeImportResultStatus.SUCCESS_PARTIAL: | ||
150 | 520 | return "/@@/yes-gray" | ||
151 | 521 | else: | 521 | else: |
152 | 522 | return "/@@/no" | 522 | return "/@@/no" |
153 | 523 | 523 | ||
154 | 524 | 524 | ||
155 | === modified file 'lib/lp/code/doc/xmlrpc-branch-puller.txt' | |||
156 | --- lib/lp/code/doc/xmlrpc-branch-puller.txt 2009-10-22 11:55:51 +0000 | |||
157 | +++ lib/lp/code/doc/xmlrpc-branch-puller.txt 2010-02-24 10:59:32 +0000 | |||
158 | @@ -28,19 +28,4 @@ | |||
159 | 28 | True | 28 | True |
160 | 29 | 29 | ||
161 | 30 | The IBranchPuller interface defines some methods, for which see the unit | 30 | The IBranchPuller interface defines some methods, for which see the unit |
178 | 31 | tests. To allow a minimal test here, we call getBranchPullQueue, | 31 | tests. |
163 | 32 | which will return an empty list. | ||
164 | 33 | |||
165 | 34 | >>> from lp.code.enums import BranchType | ||
166 | 35 | >>> branch_puller.getBranchPullQueue(BranchType.HOSTED.name) | ||
167 | 36 | [] | ||
168 | 37 | |||
169 | 38 | This remains true when it is accessed over XMLRPC. | ||
170 | 39 | |||
171 | 40 | >>> import xmlrpclib | ||
172 | 41 | >>> from canonical.functional import XMLRPCTestTransport | ||
173 | 42 | >>> puller = xmlrpclib.ServerProxy( | ||
174 | 43 | ... 'http://xmlrpc-private.launchpad.dev:8087/branch_puller', | ||
175 | 44 | ... transport=XMLRPCTestTransport()) | ||
176 | 45 | >>> puller.getBranchPullQueue(BranchType.HOSTED.name) | ||
177 | 46 | [] | ||
179 | 47 | 32 | ||
180 | === modified file 'lib/lp/code/enums.py' | |||
181 | --- lib/lp/code/enums.py 2010-02-17 04:28:48 +0000 | |||
182 | +++ lib/lp/code/enums.py 2010-02-24 10:59:32 +0000 | |||
183 | @@ -871,7 +871,7 @@ | |||
184 | 871 | job, or the deletion of a CodeImport which had a running job. | 871 | job, or the deletion of a CodeImport which had a running job. |
185 | 872 | """) | 872 | """) |
186 | 873 | 873 | ||
188 | 874 | successes = [SUCCESS, SUCCESS_NOCHANGE] | 874 | successes = [SUCCESS, SUCCESS_NOCHANGE, SUCCESS_PARTIAL] |
189 | 875 | 875 | ||
190 | 876 | 876 | ||
191 | 877 | class CodeReviewVote(DBEnumeratedType): | 877 | class CodeReviewVote(DBEnumeratedType): |
192 | 878 | 878 | ||
193 | === modified file 'lib/lp/code/interfaces/branchpuller.py' | |||
194 | --- lib/lp/code/interfaces/branchpuller.py 2009-06-30 16:56:07 +0000 | |||
195 | +++ lib/lp/code/interfaces/branchpuller.py 2010-02-24 10:59:32 +0000 | |||
196 | @@ -23,12 +23,6 @@ | |||
197 | 23 | MIRROR_TIME_INCREMENT = Attribute( | 23 | MIRROR_TIME_INCREMENT = Attribute( |
198 | 24 | "How frequently we mirror branches.") | 24 | "How frequently we mirror branches.") |
199 | 25 | 25 | ||
200 | 26 | def getPullQueue(branch_type): | ||
201 | 27 | """Return a queue of branches to mirror using the puller. | ||
202 | 28 | |||
203 | 29 | :param branch_type: A value from the `BranchType` enum. | ||
204 | 30 | """ | ||
205 | 31 | |||
206 | 32 | def acquireBranchToPull(): | 26 | def acquireBranchToPull(): |
207 | 33 | """Return a Branch to pull and mark it as mirror-started. | 27 | """Return a Branch to pull and mark it as mirror-started. |
208 | 34 | 28 | ||
209 | 35 | 29 | ||
210 | === modified file 'lib/lp/code/interfaces/codehosting.py' | |||
211 | --- lib/lp/code/interfaces/codehosting.py 2009-06-25 04:06:00 +0000 | |||
212 | +++ lib/lp/code/interfaces/codehosting.py 2010-02-24 10:59:32 +0000 | |||
213 | @@ -58,20 +58,6 @@ | |||
214 | 58 | Published at 'branch_puller' on the private XML-RPC server. | 58 | Published at 'branch_puller' on the private XML-RPC server. |
215 | 59 | """ | 59 | """ |
216 | 60 | 60 | ||
217 | 61 | def getBranchPullQueue(branch_type): | ||
218 | 62 | """Get the list of branches to be mirrored. | ||
219 | 63 | |||
220 | 64 | :param branch_type: One of 'HOSTED', 'MIRRORED', or 'IMPORTED'. | ||
221 | 65 | |||
222 | 66 | :raise UnknownBranchTypeError: if the branch type is unrecognized. | ||
223 | 67 | |||
224 | 68 | :returns: a list of (branch_id, pull_url, unique_name, default_branch) | ||
225 | 69 | 4-tuples. branch_id is the database id of the branch, pull_url is | ||
226 | 70 | where to pull from, unique_name is the unique_name of the branch | ||
227 | 71 | and default_branch is the default stacked on branch for the | ||
228 | 72 | branch's target. | ||
229 | 73 | """ | ||
230 | 74 | |||
231 | 75 | def acquireBranchToPull(): | 61 | def acquireBranchToPull(): |
232 | 76 | """Return a Branch to pull and mark it as mirror-started. | 62 | """Return a Branch to pull and mark it as mirror-started. |
233 | 77 | 63 | ||
234 | 78 | 64 | ||
235 | === modified file 'lib/lp/code/model/branchpuller.py' | |||
236 | --- lib/lp/code/model/branchpuller.py 2009-08-04 05:14:32 +0000 | |||
237 | +++ lib/lp/code/model/branchpuller.py 2010-02-24 10:59:32 +0000 | |||
238 | @@ -9,17 +9,13 @@ | |||
239 | 9 | 9 | ||
240 | 10 | from datetime import timedelta | 10 | from datetime import timedelta |
241 | 11 | 11 | ||
242 | 12 | from storm.expr import LeftJoin, Join | ||
243 | 13 | from zope.component import getUtility | 12 | from zope.component import getUtility |
244 | 14 | from zope.interface import implements | 13 | from zope.interface import implements |
245 | 15 | 14 | ||
246 | 16 | from canonical.database.constants import UTC_NOW | 15 | from canonical.database.constants import UTC_NOW |
247 | 17 | from lp.code.enums import BranchType | 16 | from lp.code.enums import BranchType |
248 | 18 | from lp.code.model.branch import Branch | 17 | from lp.code.model.branch import Branch |
249 | 19 | from lp.code.interfaces.branch import BranchTypeError | ||
250 | 20 | from lp.code.interfaces.branchpuller import IBranchPuller | 18 | from lp.code.interfaces.branchpuller import IBranchPuller |
251 | 21 | from lp.registry.model.person import Owner | ||
252 | 22 | from lp.registry.model.product import Product | ||
253 | 23 | from canonical.launchpad.webapp.interfaces import ( | 19 | from canonical.launchpad.webapp.interfaces import ( |
254 | 24 | IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR) | 20 | IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR) |
255 | 25 | 21 | ||
256 | @@ -32,24 +28,6 @@ | |||
257 | 32 | MAXIMUM_MIRROR_FAILURES = 5 | 28 | MAXIMUM_MIRROR_FAILURES = 5 |
258 | 33 | MIRROR_TIME_INCREMENT = timedelta(hours=6) | 29 | MIRROR_TIME_INCREMENT = timedelta(hours=6) |
259 | 34 | 30 | ||
260 | 35 | def getPullQueue(self, branch_type): | ||
261 | 36 | """See `IBranchPuller`.""" | ||
262 | 37 | if branch_type == BranchType.REMOTE: | ||
263 | 38 | raise BranchTypeError("No pull queue for REMOTE branches.") | ||
264 | 39 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) | ||
265 | 40 | # Prejoin on owner and product to preserve existing behaviour. | ||
266 | 41 | # XXX: JonathanLange 2009-03-22 spec=package-branches: This prejoin is | ||
267 | 42 | # inappropriate in the face of package branches. | ||
268 | 43 | prejoin = store.using( | ||
269 | 44 | Branch, | ||
270 | 45 | LeftJoin(Product, Branch.product == Product.id), | ||
271 | 46 | Join(Owner, Branch.owner == Owner.id)) | ||
272 | 47 | return prejoin.find( | ||
273 | 48 | Branch, | ||
274 | 49 | Branch.branch_type == branch_type, | ||
275 | 50 | Branch.next_mirror_time <= UTC_NOW).order_by( | ||
276 | 51 | Branch.next_mirror_time) | ||
277 | 52 | |||
278 | 53 | def acquireBranchToPull(self): | 31 | def acquireBranchToPull(self): |
279 | 54 | """See `IBranchPuller`.""" | 32 | """See `IBranchPuller`.""" |
280 | 55 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) | 33 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
281 | 56 | 34 | ||
282 | === modified file 'lib/lp/code/model/tests/test_branchjob.py' | |||
283 | --- lib/lp/code/model/tests/test_branchjob.py 2010-02-22 12:26:18 +0000 | |||
284 | +++ lib/lp/code/model/tests/test_branchjob.py 2010-02-24 10:59:32 +0000 | |||
285 | @@ -867,22 +867,6 @@ | |||
286 | 867 | self.assertFalse(job.generateDiffs()) | 867 | self.assertFalse(job.generateDiffs()) |
287 | 868 | 868 | ||
288 | 869 | 869 | ||
289 | 870 | def all_dirs(directory): | ||
290 | 871 | """Generate all parent directories and the directory itself. | ||
291 | 872 | |||
292 | 873 | Passing 'a/b/c/d' produces ['a', 'a/b', 'a/b/c', 'a/b/c/d']. | ||
293 | 874 | """ | ||
294 | 875 | if directory == '': | ||
295 | 876 | return [] | ||
296 | 877 | dirs = [directory] | ||
297 | 878 | while(1): | ||
298 | 879 | head, tail = os.path.split(directory) | ||
299 | 880 | if head == '': | ||
300 | 881 | return reversed(dirs) | ||
301 | 882 | directory = head | ||
302 | 883 | dirs.append(directory) | ||
303 | 884 | |||
304 | 885 | |||
305 | 886 | class TestRosettaUploadJob(TestCaseWithFactory): | 870 | class TestRosettaUploadJob(TestCaseWithFactory): |
306 | 887 | """Tests for RosettaUploadJob.""" | 871 | """Tests for RosettaUploadJob.""" |
307 | 888 | 872 | ||
308 | @@ -937,21 +921,18 @@ | |||
309 | 937 | seen_dirs = set() | 921 | seen_dirs = set() |
310 | 938 | for file_pair in files: | 922 | for file_pair in files: |
311 | 939 | file_name = file_pair[0] | 923 | file_name = file_pair[0] |
312 | 940 | dname, fname = os.path.split(file_name) | ||
313 | 941 | for adir in all_dirs(dname): | ||
314 | 942 | if adir in seen_dirs: | ||
315 | 943 | continue | ||
316 | 944 | self.tree.bzrdir.root_transport.mkdir(adir) | ||
317 | 945 | self.tree.add(adir) | ||
318 | 946 | seen_dirs.add(adir) | ||
319 | 947 | try: | 924 | try: |
320 | 948 | file_content = file_pair[1] | 925 | file_content = file_pair[1] |
321 | 949 | if file_content is None: | 926 | if file_content is None: |
322 | 950 | raise IndexError # Same as if missing. | 927 | raise IndexError # Same as if missing. |
323 | 951 | except IndexError: | 928 | except IndexError: |
324 | 952 | file_content = self.factory.getUniqueString() | 929 | file_content = self.factory.getUniqueString() |
325 | 930 | dname = os.path.dirname(file_name) | ||
326 | 931 | self.tree.bzrdir.root_transport.clone(dname).create_prefix() | ||
327 | 953 | self.tree.bzrdir.root_transport.put_bytes(file_name, file_content) | 932 | self.tree.bzrdir.root_transport.put_bytes(file_name, file_content) |
329 | 954 | self.tree.add(file_name) | 933 | if len(files) > 0: |
330 | 934 | self.tree.smart_add( | ||
331 | 935 | [self.tree.abspath(file_pair[0]) for file_pair in files]) | ||
332 | 955 | if commit_message is None: | 936 | if commit_message is None: |
333 | 956 | commit_message = self.factory.getUniqueString('commit') | 937 | commit_message = self.factory.getUniqueString('commit') |
334 | 957 | revision_id = self.tree.commit(commit_message) | 938 | revision_id = self.tree.commit(commit_message) |
335 | 958 | 939 | ||
336 | === modified file 'lib/lp/code/model/tests/test_branchpuller.py' | |||
337 | --- lib/lp/code/model/tests/test_branchpuller.py 2009-09-28 23:51:54 +0000 | |||
338 | +++ lib/lp/code/model/tests/test_branchpuller.py 2010-02-24 10:59:32 +0000 | |||
339 | @@ -17,7 +17,6 @@ | |||
340 | 17 | from canonical.database.constants import UTC_NOW | 17 | from canonical.database.constants import UTC_NOW |
341 | 18 | from canonical.testing.layers import DatabaseFunctionalLayer | 18 | from canonical.testing.layers import DatabaseFunctionalLayer |
342 | 19 | from lp.code.enums import BranchType | 19 | from lp.code.enums import BranchType |
343 | 20 | from lp.code.interfaces.branch import BranchTypeError | ||
344 | 21 | from lp.code.interfaces.branchpuller import IBranchPuller | 20 | from lp.code.interfaces.branchpuller import IBranchPuller |
345 | 22 | from lp.testing import TestCaseWithFactory, login_person | 21 | from lp.testing import TestCaseWithFactory, login_person |
346 | 23 | 22 | ||
347 | @@ -78,37 +77,6 @@ | |||
348 | 78 | branch.requestMirror() | 77 | branch.requestMirror() |
349 | 79 | self.assertEqual(UTC_NOW, branch.next_mirror_time) | 78 | self.assertEqual(UTC_NOW, branch.next_mirror_time) |
350 | 80 | 79 | ||
351 | 81 | def test_requestMirrorDuringPull(self): | ||
352 | 82 | """Branches can have mirrors requested while they are being mirrored. | ||
353 | 83 | If so, they should not be removed from the pull queue when the mirror | ||
354 | 84 | is complete. | ||
355 | 85 | """ | ||
356 | 86 | # We run these in separate transactions so as to have the times set to | ||
357 | 87 | # different values. This is closer to what happens in production. | ||
358 | 88 | branch = self.makeAnyBranch() | ||
359 | 89 | branch.startMirroring() | ||
360 | 90 | self.assertEqual( | ||
361 | 91 | [], list(self.branch_puller.getPullQueue(branch.branch_type))) | ||
362 | 92 | branch.requestMirror() | ||
363 | 93 | self.assertEqual( | ||
364 | 94 | [branch], | ||
365 | 95 | list(self.branch_puller.getPullQueue(branch.branch_type))) | ||
366 | 96 | branch.mirrorComplete('rev1') | ||
367 | 97 | self.assertEqual( | ||
368 | 98 | [branch], | ||
369 | 99 | list(self.branch_puller.getPullQueue(branch.branch_type))) | ||
370 | 100 | |||
371 | 101 | def test_startMirroringRemovesFromPullQueue(self): | ||
372 | 102 | # Starting a mirror removes the branch from the pull queue. | ||
373 | 103 | branch = self.makeAnyBranch() | ||
374 | 104 | branch.requestMirror() | ||
375 | 105 | self.assertEqual( | ||
376 | 106 | set([branch]), | ||
377 | 107 | set(self.branch_puller.getPullQueue(branch.branch_type))) | ||
378 | 108 | branch.startMirroring() | ||
379 | 109 | self.assertEqual( | ||
380 | 110 | set(), set(self.branch_puller.getPullQueue(branch.branch_type))) | ||
381 | 111 | |||
382 | 112 | def test_mirroringResetsMirrorRequest(self): | 80 | def test_mirroringResetsMirrorRequest(self): |
383 | 113 | """Mirroring branches resets their mirror request times.""" | 81 | """Mirroring branches resets their mirror request times.""" |
384 | 114 | branch = self.makeAnyBranch() | 82 | branch = self.makeAnyBranch() |
385 | @@ -129,44 +97,6 @@ | |||
386 | 129 | self.assertEqual(1, branch.mirror_failures) | 97 | self.assertEqual(1, branch.mirror_failures) |
387 | 130 | self.assertEqual(None, branch.next_mirror_time) | 98 | self.assertEqual(None, branch.next_mirror_time) |
388 | 131 | 99 | ||
389 | 132 | def test_pullQueueEmpty(self): | ||
390 | 133 | """Branches with no next_mirror_time are not in the pull queue.""" | ||
391 | 134 | branch = self.makeAnyBranch() | ||
392 | 135 | self.assertIs(None, branch.next_mirror_time) | ||
393 | 136 | self.assertEqual( | ||
394 | 137 | [], list(self.branch_puller.getPullQueue(self.branch_type))) | ||
395 | 138 | |||
396 | 139 | def test_pastNextMirrorTimeInQueue(self): | ||
397 | 140 | """Branches with next_mirror_time in the past are mirrored.""" | ||
398 | 141 | transaction.begin() | ||
399 | 142 | branch = self.makeAnyBranch() | ||
400 | 143 | branch.requestMirror() | ||
401 | 144 | queue = self.branch_puller.getPullQueue(branch.branch_type) | ||
402 | 145 | self.assertEqual([branch], list(queue)) | ||
403 | 146 | |||
404 | 147 | def test_futureNextMirrorTimeInQueue(self): | ||
405 | 148 | """Branches with next_mirror_time in the future are not mirrored.""" | ||
406 | 149 | transaction.begin() | ||
407 | 150 | branch = removeSecurityProxy(self.makeAnyBranch()) | ||
408 | 151 | tomorrow = self.getNow() + timedelta(1) | ||
409 | 152 | branch.next_mirror_time = tomorrow | ||
410 | 153 | branch.syncUpdate() | ||
411 | 154 | transaction.commit() | ||
412 | 155 | self.assertEqual( | ||
413 | 156 | [], list(self.branch_puller.getPullQueue(branch.branch_type))) | ||
414 | 157 | |||
415 | 158 | def test_pullQueueOrder(self): | ||
416 | 159 | """Pull queue has the oldest mirror request times first.""" | ||
417 | 160 | branches = [] | ||
418 | 161 | for i in range(3): | ||
419 | 162 | branch = removeSecurityProxy(self.makeAnyBranch()) | ||
420 | 163 | branch.next_mirror_time = self.getNow() - timedelta(hours=i+1) | ||
421 | 164 | branch.sync() | ||
422 | 165 | branches.append(branch) | ||
423 | 166 | self.assertEqual( | ||
424 | 167 | list(reversed(branches)), | ||
425 | 168 | list(self.branch_puller.getPullQueue(self.branch_type))) | ||
426 | 169 | |||
427 | 170 | 100 | ||
428 | 171 | class TestMirroringForMirroredBranches(TestMirroringForHostedBranches): | 101 | class TestMirroringForMirroredBranches(TestMirroringForHostedBranches): |
429 | 172 | 102 | ||
430 | @@ -231,18 +161,6 @@ | |||
431 | 231 | branch_type = BranchType.IMPORTED | 161 | branch_type = BranchType.IMPORTED |
432 | 232 | 162 | ||
433 | 233 | 163 | ||
434 | 234 | class TestRemoteBranches(TestCaseWithFactory): | ||
435 | 235 | |||
436 | 236 | layer = DatabaseFunctionalLayer | ||
437 | 237 | |||
438 | 238 | def test_raises_branch_type_error(self): | ||
439 | 239 | # getPullQueue raises `BranchTypeError` if passed BranchType.REMOTE. | ||
440 | 240 | # It's impossible to mirror remote branches, so we shouldn't even try. | ||
441 | 241 | puller = getUtility(IBranchPuller) | ||
442 | 242 | self.assertRaises( | ||
443 | 243 | BranchTypeError, puller.getPullQueue, BranchType.REMOTE) | ||
444 | 244 | |||
445 | 245 | |||
446 | 246 | class AcquireBranchToPullTests: | 164 | class AcquireBranchToPullTests: |
447 | 247 | """Tests for acquiring branches to pull. | 165 | """Tests for acquiring branches to pull. |
448 | 248 | 166 | ||
449 | 249 | 167 | ||
450 | === modified file 'lib/lp/code/model/tests/test_codeimport.py' | |||
451 | --- lib/lp/code/model/tests/test_codeimport.py 2010-02-22 10:31:50 +0000 | |||
452 | +++ lib/lp/code/model/tests/test_codeimport.py 2010-02-24 10:59:32 +0000 | |||
453 | @@ -432,6 +432,15 @@ | |||
454 | 432 | code_import, CodeImportResultStatus.SUCCESS_NOCHANGE) | 432 | code_import, CodeImportResultStatus.SUCCESS_NOCHANGE) |
455 | 433 | self.assertEqual(0, code_import.consecutive_failure_count) | 433 | self.assertEqual(0, code_import.consecutive_failure_count) |
456 | 434 | 434 | ||
457 | 435 | def test_consecutive_failure_count_succeed_succeed_partial(self): | ||
458 | 436 | # A code import that has succeeded then succeeded with no changes has | ||
459 | 437 | # a consecutive_failure_count of 0. | ||
460 | 438 | code_import = self.factory.makeCodeImport() | ||
461 | 439 | self.succeedImport(code_import) | ||
462 | 440 | self.succeedImport( | ||
463 | 441 | code_import, CodeImportResultStatus.SUCCESS_NOCHANGE) | ||
464 | 442 | self.assertEqual(0, code_import.consecutive_failure_count) | ||
465 | 443 | |||
466 | 435 | def test_consecutive_failure_count_fail_fail(self): | 444 | def test_consecutive_failure_count_fail_fail(self): |
467 | 436 | # A code import that has failed twice has a consecutive_failure_count | 445 | # A code import that has failed twice has a consecutive_failure_count |
468 | 437 | # of 2. | 446 | # of 2. |
469 | 438 | 447 | ||
470 | === modified file 'lib/lp/code/model/tests/test_codeimportjob.py' | |||
471 | --- lib/lp/code/model/tests/test_codeimportjob.py 2010-02-24 10:18:16 +0000 | |||
472 | +++ lib/lp/code/model/tests/test_codeimportjob.py 2010-02-24 10:59:32 +0000 | |||
473 | @@ -946,7 +946,8 @@ | |||
474 | 946 | code_import = job.code_import | 946 | code_import = job.code_import |
475 | 947 | self.assertTrue(code_import.date_last_successful is None) | 947 | self.assertTrue(code_import.date_last_successful is None) |
476 | 948 | getUtility(ICodeImportJobWorkflow).finishJob(job, status, None) | 948 | getUtility(ICodeImportJobWorkflow).finishJob(job, status, None) |
478 | 949 | if status in CodeImportResultStatus.successes: | 949 | if status in [CodeImportResultStatus.SUCCESS, |
479 | 950 | CodeImportResultStatus.SUCCESS_NOCHANGE]: | ||
480 | 950 | self.assertTrue(code_import.date_last_successful is not None) | 951 | self.assertTrue(code_import.date_last_successful is not None) |
481 | 951 | else: | 952 | else: |
482 | 952 | self.assertTrue(code_import.date_last_successful is None) | 953 | self.assertTrue(code_import.date_last_successful is None) |
483 | 953 | 954 | ||
484 | === modified file 'lib/lp/code/xmlrpc/codehosting.py' | |||
485 | --- lib/lp/code/xmlrpc/codehosting.py 2009-11-23 22:39:21 +0000 | |||
486 | +++ lib/lp/code/xmlrpc/codehosting.py 2010-02-24 10:59:32 +0000 | |||
487 | @@ -12,7 +12,6 @@ | |||
488 | 12 | 12 | ||
489 | 13 | 13 | ||
490 | 14 | import datetime | 14 | import datetime |
491 | 15 | import urllib | ||
492 | 16 | 15 | ||
493 | 17 | import pytz | 16 | import pytz |
494 | 18 | 17 | ||
495 | @@ -25,8 +24,7 @@ | |||
496 | 25 | 24 | ||
497 | 26 | from canonical.launchpad.ftests import login_person, logout | 25 | from canonical.launchpad.ftests import login_person, logout |
498 | 27 | from lp.code.enums import BranchType | 26 | from lp.code.enums import BranchType |
501 | 28 | from lp.code.interfaces.branch import ( | 27 | from lp.code.interfaces.branch import BranchCreationException |
500 | 29 | BranchCreationException, UnknownBranchTypeError) | ||
502 | 30 | from lp.code.interfaces.branchlookup import IBranchLookup | 28 | from lp.code.interfaces.branchlookup import IBranchLookup |
503 | 31 | from lp.code.interfaces.branchnamespace import ( | 29 | from lp.code.interfaces.branchnamespace import ( |
504 | 32 | InvalidNamespace, lookup_branch_namespace, split_unique_name) | 30 | InvalidNamespace, lookup_branch_namespace, split_unique_name) |
505 | @@ -56,46 +54,6 @@ | |||
506 | 56 | 54 | ||
507 | 57 | implements(IBranchPuller) | 55 | implements(IBranchPuller) |
508 | 58 | 56 | ||
509 | 59 | def _getBranchPullInfo(self, branch): | ||
510 | 60 | """Return information the branch puller needs to pull this branch. | ||
511 | 61 | |||
512 | 62 | This is outside of the IBranch interface so that the authserver can | ||
513 | 63 | access the information without logging in as a particular user. | ||
514 | 64 | |||
515 | 65 | :return: (id, url, unique_name, default_stacked_on_url), where 'id' | ||
516 | 66 | is the branch database ID, 'url' is the URL to pull from, | ||
517 | 67 | 'unique_name' is the `unique_name` property and | ||
518 | 68 | 'default_stacked_on_url' is the URL of the branch to stack on by | ||
519 | 69 | default (normally of the form '/~foo/bar/baz'). If there is no | ||
520 | 70 | default stacked-on branch, then it's ''. | ||
521 | 71 | """ | ||
522 | 72 | branch = removeSecurityProxy(branch) | ||
523 | 73 | if branch.branch_type == BranchType.REMOTE: | ||
524 | 74 | raise AssertionError( | ||
525 | 75 | 'Remote branches should never be in the pull queue.') | ||
526 | 76 | default_branch = branch.target.default_stacked_on_branch | ||
527 | 77 | if default_branch is None: | ||
528 | 78 | default_branch = '' | ||
529 | 79 | elif (branch.branch_type == BranchType.MIRRORED | ||
530 | 80 | and default_branch.private): | ||
531 | 81 | default_branch = '' | ||
532 | 82 | else: | ||
533 | 83 | default_branch = '/' + default_branch.unique_name | ||
534 | 84 | return ( | ||
535 | 85 | branch.id, branch.getPullURL(), branch.unique_name, | ||
536 | 86 | default_branch) | ||
537 | 87 | |||
538 | 88 | def getBranchPullQueue(self, branch_type): | ||
539 | 89 | """See `IBranchPuller`.""" | ||
540 | 90 | try: | ||
541 | 91 | branch_type = BranchType.items[branch_type] | ||
542 | 92 | except KeyError: | ||
543 | 93 | raise UnknownBranchTypeError( | ||
544 | 94 | 'Unknown branch type: %r' % (branch_type,)) | ||
545 | 95 | branches = getUtility(branchpuller.IBranchPuller).getPullQueue( | ||
546 | 96 | branch_type) | ||
547 | 97 | return [self._getBranchPullInfo(branch) for branch in branches] | ||
548 | 98 | |||
549 | 99 | def acquireBranchToPull(self): | 57 | def acquireBranchToPull(self): |
550 | 100 | """See `IBranchPuller`.""" | 58 | """See `IBranchPuller`.""" |
551 | 101 | branch = getUtility(branchpuller.IBranchPuller).acquireBranchToPull() | 59 | branch = getUtility(branchpuller.IBranchPuller).acquireBranchToPull() |
552 | 102 | 60 | ||
553 | === modified file 'lib/lp/code/xmlrpc/tests/test_codehosting.py' | |||
554 | --- lib/lp/code/xmlrpc/tests/test_codehosting.py 2010-02-18 03:11:03 +0000 | |||
555 | +++ lib/lp/code/xmlrpc/tests/test_codehosting.py 2010-02-24 10:59:32 +0000 | |||
556 | @@ -399,105 +399,6 @@ | |||
557 | 399 | self.assertFaultEqual(faults.NoBranchWithID(branch_id), fault) | 399 | self.assertFaultEqual(faults.NoBranchWithID(branch_id), fault) |
558 | 400 | 400 | ||
559 | 401 | 401 | ||
560 | 402 | class BranchPullQueueTest(TestCaseWithFactory): | ||
561 | 403 | """Tests for the pull queue methods of `IBranchPuller`.""" | ||
562 | 404 | |||
563 | 405 | def setUp(self): | ||
564 | 406 | super(BranchPullQueueTest, self).setUp() | ||
565 | 407 | frontend = self.frontend() | ||
566 | 408 | self.storage = frontend.getPullerEndpoint() | ||
567 | 409 | self.factory = frontend.getLaunchpadObjectFactory() | ||
568 | 410 | |||
569 | 411 | def assertBranchQueues(self, hosted, mirrored, imported): | ||
570 | 412 | expected_hosted = [ | ||
571 | 413 | self.storage._getBranchPullInfo(branch) for branch in hosted] | ||
572 | 414 | expected_mirrored = [ | ||
573 | 415 | self.storage._getBranchPullInfo(branch) for branch in mirrored] | ||
574 | 416 | expected_imported = [ | ||
575 | 417 | self.storage._getBranchPullInfo(branch) for branch in imported] | ||
576 | 418 | self.assertEqual( | ||
577 | 419 | expected_hosted, self.storage.getBranchPullQueue('HOSTED')) | ||
578 | 420 | self.assertEqual( | ||
579 | 421 | expected_mirrored, self.storage.getBranchPullQueue('MIRRORED')) | ||
580 | 422 | self.assertEqual( | ||
581 | 423 | expected_imported, self.storage.getBranchPullQueue('IMPORTED')) | ||
582 | 424 | |||
583 | 425 | def test_pullQueuesEmpty(self): | ||
584 | 426 | """getBranchPullQueue returns an empty list when there are no branches | ||
585 | 427 | to pull. | ||
586 | 428 | """ | ||
587 | 429 | self.assertBranchQueues([], [], []) | ||
588 | 430 | |||
589 | 431 | def makeBranchAndRequestMirror(self, branch_type): | ||
590 | 432 | """Make a branch of the given type and call requestMirror on it.""" | ||
591 | 433 | branch = self.factory.makeAnyBranch(branch_type=branch_type) | ||
592 | 434 | branch.requestMirror() | ||
593 | 435 | # The pull queues contain branches that have next_mirror_time strictly | ||
594 | 436 | # in the past, but requestMirror sets this field to UTC_NOW, so we | ||
595 | 437 | # push the time back slightly here to get the branch to show up in the | ||
596 | 438 | # queue. | ||
597 | 439 | naked_branch = removeSecurityProxy(branch) | ||
598 | 440 | naked_branch.next_mirror_time -= datetime.timedelta(seconds=1) | ||
599 | 441 | return branch | ||
600 | 442 | |||
601 | 443 | def test_getBranchPullInfo_no_default_stacked_branch(self): | ||
602 | 444 | # If there's no default stacked branch for the project that a branch | ||
603 | 445 | # is on, then _getBranchPullInfo returns (id, url, unique_name, ''). | ||
604 | 446 | branch = self.factory.makeAnyBranch() | ||
605 | 447 | info = self.storage._getBranchPullInfo(branch) | ||
606 | 448 | self.assertEqual( | ||
607 | 449 | (branch.id, branch.getPullURL(), branch.unique_name, ''), info) | ||
608 | 450 | |||
609 | 451 | def test_getBranchPullInfo_default_stacked_branch(self): | ||
610 | 452 | # If there's a default stacked branch for the project that a branch is | ||
611 | 453 | # on, then _getBranchPullInfo returns (id, url, unique_name, | ||
612 | 454 | # default_branch_unique_name). | ||
613 | 455 | product = self.factory.makeProduct() | ||
614 | 456 | default_branch = self.factory.enableDefaultStackingForProduct(product) | ||
615 | 457 | branch = self.factory.makeProductBranch(product=product) | ||
616 | 458 | info = self.storage._getBranchPullInfo(branch) | ||
617 | 459 | self.assertEqual( | ||
618 | 460 | (branch.id, branch.getPullURL(), branch.unique_name, | ||
619 | 461 | '/' + default_branch.unique_name), info) | ||
620 | 462 | |||
621 | 463 | def test_getBranchPullInfo_private_branch(self): | ||
622 | 464 | # We don't want to stack mirrored branches onto private branches: | ||
623 | 465 | # mirrored branches are public by their nature. Thus, if the default | ||
624 | 466 | # stacked-on branch for the project is private and the branch is | ||
625 | 467 | # MIRRORED then we don't include the default stacked-on branch's | ||
626 | 468 | # details in the tuple. | ||
627 | 469 | product = self.factory.makeProduct() | ||
628 | 470 | default_branch = self.factory.makeProductBranch( | ||
629 | 471 | product=product, private=True) | ||
630 | 472 | self.factory.enableDefaultStackingForProduct(product, default_branch) | ||
631 | 473 | mirrored_branch = self.factory.makeProductBranch( | ||
632 | 474 | branch_type=BranchType.MIRRORED, product=product) | ||
633 | 475 | info = self.storage._getBranchPullInfo(mirrored_branch) | ||
634 | 476 | self.assertEqual( | ||
635 | 477 | (mirrored_branch.id, mirrored_branch.getPullURL(), | ||
636 | 478 | mirrored_branch.unique_name, ''), info) | ||
637 | 479 | |||
638 | 480 | def test_getBranchPullInfo_junk(self): | ||
639 | 481 | # _getBranchPullInfo returns (id, url, unique_name, '') for junk | ||
640 | 482 | # branches. | ||
641 | 483 | branch = self.factory.makePersonalBranch() | ||
642 | 484 | info = self.storage._getBranchPullInfo(branch) | ||
643 | 485 | self.assertEqual( | ||
644 | 486 | (branch.id, branch.getPullURL(), branch.unique_name, ''), info) | ||
645 | 487 | |||
646 | 488 | def test_requestMirrorPutsBranchInQueue_hosted(self): | ||
647 | 489 | branch = self.makeBranchAndRequestMirror(BranchType.HOSTED) | ||
648 | 490 | self.assertBranchQueues([branch], [], []) | ||
649 | 491 | |||
650 | 492 | def test_requestMirrorPutsBranchInQueue_mirrored(self): | ||
651 | 493 | branch = self.makeBranchAndRequestMirror(BranchType.MIRRORED) | ||
652 | 494 | self.assertBranchQueues([], [branch], []) | ||
653 | 495 | |||
654 | 496 | def test_requestMirrorPutsBranchInQueue_imported(self): | ||
655 | 497 | branch = self.makeBranchAndRequestMirror(BranchType.IMPORTED) | ||
656 | 498 | self.assertBranchQueues([], [], [branch]) | ||
657 | 499 | |||
658 | 500 | |||
659 | 501 | class AcquireBranchToPullTestsViaEndpoint(TestCaseWithFactory, | 402 | class AcquireBranchToPullTestsViaEndpoint(TestCaseWithFactory, |
660 | 502 | AcquireBranchToPullTests): | 403 | AcquireBranchToPullTests): |
661 | 503 | """Tests for `acquireBranchToPull` method of `IBranchPuller`.""" | 404 | """Tests for `acquireBranchToPull` method of `IBranchPuller`.""" |
662 | @@ -1175,7 +1076,6 @@ | |||
663 | 1175 | suite = unittest.TestSuite() | 1076 | suite = unittest.TestSuite() |
664 | 1176 | puller_tests = unittest.TestSuite( | 1077 | puller_tests = unittest.TestSuite( |
665 | 1177 | [loader.loadTestsFromTestCase(BranchPullerTest), | 1078 | [loader.loadTestsFromTestCase(BranchPullerTest), |
666 | 1178 | loader.loadTestsFromTestCase(BranchPullQueueTest), | ||
667 | 1179 | loader.loadTestsFromTestCase(AcquireBranchToPullTestsViaEndpoint), | 1079 | loader.loadTestsFromTestCase(AcquireBranchToPullTestsViaEndpoint), |
668 | 1180 | loader.loadTestsFromTestCase(BranchFileSystemTest), | 1080 | loader.loadTestsFromTestCase(BranchFileSystemTest), |
669 | 1181 | ]) | 1081 | ]) |
670 | 1182 | 1082 | ||
671 | === modified file 'lib/lp/codehosting/codeimport/tests/test_worker.py' | |||
672 | --- lib/lp/codehosting/codeimport/tests/test_worker.py 2010-02-22 05:37:36 +0000 | |||
673 | +++ lib/lp/codehosting/codeimport/tests/test_worker.py 2010-02-24 10:59:32 +0000 | |||
674 | @@ -254,6 +254,31 @@ | |||
675 | 254 | store._getMirrorURL(self.arbitrary_branch_id), | 254 | store._getMirrorURL(self.arbitrary_branch_id), |
676 | 255 | sftp_prefix_noslash + '/' + '%08x' % self.arbitrary_branch_id) | 255 | sftp_prefix_noslash + '/' + '%08x' % self.arbitrary_branch_id) |
677 | 256 | 256 | ||
678 | 257 | def test_all_revisions_saved(self): | ||
679 | 258 | # All revisions in the branch's repo are transferred, not just those | ||
680 | 259 | # in the ancestry of the tip. | ||
681 | 260 | # Consider a branch with two heads in its repo: | ||
682 | 261 | # revid | ||
683 | 262 | # / \ | ||
684 | 263 | # revid1 revid2 <- branch tip | ||
685 | 264 | # A naive push/pull would just store 'revid' and 'revid2' in the | ||
686 | 265 | # branch store -- we need to make sure all three revisions are stored | ||
687 | 266 | # and retrieved. | ||
688 | 267 | builder = self.make_branch_builder('tree') | ||
689 | 268 | revid = builder.build_snapshot( | ||
690 | 269 | None, None, [('add', ('', 'root-id', 'directory', ''))]) | ||
691 | 270 | revid1 = builder.build_snapshot(None, [revid], []) | ||
692 | 271 | revid2 = builder.build_snapshot(None, [revid], []) | ||
693 | 272 | branch = builder.get_branch() | ||
694 | 273 | source_tree = branch.bzrdir.create_workingtree() | ||
695 | 274 | store = self.makeBranchStore() | ||
696 | 275 | store.push(self.arbitrary_branch_id, source_tree, default_format) | ||
697 | 276 | retrieved_tree = store.pull( | ||
698 | 277 | self.arbitrary_branch_id, 'pulled', default_format) | ||
699 | 278 | self.assertEqual( | ||
700 | 279 | set([revid, revid1, revid2]), | ||
701 | 280 | set(retrieved_tree.branch.repository.all_revision_ids())) | ||
702 | 281 | |||
703 | 257 | 282 | ||
704 | 258 | class TestImportDataStore(WorkerTest): | 283 | class TestImportDataStore(WorkerTest): |
705 | 259 | """Tests for `ImportDataStore`.""" | 284 | """Tests for `ImportDataStore`.""" |
706 | 260 | 285 | ||
707 | === modified file 'lib/lp/codehosting/codeimport/worker.py' | |||
708 | --- lib/lp/codehosting/codeimport/worker.py 2010-02-19 03:32:39 +0000 | |||
709 | +++ lib/lp/codehosting/codeimport/worker.py 2010-02-24 10:59:32 +0000 | |||
710 | @@ -71,20 +71,26 @@ | |||
711 | 71 | """ | 71 | """ |
712 | 72 | remote_url = self._getMirrorURL(db_branch_id) | 72 | remote_url = self._getMirrorURL(db_branch_id) |
713 | 73 | try: | 73 | try: |
715 | 74 | bzr_dir = BzrDir.open(remote_url) | 74 | remote_bzr_dir = BzrDir.open(remote_url) |
716 | 75 | except NotBranchError: | 75 | except NotBranchError: |
717 | 76 | return BzrDir.create_standalone_workingtree( | 76 | return BzrDir.create_standalone_workingtree( |
718 | 77 | target_path, required_format) | 77 | target_path, required_format) |
719 | 78 | # XXX Tim Penhey 2009-09-18 bug 432217 Automatic upgrade of import | 78 | # XXX Tim Penhey 2009-09-18 bug 432217 Automatic upgrade of import |
720 | 79 | # branches disabled. Need an orderly upgrade process. | 79 | # branches disabled. Need an orderly upgrade process. |
722 | 80 | if False and bzr_dir.needs_format_conversion(format=required_format): | 80 | if False and remote_bzr_dir.needs_format_conversion( |
723 | 81 | format=required_format): | ||
724 | 81 | try: | 82 | try: |
726 | 82 | bzr_dir.root_transport.delete_tree('backup.bzr') | 83 | remote_bzr_dir.root_transport.delete_tree('backup.bzr') |
727 | 83 | except NoSuchFile: | 84 | except NoSuchFile: |
728 | 84 | pass | 85 | pass |
729 | 85 | upgrade(remote_url, required_format) | 86 | upgrade(remote_url, required_format) |
732 | 86 | bzr_dir.sprout(target_path) | 87 | local_bzr_dir = remote_bzr_dir.sprout(target_path) |
733 | 87 | return BzrDir.open(target_path).open_workingtree() | 88 | # Because of the way we do incremental imports, there may be revisions |
734 | 89 | # in the branch's repo that are not in the ancestry of the branch tip. | ||
735 | 90 | # We need to transfer them too. | ||
736 | 91 | local_bzr_dir.open_repository().fetch( | ||
737 | 92 | remote_bzr_dir.open_repository()) | ||
738 | 93 | return local_bzr_dir.open_workingtree() | ||
739 | 88 | 94 | ||
740 | 89 | def push(self, db_branch_id, bzr_tree, required_format): | 95 | def push(self, db_branch_id, bzr_tree, required_format): |
741 | 90 | """Push up `bzr_tree` as the Bazaar branch for `code_import`. | 96 | """Push up `bzr_tree` as the Bazaar branch for `code_import`. |
742 | @@ -101,6 +107,10 @@ | |||
743 | 101 | branch_to = BzrDir.create_branch_and_repo( | 107 | branch_to = BzrDir.create_branch_and_repo( |
744 | 102 | target_url, format=required_format) | 108 | target_url, format=required_format) |
745 | 103 | pull_result = branch_to.pull(branch_from, overwrite=True) | 109 | pull_result = branch_to.pull(branch_from, overwrite=True) |
746 | 110 | # Because of the way we do incremental imports, there may be revisions | ||
747 | 111 | # in the branch's repo that are not in the ancestry of the branch tip. | ||
748 | 112 | # We need to transfer them too. | ||
749 | 113 | branch_to.repository.fetch(branch_from.repository) | ||
750 | 104 | return pull_result.old_revid != pull_result.new_revid | 114 | return pull_result.old_revid != pull_result.new_revid |
751 | 105 | 115 | ||
752 | 106 | 116 | ||
753 | 107 | 117 | ||
754 | === modified file 'lib/lp/codehosting/inmemory.py' | |||
755 | --- lib/lp/codehosting/inmemory.py 2010-02-19 03:06:12 +0000 | |||
756 | +++ lib/lp/codehosting/inmemory.py 2010-02-24 10:59:32 +0000 | |||
757 | @@ -442,29 +442,6 @@ | |||
758 | 442 | self._branch_set = branch_set | 442 | self._branch_set = branch_set |
759 | 443 | self._script_activity_set = script_activity_set | 443 | self._script_activity_set = script_activity_set |
760 | 444 | 444 | ||
761 | 445 | def _getBranchPullInfo(self, branch): | ||
762 | 446 | default_branch = '' | ||
763 | 447 | if branch.product is not None: | ||
764 | 448 | series = branch.product.development_focus | ||
765 | 449 | user_branch = series.branch | ||
766 | 450 | if (user_branch is not None | ||
767 | 451 | and not ( | ||
768 | 452 | user_branch.private | ||
769 | 453 | and branch.branch_type == BranchType.MIRRORED)): | ||
770 | 454 | default_branch = '/' + user_branch.unique_name | ||
771 | 455 | return ( | ||
772 | 456 | branch.id, branch.getPullURL(), branch.unique_name, | ||
773 | 457 | default_branch) | ||
774 | 458 | |||
775 | 459 | def getBranchPullQueue(self, branch_type): | ||
776 | 460 | queue = [] | ||
777 | 461 | branch_type = BranchType.items[branch_type] | ||
778 | 462 | for branch in self._branch_set: | ||
779 | 463 | if (branch.branch_type == branch_type | ||
780 | 464 | and branch.next_mirror_time < UTC_NOW): | ||
781 | 465 | queue.append(self._getBranchPullInfo(branch)) | ||
782 | 466 | return queue | ||
783 | 467 | |||
784 | 468 | def acquireBranchToPull(self): | 445 | def acquireBranchToPull(self): |
785 | 469 | branches = sorted( | 446 | branches = sorted( |
786 | 470 | [branch for branch in self._branch_set | 447 | [branch for branch in self._branch_set |
787 | 471 | 448 | ||
788 | === modified file 'lib/lp/soyuz/scripts/publishdistro.py' | |||
789 | --- lib/lp/soyuz/scripts/publishdistro.py 2009-06-25 04:06:00 +0000 | |||
790 | +++ lib/lp/soyuz/scripts/publishdistro.py 2010-02-24 10:59:32 +0000 | |||
791 | @@ -69,6 +69,11 @@ | |||
792 | 69 | dest="partner", metavar="PARTNER", default=False, | 69 | dest="partner", metavar="PARTNER", default=False, |
793 | 70 | help="Run only over the partner archive.") | 70 | help="Run only over the partner archive.") |
794 | 71 | 71 | ||
795 | 72 | parser.add_option("--copy-archive", action="store_true", | ||
796 | 73 | dest="copy_archive", metavar="COPYARCHIVE", | ||
797 | 74 | default=False, | ||
798 | 75 | help="Run only over the copy archives.") | ||
799 | 76 | |||
800 | 72 | parser.add_option( | 77 | parser.add_option( |
801 | 73 | "--primary-debug", action="store_true", default=False, | 78 | "--primary-debug", action="store_true", default=False, |
802 | 74 | dest="primary_debug", metavar="PRIMARYDEBUG", | 79 | dest="primary_debug", metavar="PRIMARYDEBUG", |
803 | @@ -103,12 +108,13 @@ | |||
804 | 103 | 108 | ||
805 | 104 | exclusive_options = ( | 109 | exclusive_options = ( |
806 | 105 | options.partner, options.ppa, options.private_ppa, | 110 | options.partner, options.ppa, options.private_ppa, |
808 | 106 | options.primary_debug) | 111 | options.primary_debug, options.copy_archive) |
809 | 112 | |||
810 | 107 | num_exclusive = [flag for flag in exclusive_options if flag] | 113 | num_exclusive = [flag for flag in exclusive_options if flag] |
811 | 108 | if len(num_exclusive) > 1: | 114 | if len(num_exclusive) > 1: |
812 | 109 | raise LaunchpadScriptFailure( | 115 | raise LaunchpadScriptFailure( |
815 | 110 | "Can only specify one of partner, ppa, private-ppa and " | 116 | "Can only specify one of partner, ppa, private-ppa, copy-archive" |
816 | 111 | "primary-debug.") | 117 | " and primary-debug.") |
817 | 112 | 118 | ||
818 | 113 | log.debug(" Distribution: %s" % options.distribution) | 119 | log.debug(" Distribution: %s" % options.distribution) |
819 | 114 | log.debug(" Publishing: %s" % careful_msg(options.careful_publishing)) | 120 | log.debug(" Publishing: %s" % careful_msg(options.careful_publishing)) |
820 | @@ -161,6 +167,13 @@ | |||
821 | 161 | raise LaunchpadScriptFailure( | 167 | raise LaunchpadScriptFailure( |
822 | 162 | "Could not find DEBUG archive for %s" % distribution.name) | 168 | "Could not find DEBUG archive for %s" % distribution.name) |
823 | 163 | archives = [debug_archive] | 169 | archives = [debug_archive] |
824 | 170 | elif options.copy_archive: | ||
825 | 171 | archives = getUtility(IArchiveSet).getArchivesForDistribution( | ||
826 | 172 | distribution, purposes=[ArchivePurpose.COPY]) | ||
827 | 173 | # Fix this to use bool when Storm fixes __nonzero__ on sqlobj | ||
828 | 174 | # result sets. | ||
829 | 175 | if archives.count() == 0: | ||
830 | 176 | raise LaunchpadScriptFailure("Could not find any COPY archives") | ||
831 | 164 | else: | 177 | else: |
832 | 165 | archives = [distribution.main_archive] | 178 | archives = [distribution.main_archive] |
833 | 166 | 179 | ||
834 | @@ -185,9 +198,9 @@ | |||
835 | 185 | try_and_commit("dominating", publisher.B_dominate, | 198 | try_and_commit("dominating", publisher.B_dominate, |
836 | 186 | options.careful or options.careful_domination) | 199 | options.careful or options.careful_domination) |
837 | 187 | 200 | ||
841 | 188 | # The primary archive uses apt-ftparchive to generate the indexes, | 201 | # The primary and copy archives use apt-ftparchive to generate the |
842 | 189 | # everything else uses the newer internal LP code. | 202 | # indexes, everything else uses the newer internal LP code. |
843 | 190 | if archive.purpose == ArchivePurpose.PRIMARY: | 203 | if archive.purpose in (ArchivePurpose.PRIMARY, ArchivePurpose.COPY): |
844 | 191 | try_and_commit("doing apt-ftparchive", publisher.C_doFTPArchive, | 204 | try_and_commit("doing apt-ftparchive", publisher.C_doFTPArchive, |
845 | 192 | options.careful or options.careful_apt) | 205 | options.careful or options.careful_apt) |
846 | 193 | else: | 206 | else: |
847 | 194 | 207 | ||
848 | === modified file 'lib/lp/soyuz/scripts/tests/test_publishdistro.py' | |||
849 | --- lib/lp/soyuz/scripts/tests/test_publishdistro.py 2010-01-11 05:01:32 +0000 | |||
850 | +++ lib/lp/soyuz/scripts/tests/test_publishdistro.py 2010-02-24 10:59:32 +0000 | |||
851 | @@ -308,6 +308,47 @@ | |||
852 | 308 | self.assertEqual( | 308 | self.assertEqual( |
853 | 309 | open(debug_index_path).readlines()[0], 'Package: foo-bin\n') | 309 | open(debug_index_path).readlines()[0], 'Package: foo-bin\n') |
854 | 310 | 310 | ||
855 | 311 | def testPublishCopyArchive(self): | ||
856 | 312 | """Run publish-distro in copy archive mode. | ||
857 | 313 | |||
858 | 314 | It should only publish copy archives. | ||
859 | 315 | """ | ||
860 | 316 | ubuntutest = getUtility(IDistributionSet)['ubuntutest'] | ||
861 | 317 | cprov = getUtility(IPersonSet).getByName('cprov') | ||
862 | 318 | copy_archive_name = 'test-copy-publish' | ||
863 | 319 | |||
864 | 320 | # The COPY repository path is not created yet. | ||
865 | 321 | repo_path = os.path.join( | ||
866 | 322 | config.archivepublisher.root, | ||
867 | 323 | ubuntutest.name + '-' + copy_archive_name) | ||
868 | 324 | self.assertNotExists(repo_path) | ||
869 | 325 | |||
870 | 326 | copy_archive = getUtility(IArchiveSet).new( | ||
871 | 327 | distribution=ubuntutest, owner=cprov, name=copy_archive_name, | ||
872 | 328 | purpose=ArchivePurpose.COPY, enabled=True) | ||
873 | 329 | # Save some test CPU cycles by avoiding logging in as the user | ||
874 | 330 | # necessary to alter the publish flag. | ||
875 | 331 | removeSecurityProxy(copy_archive).publish = True | ||
876 | 332 | |||
877 | 333 | # Publish something. | ||
878 | 334 | pub_source = self.getPubSource( | ||
879 | 335 | sourcename='baz', filecontent='baz', archive=copy_archive) | ||
880 | 336 | |||
881 | 337 | # Try a plain PPA run, to ensure the copy archive is not published. | ||
882 | 338 | self.runPublishDistro(['--ppa']) | ||
883 | 339 | |||
884 | 340 | self.assertEqual(pub_source.status, PackagePublishingStatus.PENDING) | ||
885 | 341 | |||
886 | 342 | # Now publish the copy archives and make sure they are really | ||
887 | 343 | # published. | ||
888 | 344 | self.runPublishDistro(['--copy-archive']) | ||
889 | 345 | |||
890 | 346 | self.assertEqual(pub_source.status, PackagePublishingStatus.PUBLISHED) | ||
891 | 347 | |||
892 | 348 | # Make sure that the files were published in the right place. | ||
893 | 349 | pool_path = os.path.join(repo_path, 'pool/main/b/baz/baz_666.dsc') | ||
894 | 350 | self.assertExists(pool_path) | ||
895 | 351 | |||
896 | 311 | def testRunWithEmptySuites(self): | 352 | def testRunWithEmptySuites(self): |
897 | 312 | """Try a publish-distro run on empty suites in careful_apt mode | 353 | """Try a publish-distro run on empty suites in careful_apt mode |
898 | 313 | 354 | ||
899 | @@ -347,7 +388,8 @@ | |||
900 | 347 | """Test that some command line options are mutually exclusive.""" | 388 | """Test that some command line options are mutually exclusive.""" |
901 | 348 | self.assertRaises( | 389 | self.assertRaises( |
902 | 349 | LaunchpadScriptFailure, | 390 | LaunchpadScriptFailure, |
904 | 350 | self.runPublishDistro, ['--ppa', '--partner', '--primary-debug']) | 391 | self.runPublishDistro, |
905 | 392 | ['--ppa', '--partner', '--primary-debug', '--copy-archive']) | ||
906 | 351 | self.assertRaises( | 393 | self.assertRaises( |
907 | 352 | LaunchpadScriptFailure, | 394 | LaunchpadScriptFailure, |
908 | 353 | self.runPublishDistro, ['--ppa', '--partner']) | 395 | self.runPublishDistro, ['--ppa', '--partner']) |
909 | @@ -359,10 +401,19 @@ | |||
910 | 359 | self.runPublishDistro, ['--ppa', '--primary-debug']) | 401 | self.runPublishDistro, ['--ppa', '--primary-debug']) |
911 | 360 | self.assertRaises( | 402 | self.assertRaises( |
912 | 361 | LaunchpadScriptFailure, | 403 | LaunchpadScriptFailure, |
913 | 404 | self.runPublishDistro, ['--ppa', '--copy-archive']) | ||
914 | 405 | self.assertRaises( | ||
915 | 406 | LaunchpadScriptFailure, | ||
916 | 362 | self.runPublishDistro, ['--partner', '--private-ppa']) | 407 | self.runPublishDistro, ['--partner', '--private-ppa']) |
917 | 363 | self.assertRaises( | 408 | self.assertRaises( |
918 | 364 | LaunchpadScriptFailure, | 409 | LaunchpadScriptFailure, |
919 | 365 | self.runPublishDistro, ['--partner', '--primary-debug']) | 410 | self.runPublishDistro, ['--partner', '--primary-debug']) |
920 | 411 | self.assertRaises( | ||
921 | 412 | LaunchpadScriptFailure, | ||
922 | 413 | self.runPublishDistro, ['--partner', '--copy-archive']) | ||
923 | 414 | self.assertRaises( | ||
924 | 415 | LaunchpadScriptFailure, | ||
925 | 416 | self.runPublishDistro, ['--primary-debug', '--copy-archive']) | ||
926 | 366 | 417 | ||
927 | 367 | 418 | ||
928 | 368 | def test_suite(): | 419 | def test_suite(): |
929 | 369 | 420 | ||
930 | === added file 'lib/lp/testing/faketransaction.py' | |||
931 | --- lib/lp/testing/faketransaction.py 1970-01-01 00:00:00 +0000 | |||
932 | +++ lib/lp/testing/faketransaction.py 2010-02-24 10:59:32 +0000 | |||
933 | @@ -0,0 +1,35 @@ | |||
934 | 1 | # Copyright 2010 Canonical Ltd. This software is licensed under the | ||
935 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | ||
936 | 3 | |||
937 | 4 | """Fake transaction manager.""" | ||
938 | 5 | |||
939 | 6 | __metaclass__ = type | ||
940 | 7 | __all__ = ['FakeTransaction'] | ||
941 | 8 | |||
942 | 9 | |||
943 | 10 | class FakeTransaction: | ||
944 | 11 | """Fake transaction manager. | ||
945 | 12 | |||
946 | 13 | Use this instead of `transaction` (or the old Zopeless transaction | ||
947 | 14 | manager) in tests if you don't really want to commit anything. | ||
948 | 15 | |||
949 | 16 | Set `log_calls` to True to enable printing of commits and aborts. | ||
950 | 17 | """ | ||
951 | 18 | def __init__(self, log_calls=False): | ||
952 | 19 | self.log_calls = log_calls | ||
953 | 20 | |||
954 | 21 | def _log(self, call): | ||
955 | 22 | """Print calls that are being made, if desired.""" | ||
956 | 23 | if self.log_calls: | ||
957 | 24 | print call | ||
958 | 25 | |||
959 | 26 | def begin(self): | ||
960 | 27 | """Pretend to begin a transaction. Does not log.""" | ||
961 | 28 | |||
962 | 29 | def commit(self): | ||
963 | 30 | """Pretend to commit.""" | ||
964 | 31 | self._log("COMMIT") | ||
965 | 32 | |||
966 | 33 | def abort(self): | ||
967 | 34 | """Pretend to roll back.""" | ||
968 | 35 | self._log("ABORT") | ||
969 | 0 | 36 | ||
970 | === modified file 'lib/lp/translations/browser/poexportrequest.py' | |||
971 | --- lib/lp/translations/browser/poexportrequest.py 2009-07-17 00:26:05 +0000 | |||
972 | +++ lib/lp/translations/browser/poexportrequest.py 2010-02-24 10:59:32 +0000 | |||
973 | @@ -7,10 +7,13 @@ | |||
974 | 7 | __all__ = ['BaseExportView'] | 7 | __all__ = ['BaseExportView'] |
975 | 8 | 8 | ||
976 | 9 | 9 | ||
977 | 10 | from datetime import timedelta | ||
978 | 11 | |||
979 | 10 | from zope.component import getUtility | 12 | from zope.component import getUtility |
980 | 11 | 13 | ||
981 | 12 | from canonical.cachedproperty import cachedproperty | 14 | from canonical.cachedproperty import cachedproperty |
982 | 13 | from canonical.launchpad import _ | 15 | from canonical.launchpad import _ |
983 | 16 | from canonical.launchpad.webapp.tales import DurationFormatterAPI | ||
984 | 14 | from lp.translations.interfaces.poexportrequest import ( | 17 | from lp.translations.interfaces.poexportrequest import ( |
985 | 15 | IPOExportRequestSet) | 18 | IPOExportRequestSet) |
986 | 16 | from lp.translations.interfaces.potemplate import ( | 19 | from lp.translations.interfaces.potemplate import ( |
987 | @@ -29,6 +32,38 @@ | |||
988 | 29 | def uses_translations(self): | 32 | def uses_translations(self): |
989 | 30 | return self.context.has_current_translation_templates | 33 | return self.context.has_current_translation_templates |
990 | 31 | 34 | ||
991 | 35 | @property | ||
992 | 36 | def export_queue_status(self): | ||
993 | 37 | """Summary of queue status.""" | ||
994 | 38 | queue_size = self.request_set.entry_count | ||
995 | 39 | estimated_backlog = self.request_set.estimateBacklog() | ||
996 | 40 | |||
997 | 41 | size_text = self.describeQueueSize(queue_size) | ||
998 | 42 | backlog_text = self.describeBacklog(estimated_backlog) | ||
999 | 43 | |||
1000 | 44 | return " ".join((size_text, backlog_text)) | ||
1001 | 45 | |||
1002 | 46 | def describeQueueSize(self, queue_size): | ||
1003 | 47 | """Return string describing the given queue size.""" | ||
1004 | 48 | if queue_size == 0: | ||
1005 | 49 | return "The export queue is currently empty." | ||
1006 | 50 | elif queue_size == 1: | ||
1007 | 51 | return "There is 1 file request on the export queue." | ||
1008 | 52 | else: | ||
1009 | 53 | return ( | ||
1010 | 54 | "There are %d file requests on the export queue." | ||
1011 | 55 | % queue_size) | ||
1012 | 56 | |||
1013 | 57 | def describeBacklog(self, estimated_backlog): | ||
1014 | 58 | """Return string describing the current export backlog.""" | ||
1015 | 59 | threshold = timedelta(minutes=10) | ||
1016 | 60 | if estimated_backlog is None or estimated_backlog < threshold: | ||
1017 | 61 | return "" | ||
1018 | 62 | |||
1019 | 63 | formatter = DurationFormatterAPI(estimated_backlog) | ||
1020 | 64 | time_string = formatter.approximateduration() | ||
1021 | 65 | return "The backlog is approximately %s." % time_string | ||
1022 | 66 | |||
1023 | 32 | def getDefaultFormat(self): | 67 | def getDefaultFormat(self): |
1024 | 33 | """Overridable: return default file format to use for the export.""" | 68 | """Overridable: return default file format to use for the export.""" |
1025 | 34 | if not IHasTranslationTemplates.providedBy(self.context): | 69 | if not IHasTranslationTemplates.providedBy(self.context): |
1026 | 35 | 70 | ||
1027 | === modified file 'lib/lp/translations/browser/tests/test_baseexportview.py' | |||
1028 | --- lib/lp/translations/browser/tests/test_baseexportview.py 2009-07-17 02:25:09 +0000 | |||
1029 | +++ lib/lp/translations/browser/tests/test_baseexportview.py 2010-02-24 10:59:32 +0000 | |||
1030 | @@ -1,8 +1,10 @@ | |||
1032 | 1 | # Copyright 2009 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2010 Canonical Ltd. This software is licensed under the |
1033 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
1034 | 3 | 3 | ||
1035 | 4 | __metaclass__ = type | 4 | __metaclass__ = type |
1036 | 5 | 5 | ||
1037 | 6 | from datetime import timedelta | ||
1038 | 7 | import transaction | ||
1039 | 6 | import unittest | 8 | import unittest |
1040 | 7 | 9 | ||
1041 | 8 | from canonical.launchpad.webapp.servers import LaunchpadTestRequest | 10 | from canonical.launchpad.webapp.servers import LaunchpadTestRequest |
1042 | @@ -16,6 +18,12 @@ | |||
1043 | 16 | from lp.testing import TestCaseWithFactory | 18 | from lp.testing import TestCaseWithFactory |
1044 | 17 | 19 | ||
1045 | 18 | 20 | ||
1046 | 21 | def wipe_queue(queue): | ||
1047 | 22 | """Erase all export queue entries.""" | ||
1048 | 23 | while queue.entry_count > 0: | ||
1049 | 24 | queue.popRequest() | ||
1050 | 25 | |||
1051 | 26 | |||
1052 | 19 | class BaseExportViewMixin(TestCaseWithFactory): | 27 | class BaseExportViewMixin(TestCaseWithFactory): |
1053 | 20 | """Test behaviour of objects subclassing BaseExportView.""" | 28 | """Test behaviour of objects subclassing BaseExportView.""" |
1054 | 21 | 29 | ||
1055 | @@ -122,7 +130,6 @@ | |||
1056 | 122 | [pofile_sr.id, pofile_es.id, pofile_sr2.id], | 130 | [pofile_sr.id, pofile_es.id, pofile_sr2.id], |
1057 | 123 | translations) | 131 | translations) |
1058 | 124 | 132 | ||
1059 | 125 | |||
1060 | 126 | class TestProductSeries(BaseExportViewMixin): | 133 | class TestProductSeries(BaseExportViewMixin): |
1061 | 127 | """Test implementation of BaseExportView on ProductSeries.""" | 134 | """Test implementation of BaseExportView on ProductSeries.""" |
1062 | 128 | 135 | ||
1063 | @@ -158,9 +165,68 @@ | |||
1064 | 158 | self.container, LaunchpadTestRequest()) | 165 | self.container, LaunchpadTestRequest()) |
1065 | 159 | 166 | ||
1066 | 160 | 167 | ||
1067 | 168 | class TestPOExportQueueStatusDescriptions(TestCaseWithFactory): | ||
1068 | 169 | |||
1069 | 170 | layer = ZopelessDatabaseLayer | ||
1070 | 171 | |||
1071 | 172 | def setUp(self): | ||
1072 | 173 | super(TestPOExportQueueStatusDescriptions, self).setUp() | ||
1073 | 174 | self.container = self.factory.makeProductSeries() | ||
1074 | 175 | self.container.product.official_rosetta = True | ||
1075 | 176 | self.view = ProductSeriesTranslationsExportView( | ||
1076 | 177 | self.container, LaunchpadTestRequest()) | ||
1077 | 178 | |||
1078 | 179 | def test_describeQueueSize(self): | ||
1079 | 180 | self.assertEqual( | ||
1080 | 181 | "The export queue is currently empty.", | ||
1081 | 182 | self.view.describeQueueSize(0)) | ||
1082 | 183 | |||
1083 | 184 | self.assertEqual( | ||
1084 | 185 | "There is 1 file request on the export queue.", | ||
1085 | 186 | self.view.describeQueueSize(1)) | ||
1086 | 187 | |||
1087 | 188 | self.assertEqual( | ||
1088 | 189 | "There are 2 file requests on the export queue.", | ||
1089 | 190 | self.view.describeQueueSize(2)) | ||
1090 | 191 | |||
1091 | 192 | def test_describeBacklog(self): | ||
1092 | 193 | backlog = None | ||
1093 | 194 | self.assertEqual("", self.view.describeBacklog(backlog).strip()) | ||
1094 | 195 | |||
1095 | 196 | backlog = timedelta(hours=2) | ||
1096 | 197 | self.assertEqual( | ||
1097 | 198 | "The backlog is approximately two hours.", | ||
1098 | 199 | self.view.describeBacklog(backlog).strip()) | ||
1099 | 200 | |||
1100 | 201 | def test_export_queue_status(self): | ||
1101 | 202 | self.view.initialize() | ||
1102 | 203 | queue = self.view.request_set | ||
1103 | 204 | wipe_queue(queue) | ||
1104 | 205 | |||
1105 | 206 | requester = self.factory.makePerson() | ||
1106 | 207 | |||
1107 | 208 | size = self.view.describeQueueSize(0) | ||
1108 | 209 | backlog = self.view.describeBacklog(None) | ||
1109 | 210 | status = "%s %s" % (size, backlog) | ||
1110 | 211 | self.assertEqual( | ||
1111 | 212 | status.strip(), self.view.export_queue_status.strip()) | ||
1112 | 213 | |||
1113 | 214 | potemplate = self.factory.makePOTemplate() | ||
1114 | 215 | queue.addRequest(requester, potemplates=[potemplate]) | ||
1115 | 216 | transaction.commit() | ||
1116 | 217 | |||
1117 | 218 | size = self.view.describeQueueSize(1) | ||
1118 | 219 | backlog = self.view.describeBacklog(queue.estimateBacklog()) | ||
1119 | 220 | status = "%s %s" % (size, backlog) | ||
1120 | 221 | self.assertEqual( | ||
1121 | 222 | status.strip(), self.view.export_queue_status.strip()) | ||
1122 | 223 | |||
1123 | 224 | |||
1124 | 161 | def test_suite(): | 225 | def test_suite(): |
1125 | 162 | suite = unittest.TestSuite() | 226 | suite = unittest.TestSuite() |
1126 | 163 | loader = unittest.TestLoader() | 227 | loader = unittest.TestLoader() |
1127 | 164 | suite.addTest(loader.loadTestsFromTestCase(TestProductSeries)) | 228 | suite.addTest(loader.loadTestsFromTestCase(TestProductSeries)) |
1128 | 165 | suite.addTest(loader.loadTestsFromTestCase(TestSourcePackage)) | 229 | suite.addTest(loader.loadTestsFromTestCase(TestSourcePackage)) |
1129 | 230 | suite.addTest(loader.loadTestsFromTestCase( | ||
1130 | 231 | TestPOExportQueueStatusDescriptions)) | ||
1131 | 166 | return suite | 232 | return suite |
1132 | 167 | 233 | ||
1133 | === modified file 'lib/lp/translations/doc/distroseries-translations-copy.txt' | |||
1134 | --- lib/lp/translations/doc/distroseries-translations-copy.txt 2009-07-03 17:01:24 +0000 | |||
1135 | +++ lib/lp/translations/doc/distroseries-translations-copy.txt 2010-02-24 10:59:32 +0000 | |||
1136 | @@ -65,13 +65,8 @@ | |||
1137 | 65 | 65 | ||
1138 | 66 | We need a transaction manager (in this case a fake one) to make the copy work. | 66 | We need a transaction manager (in this case a fake one) to make the copy work. |
1139 | 67 | 67 | ||
1147 | 68 | >>> class FakeTransactionManager: | 68 | >>> from lp.testing.faketransaction import FakeTransaction |
1148 | 69 | ... """Mock transaction manager for test.""" | 69 | >>> transaction_stub = FakeTransaction() |
1142 | 70 | ... def begin(self): | ||
1143 | 71 | ... pass | ||
1144 | 72 | ... def commit(self): | ||
1145 | 73 | ... pass | ||
1146 | 74 | >>> transaction_stub = FakeTransactionManager() | ||
1149 | 75 | 70 | ||
1150 | 76 | 71 | ||
1151 | 77 | == Preconditions for migrating translations between distro series == | 72 | == Preconditions for migrating translations between distro series == |
1152 | 78 | 73 | ||
1153 | === modified file 'lib/lp/translations/doc/gettext-check-messages.txt' | |||
1154 | --- lib/lp/translations/doc/gettext-check-messages.txt 2009-07-01 20:45:39 +0000 | |||
1155 | +++ lib/lp/translations/doc/gettext-check-messages.txt 2010-02-24 10:59:32 +0000 | |||
1156 | @@ -29,15 +29,7 @@ | |||
1157 | 29 | >>> from lp.translations.scripts.gettext_check_messages import ( | 29 | >>> from lp.translations.scripts.gettext_check_messages import ( |
1158 | 30 | ... GettextCheckMessages) | 30 | ... GettextCheckMessages) |
1159 | 31 | >>> from canonical.launchpad.scripts.logger import FakeLogger | 31 | >>> from canonical.launchpad.scripts.logger import FakeLogger |
1169 | 32 | 32 | >>> from lp.testing.faketransaction import FakeTransaction | |
1161 | 33 | >>> class MockTransactionManager: | ||
1162 | 34 | ... """"Print out commits and aborts, ignore them otherwise.""" | ||
1163 | 35 | ... def begin(self): | ||
1164 | 36 | ... pass | ||
1165 | 37 | ... def commit(self): | ||
1166 | 38 | ... print "Committing." | ||
1167 | 39 | ... def abort(self): | ||
1168 | 40 | ... print "Aborting." | ||
1170 | 41 | 33 | ||
1171 | 42 | >>> class InstrumentedGettextCheckMessages(GettextCheckMessages): | 34 | >>> class InstrumentedGettextCheckMessages(GettextCheckMessages): |
1172 | 43 | ... _commit_interval = 3 | 35 | ... _commit_interval = 3 |
1173 | @@ -49,7 +41,7 @@ | |||
1174 | 49 | ... checker = InstrumentedGettextCheckMessages( | 41 | ... checker = InstrumentedGettextCheckMessages( |
1175 | 50 | ... 'gettext-check-messages-test', test_args=options) | 42 | ... 'gettext-check-messages-test', test_args=options) |
1176 | 51 | ... checker.logger = FakeLogger() | 43 | ... checker.logger = FakeLogger() |
1178 | 52 | ... checker.txn = MockTransactionManager() | 44 | ... checker.txn = FakeTransaction(log_calls=True) |
1179 | 53 | ... if commit_interval is not None: | 45 | ... if commit_interval is not None: |
1180 | 54 | ... checker._commit_interval = commit_interval | 46 | ... checker._commit_interval = commit_interval |
1181 | 55 | ... checker.main() | 47 | ... checker.main() |
1182 | @@ -101,9 +93,9 @@ | |||
1183 | 101 | 93 | ||
1184 | 102 | >>> run_checker(['-vv', "-w id=%s" % quote(current_message.id)]) | 94 | >>> run_checker(['-vv', "-w id=%s" % quote(current_message.id)]) |
1185 | 103 | DEBUG Checking messages matching: id=... | 95 | DEBUG Checking messages matching: id=... |
1187 | 104 | DEBUG Checking message .... | 96 | DEBUG Checking message ... |
1188 | 105 | DEBUG Commit point. | 97 | DEBUG Commit point. |
1190 | 106 | Committing. | 98 | COMMIT |
1191 | 107 | INFO Done. | 99 | INFO Done. |
1192 | 108 | INFO Messages checked: 1 | 100 | INFO Messages checked: 1 |
1193 | 109 | INFO Validation errors: 0 | 101 | INFO Validation errors: 0 |
1194 | @@ -128,7 +120,7 @@ | |||
1195 | 128 | INFO ... (current): format specifications ... are not the same | 120 | INFO ... (current): format specifications ... are not the same |
1196 | 129 | INFO ...: unmasked ... | 121 | INFO ...: unmasked ... |
1197 | 130 | DEBUG Commit point. | 122 | DEBUG Commit point. |
1199 | 131 | Committing. | 123 | COMMIT |
1200 | 132 | INFO Done. | 124 | INFO Done. |
1201 | 133 | INFO Messages checked: 1 | 125 | INFO Messages checked: 1 |
1202 | 134 | INFO Validation errors: 1 | 126 | INFO Validation errors: 1 |
1203 | @@ -160,9 +152,9 @@ | |||
1204 | 160 | DEBUG Checking message ... | 152 | DEBUG Checking message ... |
1205 | 161 | INFO ... (current): format specifications ... are not the same | 153 | INFO ... (current): format specifications ... are not the same |
1206 | 162 | DEBUG Commit point. | 154 | DEBUG Commit point. |
1208 | 163 | Committing. | 155 | COMMIT |
1209 | 164 | DEBUG Commit point. | 156 | DEBUG Commit point. |
1211 | 165 | Committing. | 157 | COMMIT |
1212 | 166 | INFO Done. | 158 | INFO Done. |
1213 | 167 | INFO Messages checked: 1 | 159 | INFO Messages checked: 1 |
1214 | 168 | INFO Validation errors: 2 | 160 | INFO Validation errors: 2 |
1215 | @@ -189,11 +181,11 @@ | |||
1216 | 189 | DEBUG Checking message ... | 181 | DEBUG Checking message ... |
1217 | 190 | INFO ... (unused): format specifications ... are not the same | 182 | INFO ... (unused): format specifications ... are not the same |
1218 | 191 | DEBUG Commit point. | 183 | DEBUG Commit point. |
1220 | 192 | Committing. | 184 | COMMIT |
1221 | 193 | DEBUG Checking message ... | 185 | DEBUG Checking message ... |
1222 | 194 | INFO ... (imported): number of format specifications ... does not match... | 186 | INFO ... (imported): number of format specifications ... does not match... |
1223 | 195 | DEBUG Commit point. | 187 | DEBUG Commit point. |
1225 | 196 | Committing. | 188 | COMMIT |
1226 | 197 | INFO Done. | 189 | INFO Done. |
1227 | 198 | INFO Messages checked: 2 | 190 | INFO Messages checked: 2 |
1228 | 199 | INFO Validation errors: 3 | 191 | INFO Validation errors: 3 |
1229 | @@ -226,9 +218,9 @@ | |||
1230 | 226 | DEBUG Checking message ... | 218 | DEBUG Checking message ... |
1231 | 227 | INFO ... (current): format specifications ... are not the same | 219 | INFO ... (current): format specifications ... are not the same |
1232 | 228 | DEBUG Commit point. | 220 | DEBUG Commit point. |
1234 | 229 | Aborting. | 221 | ABORT |
1235 | 230 | DEBUG Commit point. | 222 | DEBUG Commit point. |
1237 | 231 | Aborting. | 223 | ABORT |
1238 | 232 | INFO Done. | 224 | INFO Done. |
1239 | 233 | INFO Messages checked: 1 | 225 | INFO Messages checked: 1 |
1240 | 234 | INFO Validation errors: 2 | 226 | INFO Validation errors: 2 |
1241 | @@ -250,13 +242,13 @@ | |||
1242 | 250 | DEBUG Checking message ... | 242 | DEBUG Checking message ... |
1243 | 251 | INFO ... (...): number of format specifications ... | 243 | INFO ... (...): number of format specifications ... |
1244 | 252 | DEBUG Commit point. | 244 | DEBUG Commit point. |
1246 | 253 | Committing. | 245 | COMMIT |
1247 | 254 | DEBUG Checking message ... | 246 | DEBUG Checking message ... |
1248 | 255 | INFO ... (...): format specifications ... are not the same | 247 | INFO ... (...): format specifications ... are not the same |
1249 | 256 | DEBUG Commit point. | 248 | DEBUG Commit point. |
1251 | 257 | Committing. | 249 | COMMIT |
1252 | 258 | DEBUG Commit point. | 250 | DEBUG Commit point. |
1254 | 259 | Committing. | 251 | COMMIT |
1255 | 260 | INFO Done. | 252 | INFO Done. |
1256 | 261 | INFO Messages checked: 2 | 253 | INFO Messages checked: 2 |
1257 | 262 | INFO Validation errors: 3 | 254 | INFO Validation errors: 3 |
1258 | 263 | 255 | ||
1259 | === modified file 'lib/lp/translations/doc/poexport-queue.txt' | |||
1260 | --- lib/lp/translations/doc/poexport-queue.txt 2009-08-17 16:54:40 +0000 | |||
1261 | +++ lib/lp/translations/doc/poexport-queue.txt 2010-02-24 10:59:32 +0000 | |||
1262 | @@ -12,10 +12,12 @@ | |||
1263 | 12 | >>> import transaction | 12 | >>> import transaction |
1264 | 13 | >>> from zope.component import getUtility | 13 | >>> from zope.component import getUtility |
1265 | 14 | >>> from canonical.launchpad.interfaces import IPersonSet | 14 | >>> from canonical.launchpad.interfaces import IPersonSet |
1266 | 15 | >>> from lp.testing.faketransaction import FakeTransaction | ||
1267 | 15 | >>> from lp.testing.mail_helpers import pop_notifications, print_emails | 16 | >>> from lp.testing.mail_helpers import pop_notifications, print_emails |
1268 | 16 | >>> from lp.translations.scripts.po_export_queue import ExportResult | 17 | >>> from lp.translations.scripts.po_export_queue import ExportResult |
1269 | 17 | >>> import logging | 18 | >>> import logging |
1270 | 18 | >>> logger = logging.getLogger() | 19 | >>> logger = logging.getLogger() |
1271 | 20 | >>> fake_transaction = FakeTransaction() | ||
1272 | 19 | 21 | ||
1273 | 20 | When there is an error, the system will notify it. | 22 | When there is an error, the system will notify it. |
1274 | 21 | 23 | ||
1275 | @@ -273,7 +275,7 @@ | |||
1276 | 273 | 275 | ||
1277 | 274 | Once the queue is processed, the queue is empty again. | 276 | Once the queue is processed, the queue is empty again. |
1278 | 275 | 277 | ||
1280 | 276 | >>> process_queue(transaction, logging.getLogger()) | 278 | >>> process_queue(fake_transaction, logging.getLogger()) |
1281 | 277 | INFO:...Stored file at http://.../po_evolution-2.2.pot | 279 | INFO:...Stored file at http://.../po_evolution-2.2.pot |
1282 | 278 | 280 | ||
1283 | 279 | >>> export_request_set.entry_count | 281 | >>> export_request_set.entry_count |
1284 | @@ -331,7 +333,7 @@ | |||
1285 | 331 | 333 | ||
1286 | 332 | >>> export_request_set.addRequest( | 334 | >>> export_request_set.addRequest( |
1287 | 333 | ... carlos, pofiles=[pofile], format=TranslationFileFormat.PO) | 335 | ... carlos, pofiles=[pofile], format=TranslationFileFormat.PO) |
1289 | 334 | >>> process_queue(transaction, logging.getLogger()) | 336 | >>> process_queue(fake_transaction, logging.getLogger()) |
1290 | 335 | INFO:root:Stored file at http://...eo.po | 337 | INFO:root:Stored file at http://...eo.po |
1291 | 336 | 338 | ||
1292 | 337 | >>> transaction.commit() | 339 | >>> transaction.commit() |
1293 | @@ -352,7 +354,7 @@ | |||
1294 | 352 | 354 | ||
1295 | 353 | >>> export_request_set.addRequest( | 355 | >>> export_request_set.addRequest( |
1296 | 354 | ... carlos, pofiles=[pofile], format=TranslationFileFormat.POCHANGED) | 356 | ... carlos, pofiles=[pofile], format=TranslationFileFormat.POCHANGED) |
1298 | 355 | >>> process_queue(transaction, logging.getLogger()) | 357 | >>> process_queue(fake_transaction, logging.getLogger()) |
1299 | 356 | INFO:root:Stored file at http://...eo.po | 358 | INFO:root:Stored file at http://...eo.po |
1300 | 357 | 359 | ||
1301 | 358 | >>> transaction.commit() | 360 | >>> transaction.commit() |
1302 | @@ -372,6 +374,6 @@ | |||
1303 | 372 | Finally, if we try to do an export with an empty queue, we don't do | 374 | Finally, if we try to do an export with an empty queue, we don't do |
1304 | 373 | anything: | 375 | anything: |
1305 | 374 | 376 | ||
1307 | 375 | >>> process_queue(transaction, logging.getLogger()) | 377 | >>> process_queue(fake_transaction, logging.getLogger()) |
1308 | 376 | >>> len(pop_notifications()) | 378 | >>> len(pop_notifications()) |
1309 | 377 | 0 | 379 | 0 |
1310 | 378 | 380 | ||
1311 | === modified file 'lib/lp/translations/doc/poexport-request-productseries.txt' | |||
1312 | --- lib/lp/translations/doc/poexport-request-productseries.txt 2009-08-17 13:42:00 +0000 | |||
1313 | +++ lib/lp/translations/doc/poexport-request-productseries.txt 2010-02-24 10:59:32 +0000 | |||
1314 | @@ -34,15 +34,12 @@ | |||
1315 | 34 | 34 | ||
1316 | 35 | Now we request that the queue be processed. | 35 | Now we request that the queue be processed. |
1317 | 36 | 36 | ||
1318 | 37 | >>> class MockTransactionManager: | ||
1319 | 38 | ... def commit(self): | ||
1320 | 39 | ... pass | ||
1321 | 40 | |||
1322 | 41 | >>> import logging | 37 | >>> import logging |
1323 | 38 | >>> from lp.testing.faketransaction import FakeTransaction | ||
1324 | 42 | >>> from lp.translations.scripts.po_export_queue import process_queue | 39 | >>> from lp.translations.scripts.po_export_queue import process_queue |
1325 | 43 | >>> logger = MockLogger() | 40 | >>> logger = MockLogger() |
1326 | 44 | >>> logger.setLevel(logging.DEBUG) | 41 | >>> logger.setLevel(logging.DEBUG) |
1328 | 45 | >>> process_queue(MockTransactionManager(), logger) | 42 | >>> process_queue(FakeTransaction(), logger) |
1329 | 46 | log> Exporting objects for ..., related to template evolution-2.2 in | 43 | log> Exporting objects for ..., related to template evolution-2.2 in |
1330 | 47 | Evolution trunk | 44 | Evolution trunk |
1331 | 48 | log> Exporting objects for ..., related to template evolution-2.2-test in | 45 | log> Exporting objects for ..., related to template evolution-2.2-test in |
1332 | 49 | 46 | ||
1333 | === modified file 'lib/lp/translations/doc/poexport-request.txt' | |||
1334 | --- lib/lp/translations/doc/poexport-request.txt 2009-08-17 23:37:19 +0000 | |||
1335 | +++ lib/lp/translations/doc/poexport-request.txt 2010-02-24 10:59:32 +0000 | |||
1336 | @@ -53,12 +53,9 @@ | |||
1337 | 53 | 53 | ||
1338 | 54 | Now we request that the queue be processed. | 54 | Now we request that the queue be processed. |
1339 | 55 | 55 | ||
1344 | 56 | >>> class MockTransactionManager: | 56 | >>> from lp.testing.faketransaction import FakeTransaction |
1341 | 57 | ... def commit(self): | ||
1342 | 58 | ... pass | ||
1343 | 59 | |||
1345 | 60 | >>> from lp.translations.scripts.po_export_queue import process_queue | 57 | >>> from lp.translations.scripts.po_export_queue import process_queue |
1347 | 61 | >>> process_queue(MockTransactionManager(), MockLogger()) | 58 | >>> process_queue(FakeTransaction(), MockLogger()) |
1348 | 62 | log> Exporting objects for Happy Downloader, related to template pmount | 59 | log> Exporting objects for Happy Downloader, related to template pmount |
1349 | 63 | in Ubuntu Hoary package "pmount" | 60 | in Ubuntu Hoary package "pmount" |
1350 | 64 | log> Stored file at http://.../launchpad-export.tar.gz | 61 | log> Stored file at http://.../launchpad-export.tar.gz |
1351 | @@ -188,7 +185,7 @@ | |||
1352 | 188 | >>> from lp.translations.interfaces.translationfileformat import ( | 185 | >>> from lp.translations.interfaces.translationfileformat import ( |
1353 | 189 | ... TranslationFileFormat) | 186 | ... TranslationFileFormat) |
1354 | 190 | >>> request_set.addRequest(person, None, [cs], TranslationFileFormat.MO) | 187 | >>> request_set.addRequest(person, None, [cs], TranslationFileFormat.MO) |
1356 | 191 | >>> process_queue(MockTransactionManager(), MockLogger()) | 188 | >>> process_queue(FakeTransaction(), MockLogger()) |
1357 | 192 | log> Exporting objects for Happy Downloader, related to template pmount | 189 | log> Exporting objects for Happy Downloader, related to template pmount |
1358 | 193 | in Ubuntu Hoary package "pmount" | 190 | in Ubuntu Hoary package "pmount" |
1359 | 194 | log> Stored file at http://.../cs_LC_MESSAGES_pmount.mo | 191 | log> Stored file at http://.../cs_LC_MESSAGES_pmount.mo |
1360 | 195 | 192 | ||
1361 | === modified file 'lib/lp/translations/doc/poimport.txt' | |||
1362 | --- lib/lp/translations/doc/poimport.txt 2009-11-17 09:51:40 +0000 | |||
1363 | +++ lib/lp/translations/doc/poimport.txt 2010-02-24 10:59:32 +0000 | |||
1364 | @@ -103,14 +103,7 @@ | |||
1365 | 103 | To prevent this, the importer now does intermediate commits while | 103 | To prevent this, the importer now does intermediate commits while |
1366 | 104 | recomputing statistics. | 104 | recomputing statistics. |
1367 | 105 | 105 | ||
1376 | 106 | >>> class FakeTransactionManager: | 106 | >>> from lp.testing.faketransaction import FakeTransaction |
1369 | 107 | ... """Pretend to manage a transaction, log what happens.""" | ||
1370 | 108 | ... def begin(self): | ||
1371 | 109 | ... pass | ||
1372 | 110 | ... def commit(self): | ||
1373 | 111 | ... print "Committing." | ||
1374 | 112 | ... def abort(self): | ||
1375 | 113 | ... print "Aborting." | ||
1377 | 114 | 107 | ||
1378 | 115 | Attach the import to the translations import queue: | 108 | Attach the import to the translations import queue: |
1379 | 116 | 109 | ||
1380 | @@ -136,8 +129,7 @@ | |||
1381 | 136 | Now, we tell the PO template to import from the file data it has. | 129 | Now, we tell the PO template to import from the file data it has. |
1382 | 137 | 130 | ||
1383 | 138 | >>> (subject, body) = potemplate.importFromQueue( | 131 | >>> (subject, body) = potemplate.importFromQueue( |
1386 | 139 | ... entry, FakeLogger(), txn=FakeTransactionManager()) | 132 | ... entry, FakeLogger(), txn=FakeTransaction()) |
1385 | 140 | Committing. | ||
1387 | 141 | 133 | ||
1388 | 142 | Our request has now been serviced. | 134 | Our request has now been serviced. |
1389 | 143 | 135 | ||
1390 | 144 | 136 | ||
1391 | === modified file 'lib/lp/translations/interfaces/poexportrequest.py' | |||
1392 | --- lib/lp/translations/interfaces/poexportrequest.py 2009-07-17 00:26:05 +0000 | |||
1393 | +++ lib/lp/translations/interfaces/poexportrequest.py 2010-02-24 10:59:32 +0000 | |||
1394 | @@ -1,4 +1,4 @@ | |||
1396 | 1 | # Copyright 2009 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2010 Canonical Ltd. This software is licensed under the |
1397 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
1398 | 3 | 3 | ||
1399 | 4 | # pylint: disable-msg=E0211,E0213 | 4 | # pylint: disable-msg=E0211,E0213 |
1400 | @@ -18,11 +18,16 @@ | |||
1401 | 18 | from lp.translations.interfaces.potemplate import IPOTemplate | 18 | from lp.translations.interfaces.potemplate import IPOTemplate |
1402 | 19 | from lp.translations.interfaces.translationfileformat import ( | 19 | from lp.translations.interfaces.translationfileformat import ( |
1403 | 20 | TranslationFileFormat) | 20 | TranslationFileFormat) |
1404 | 21 | |||
1405 | 22 | |||
1406 | 21 | class IPOExportRequestSet(Interface): | 23 | class IPOExportRequestSet(Interface): |
1407 | 22 | entry_count = Int( | 24 | entry_count = Int( |
1408 | 23 | title=u'Number of entries waiting in the queue.', | 25 | title=u'Number of entries waiting in the queue.', |
1409 | 24 | required=True, readonly=True) | 26 | required=True, readonly=True) |
1410 | 25 | 27 | ||
1411 | 28 | def estimateBacklog(): | ||
1412 | 29 | """Return approximate age of oldest request on the export queue.""" | ||
1413 | 30 | |||
1414 | 26 | def addRequest(person, potemplates=None, pofiles=None, | 31 | def addRequest(person, potemplates=None, pofiles=None, |
1415 | 27 | format=TranslationFileFormat.PO): | 32 | format=TranslationFileFormat.PO): |
1416 | 28 | """Add a request to export a set of files. | 33 | """Add a request to export a set of files. |
1417 | @@ -40,6 +45,7 @@ | |||
1418 | 40 | objects to export. | 45 | objects to export. |
1419 | 41 | """ | 46 | """ |
1420 | 42 | 47 | ||
1421 | 48 | |||
1422 | 43 | class IPOExportRequest(Interface): | 49 | class IPOExportRequest(Interface): |
1423 | 44 | person = Object( | 50 | person = Object( |
1424 | 45 | title=u'The person who made the request.', | 51 | title=u'The person who made the request.', |
1425 | 46 | 52 | ||
1426 | === modified file 'lib/lp/translations/model/poexportrequest.py' | |||
1427 | --- lib/lp/translations/model/poexportrequest.py 2009-07-17 00:26:05 +0000 | |||
1428 | +++ lib/lp/translations/model/poexportrequest.py 2010-02-24 10:59:32 +0000 | |||
1429 | @@ -1,17 +1,21 @@ | |||
1431 | 1 | # Copyright 2009 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2010 Canonical Ltd. This software is licensed under the |
1432 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
1433 | 3 | 3 | ||
1434 | 4 | # pylint: disable-msg=E0611,W0212 | 4 | # pylint: disable-msg=E0611,W0212 |
1435 | 5 | 5 | ||
1436 | 6 | __metaclass__ = type | 6 | __metaclass__ = type |
1437 | 7 | 7 | ||
1439 | 8 | __all__ = ('POExportRequestSet', 'POExportRequest') | 8 | __all__ = [ |
1440 | 9 | 'POExportRequest', | ||
1441 | 10 | 'POExportRequestSet', | ||
1442 | 11 | ] | ||
1443 | 9 | 12 | ||
1444 | 10 | from sqlobject import ForeignKey | 13 | from sqlobject import ForeignKey |
1445 | 11 | 14 | ||
1446 | 15 | from zope.component import getUtility | ||
1447 | 12 | from zope.interface import implements | 16 | from zope.interface import implements |
1448 | 13 | 17 | ||
1450 | 14 | from canonical.database.sqlbase import cursor, quote, SQLBase, sqlvalues | 18 | from canonical.database.sqlbase import quote, SQLBase, sqlvalues |
1451 | 15 | from canonical.database.enumcol import EnumCol | 19 | from canonical.database.enumcol import EnumCol |
1452 | 16 | 20 | ||
1453 | 17 | from lp.translations.interfaces.poexportrequest import ( | 21 | from lp.translations.interfaces.poexportrequest import ( |
1454 | @@ -19,6 +23,8 @@ | |||
1455 | 19 | from lp.translations.interfaces.potemplate import IPOTemplate | 23 | from lp.translations.interfaces.potemplate import IPOTemplate |
1456 | 20 | from lp.translations.interfaces.translationfileformat import ( | 24 | from lp.translations.interfaces.translationfileformat import ( |
1457 | 21 | TranslationFileFormat) | 25 | TranslationFileFormat) |
1458 | 26 | from canonical.launchpad.webapp.interfaces import ( | ||
1459 | 27 | DEFAULT_FLAVOR, IStoreSelector, MAIN_STORE, MASTER_FLAVOR) | ||
1460 | 22 | from lp.registry.interfaces.person import validate_public_person | 28 | from lp.registry.interfaces.person import validate_public_person |
1461 | 23 | 29 | ||
1462 | 24 | 30 | ||
1463 | @@ -28,7 +34,17 @@ | |||
1464 | 28 | @property | 34 | @property |
1465 | 29 | def entry_count(self): | 35 | def entry_count(self): |
1466 | 30 | """See `IPOExportRequestSet`.""" | 36 | """See `IPOExportRequestSet`.""" |
1468 | 31 | return POExportRequest.select().count() | 37 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
1469 | 38 | return store.find(POExportRequest, True).count() | ||
1470 | 39 | |||
1471 | 40 | def estimateBacklog(self): | ||
1472 | 41 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) | ||
1473 | 42 | row = store.execute( | ||
1474 | 43 | "SELECT now() - min(date_created) FROM POExportRequest").get_one() | ||
1475 | 44 | if row is None: | ||
1476 | 45 | return None | ||
1477 | 46 | else: | ||
1478 | 47 | return row[0] | ||
1479 | 32 | 48 | ||
1480 | 33 | def addRequest(self, person, potemplates=None, pofiles=None, | 49 | def addRequest(self, person, potemplates=None, pofiles=None, |
1481 | 34 | format=TranslationFileFormat.PO): | 50 | format=TranslationFileFormat.PO): |
1482 | @@ -58,13 +74,13 @@ | |||
1483 | 58 | 'pofiles': pofile_ids, | 74 | 'pofiles': pofile_ids, |
1484 | 59 | } | 75 | } |
1485 | 60 | 76 | ||
1487 | 61 | cur = cursor() | 77 | store = getUtility(IStoreSelector).get(MAIN_STORE, MASTER_FLAVOR) |
1488 | 62 | 78 | ||
1489 | 63 | if potemplates: | 79 | if potemplates: |
1490 | 64 | # Create requests for all these templates, insofar as the same | 80 | # Create requests for all these templates, insofar as the same |
1491 | 65 | # user doesn't already have requests pending for them in the same | 81 | # user doesn't already have requests pending for them in the same |
1492 | 66 | # format. | 82 | # format. |
1494 | 67 | cur.execute(""" | 83 | store.execute(""" |
1495 | 68 | INSERT INTO POExportRequest(person, potemplate, format) | 84 | INSERT INTO POExportRequest(person, potemplate, format) |
1496 | 69 | SELECT %(person)s, template.id, %(format)s | 85 | SELECT %(person)s, template.id, %(format)s |
1497 | 70 | FROM POTemplate AS template | 86 | FROM POTemplate AS template |
1498 | @@ -81,7 +97,7 @@ | |||
1499 | 81 | if pofiles: | 97 | if pofiles: |
1500 | 82 | # Create requests for all these translations, insofar as the same | 98 | # Create requests for all these translations, insofar as the same |
1501 | 83 | # user doesn't already have identical requests pending. | 99 | # user doesn't already have identical requests pending. |
1503 | 84 | cur.execute(""" | 100 | store.execute(""" |
1504 | 85 | INSERT INTO POExportRequest( | 101 | INSERT INTO POExportRequest( |
1505 | 86 | person, potemplate, pofile, format) | 102 | person, potemplate, pofile, format) |
1506 | 87 | SELECT %(person)s, template.id, pofile.id, %(format)s | 103 | SELECT %(person)s, template.id, pofile.id, %(format)s |
1507 | 88 | 104 | ||
1508 | === modified file 'lib/lp/translations/scripts/tests/test_copy_distroseries_translations.py' | |||
1509 | --- lib/lp/translations/scripts/tests/test_copy_distroseries_translations.py 2009-07-17 00:26:05 +0000 | |||
1510 | +++ lib/lp/translations/scripts/tests/test_copy_distroseries_translations.py 2010-02-24 10:59:32 +0000 | |||
1511 | @@ -13,24 +13,16 @@ | |||
1512 | 13 | 13 | ||
1513 | 14 | from canonical.launchpad.ftests import syncUpdate | 14 | from canonical.launchpad.ftests import syncUpdate |
1514 | 15 | from lp.registry.interfaces.distroseries import IDistroSeriesSet | 15 | from lp.registry.interfaces.distroseries import IDistroSeriesSet |
1515 | 16 | from lp.testing.faketransaction import FakeTransaction | ||
1516 | 16 | from lp.translations.scripts.copy_distroseries_translations import ( | 17 | from lp.translations.scripts.copy_distroseries_translations import ( |
1517 | 17 | copy_distroseries_translations) | 18 | copy_distroseries_translations) |
1518 | 18 | 19 | ||
1519 | 19 | from canonical.testing import LaunchpadZopelessLayer | 20 | from canonical.testing import LaunchpadZopelessLayer |
1520 | 20 | 21 | ||
1521 | 21 | 22 | ||
1522 | 22 | class MockTransactionManager: | ||
1523 | 23 | def begin(self): | ||
1524 | 24 | pass | ||
1525 | 25 | def commit(self): | ||
1526 | 26 | pass | ||
1527 | 27 | def abort(self): | ||
1528 | 28 | pass | ||
1529 | 29 | |||
1530 | 30 | |||
1531 | 31 | class TestCopying(TestCase): | 23 | class TestCopying(TestCase): |
1532 | 32 | layer = LaunchpadZopelessLayer | 24 | layer = LaunchpadZopelessLayer |
1534 | 33 | txn = MockTransactionManager() | 25 | txn = FakeTransaction() |
1535 | 34 | 26 | ||
1536 | 35 | def test_flagsHandling(self): | 27 | def test_flagsHandling(self): |
1537 | 36 | """Flags are correctly restored, no matter what their values.""" | 28 | """Flags are correctly restored, no matter what their values.""" |
1538 | 37 | 29 | ||
1539 | === modified file 'lib/lp/translations/templates/translations-export.pt' | |||
1540 | --- lib/lp/translations/templates/translations-export.pt 2009-09-17 21:07:36 +0000 | |||
1541 | +++ lib/lp/translations/templates/translations-export.pt 2010-02-24 10:59:32 +0000 | |||
1542 | @@ -38,6 +38,11 @@ | |||
1543 | 38 | This message will tell you where you can download your file. | 38 | This message will tell you where you can download your file. |
1544 | 39 | </p> | 39 | </p> |
1545 | 40 | 40 | ||
1546 | 41 | <p tal:content="view/export_queue_status"> | ||
1547 | 42 | There are 201 file requests on the export queue. The backlog is | ||
1548 | 43 | approximately 25 minutes. | ||
1549 | 44 | </p> | ||
1550 | 45 | |||
1551 | 41 | <div class="actions"> | 46 | <div class="actions"> |
1552 | 42 | <p> | 47 | <p> |
1553 | 43 | <input type="submit" value="Request Download" /> | 48 | <input type="submit" value="Request Download" /> |
This is a simple branch that adds support for publishing copy archives via
the publish-distro script.
It adds the option --copy-archive, which causes all archives with
ArchivePurpose.COPY to be published, provided their "publish" flag is set.
A simple test case is also added.