Merge lp:~cjwatson/launchpad/soyuz-doctests-future-imports into lp:launchpad
- soyuz-doctests-future-imports
- Merge into devel
Proposed by
Colin Watson
Status: | Merged |
---|---|
Merged at revision: | 18670 |
Proposed branch: | lp:~cjwatson/launchpad/soyuz-doctests-future-imports |
Merge into: | lp:launchpad |
Diff against target: |
4448 lines (+684/-674) 39 files modified
lib/lp/soyuz/doc/archive-deletion.txt (+5/-5) lib/lp/soyuz/doc/archive.txt (+69/-68) lib/lp/soyuz/doc/archiveauthtoken.txt (+14/-14) lib/lp/soyuz/doc/archivepermission.txt (+13/-13) lib/lp/soyuz/doc/archivesubscriber.txt (+29/-29) lib/lp/soyuz/doc/binarypackagerelease.txt (+2/-2) lib/lp/soyuz/doc/build-failedtoupload-workflow.txt (+6/-6) lib/lp/soyuz/doc/build-files.txt (+4/-4) lib/lp/soyuz/doc/buildd-mass-retry.txt (+5/-5) lib/lp/soyuz/doc/closing-bugs-from-changelogs.txt (+10/-10) lib/lp/soyuz/doc/components-and-sections.txt (+10/-10) lib/lp/soyuz/doc/distribution.txt (+9/-9) lib/lp/soyuz/doc/distroarchseries.txt (+6/-6) lib/lp/soyuz/doc/distroarchseriesbinarypackage.txt (+2/-2) lib/lp/soyuz/doc/distroarchseriesbinarypackagerelease.txt (+12/-9) lib/lp/soyuz/doc/distroseriesbinarypackage.txt (+10/-10) lib/lp/soyuz/doc/distroseriesqueue-notify.txt (+18/-18) lib/lp/soyuz/doc/distroseriesqueue-translations.txt (+11/-11) lib/lp/soyuz/doc/distroseriesqueue.txt (+55/-55) lib/lp/soyuz/doc/fakepackager.txt (+20/-20) lib/lp/soyuz/doc/gina-multiple-arch.txt (+23/-23) lib/lp/soyuz/doc/gina.txt (+68/-68) lib/lp/soyuz/doc/hasbuildrecords.txt (+1/-1) lib/lp/soyuz/doc/package-cache-script.txt (+2/-2) lib/lp/soyuz/doc/package-cache.txt (+51/-51) lib/lp/soyuz/doc/package-diff.txt (+50/-50) lib/lp/soyuz/doc/package-meta-classes.txt (+2/-2) lib/lp/soyuz/doc/package-relationship-pages.txt (+4/-2) lib/lp/soyuz/doc/packagecopyrequest.txt (+1/-1) lib/lp/soyuz/doc/packageupload-lookups.txt (+7/-7) lib/lp/soyuz/doc/pocketchroot.txt (+7/-7) lib/lp/soyuz/doc/publishing-security.txt (+8/-8) lib/lp/soyuz/doc/publishing.txt (+85/-85) lib/lp/soyuz/doc/sampledata-setup.txt (+2/-2) lib/lp/soyuz/doc/sourcepackagerelease.txt (+5/-5) lib/lp/soyuz/doc/soyuz-set-of-uploads.txt (+10/-10) lib/lp/soyuz/doc/soyuz-upload.txt (+27/-25) lib/lp/soyuz/doc/vocabularies.txt (+8/-8) lib/lp/soyuz/tests/test_doc.py (+13/-11) |
To merge this branch: | bzr merge lp:~cjwatson/launchpad/soyuz-doctests-future-imports |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Colin Watson (community) | Approve | ||
Review via email: mp+346922@code.launchpad.net |
Commit message
Convert doctests under lp.soyuz to Launchpad's preferred __future__ imports.
Description of the change
Huge, but mostly boring. The only bit that wasn't essentially mechanical is that in a few places I switched to using print() for strings rather than having doctests that assert on the __repr__ of text strings.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'lib/lp/soyuz/doc/archive-deletion.txt' |
2 | --- lib/lp/soyuz/doc/archive-deletion.txt 2013-02-20 04:01:45 +0000 |
3 | +++ lib/lp/soyuz/doc/archive-deletion.txt 2018-05-27 20:15:09 +0000 |
4 | @@ -17,10 +17,10 @@ |
5 | |
6 | The archive is currently active: |
7 | |
8 | - >>> print archive.enabled |
9 | + >>> print(archive.enabled) |
10 | True |
11 | |
12 | - >>> print archive.status.name |
13 | + >>> print(archive.status.name) |
14 | ACTIVE |
15 | |
16 | We can create some packages in it using the test publisher: |
17 | @@ -54,10 +54,10 @@ |
18 | Now the archive is disabled and the status is DELETING to tell the |
19 | publisher to remove the publications and the repository: |
20 | |
21 | - >>> print archive.enabled |
22 | + >>> print(archive.enabled) |
23 | False |
24 | |
25 | - >>> print archive.status.name |
26 | + >>> print(archive.status.name) |
27 | DELETING |
28 | |
29 | Once deleted the archive can't be reenabled. |
30 | @@ -67,5 +67,5 @@ |
31 | ... |
32 | AssertionError: Deleted archives can't be enabled. |
33 | |
34 | - >>> print archive.enabled |
35 | + >>> print(archive.enabled) |
36 | False |
37 | |
38 | === modified file 'lib/lp/soyuz/doc/archive.txt' |
39 | --- lib/lp/soyuz/doc/archive.txt 2017-06-03 16:40:44 +0000 |
40 | +++ lib/lp/soyuz/doc/archive.txt 2018-05-27 20:15:09 +0000 |
41 | @@ -17,23 +17,23 @@ |
42 | >>> cprov = getUtility(IPersonSet).getByName('cprov') |
43 | >>> cprov_archive = cprov.archive |
44 | |
45 | - >>> print cprov_archive.owner.name |
46 | + >>> print(cprov_archive.owner.name) |
47 | cprov |
48 | - >>> print cprov_archive.distribution.name |
49 | + >>> print(cprov_archive.distribution.name) |
50 | ubuntu |
51 | - >>> print cprov_archive.name |
52 | + >>> print(cprov_archive.name) |
53 | ppa |
54 | - >>> print cprov_archive.purpose.name |
55 | + >>> print(cprov_archive.purpose.name) |
56 | PPA |
57 | - >>> print cprov_archive.displayname |
58 | + >>> print(cprov_archive.displayname) |
59 | PPA for Celso Providelo |
60 | >>> cprov_archive.enabled |
61 | True |
62 | >>> cprov_archive.authorized_size |
63 | 1024 |
64 | - >>> print cprov_archive.signing_key |
65 | + >>> print(cprov_archive.signing_key) |
66 | None |
67 | - >>> print cprov_archive.signing_key_fingerprint |
68 | + >>> print(cprov_archive.signing_key_fingerprint) |
69 | None |
70 | >>> cprov_archive.private |
71 | False |
72 | @@ -84,7 +84,7 @@ |
73 | build. This allows an admin to set external repositories as a source for |
74 | build dependencies on the context PPA. Its default value is None: |
75 | |
76 | - >>> print cprov_archive.external_dependencies |
77 | + >>> print(cprov_archive.external_dependencies) |
78 | None |
79 | |
80 | Amending it as an unprivileged user results in failure: |
81 | @@ -101,7 +101,7 @@ |
82 | |
83 | Useful properties: |
84 | |
85 | - >>> print cprov_archive.archive_url |
86 | + >>> print(cprov_archive.archive_url) |
87 | http://ppa.launchpad.dev/cprov/ppa/ubuntu |
88 | |
89 | Inquire what Distribution Series this archive has published sources to: |
90 | @@ -125,11 +125,11 @@ |
91 | |
92 | It is only editable by someone with launchpad.Edit permissions: |
93 | |
94 | - >>> print cprov_archive.status.name |
95 | + >>> print(cprov_archive.status.name) |
96 | ACTIVE |
97 | |
98 | >>> cprov_archive.status = ArchiveStatus.DELETING |
99 | - >>> print cprov_archive.status.name |
100 | + >>> print(cprov_archive.status.name) |
101 | DELETING |
102 | |
103 | >>> login(ANONYMOUS) |
104 | @@ -202,7 +202,7 @@ |
105 | Valid names work as expected. |
106 | |
107 | >>> rebuild_archive.name = 'there-we-go' |
108 | - >>> print rebuild_archive.name |
109 | + >>> print(rebuild_archive.name) |
110 | there-we-go |
111 | |
112 | Please note that copy archive displayname doesn't follow the name change. |
113 | @@ -221,7 +221,7 @@ |
114 | |
115 | Uploads to copy archives are not allowed. |
116 | |
117 | - >>> print rebuild_archive.checkArchivePermission(cprov) |
118 | + >>> print(rebuild_archive.checkArchivePermission(cprov)) |
119 | False |
120 | |
121 | |
122 | @@ -268,7 +268,7 @@ |
123 | ... title = pub.binarypackagerelease.title |
124 | ... arch_spec = pub.binarypackagerelease.architecturespecific |
125 | ... pub_arch = pub.distroarchseries.architecturetag |
126 | - ... print "%s (%s) -> %s" % (title, arch_spec, pub_arch) |
127 | + ... print("%s (%s) -> %s" % (title, arch_spec, pub_arch)) |
128 | |
129 | The PPA for cprov contains only 4 binary publications, however 'pmount' is |
130 | 'architecture independent', which means that the same binary (DB) is |
131 | @@ -527,11 +527,11 @@ |
132 | >>> def print_published_files(archive): |
133 | ... for pub_source in archive.getPublishedSources(): |
134 | ... for src_file in pub_source.sourcepackagerelease.files: |
135 | - ... print '%s: %s (%s, %d bytes)' % ( |
136 | + ... print('%s: %s (%s, %d bytes)' % ( |
137 | ... src_file.sourcepackagerelease.title, |
138 | ... src_file.libraryfile.filename, |
139 | ... src_file.filetype.name, |
140 | - ... src_file.libraryfile.content.filesize) |
141 | + ... src_file.libraryfile.content.filesize)) |
142 | |
143 | First, let's print the currently published files in cprov's PPA: |
144 | |
145 | @@ -749,10 +749,10 @@ |
146 | >>> def print_dependencies(archive): |
147 | ... dependencies = archive.dependencies |
148 | ... if not dependencies: |
149 | - ... print "No dependencies recorded." |
150 | + ... print("No dependencies recorded.") |
151 | ... return |
152 | ... for dep in dependencies: |
153 | - ... print dep.dependency.displayname |
154 | + ... print(dep.dependency.displayname) |
155 | |
156 | Celso's PPA has no dependencies stored in the sampledata. |
157 | |
158 | @@ -774,16 +774,16 @@ |
159 | |
160 | The `IArchiveDependency` object simply maps the desired relationship. |
161 | |
162 | - >>> print archive_dependency.archive.displayname |
163 | + >>> print(archive_dependency.archive.displayname) |
164 | PPA for Celso Providelo |
165 | |
166 | - >>> print archive_dependency.dependency.displayname |
167 | + >>> print(archive_dependency.dependency.displayname) |
168 | PPA for Mark Shuttleworth |
169 | |
170 | The `IArchiveDependency` object itself implement a 'title' |
171 | property. For PPA dependencies the title defaults to the PPA displayname. |
172 | |
173 | - >>> print archive_dependency.title |
174 | + >>> print(archive_dependency.title) |
175 | PPA for Mark Shuttleworth |
176 | |
177 | The archive dependency is immediately recorded on Celso's PPA. |
178 | @@ -794,12 +794,12 @@ |
179 | 'getArchiveDependency' returns the corresponding `IArchiveDependency` |
180 | for a given 'dependency', otherwise it returns None. |
181 | |
182 | - >>> print cprov.archive.getArchiveDependency( |
183 | - ... mark.archive).dependency.displayname |
184 | + >>> print(cprov.archive.getArchiveDependency( |
185 | + ... mark.archive).dependency.displayname) |
186 | PPA for Mark Shuttleworth |
187 | |
188 | >>> no_priv = getUtility(IPersonSet).getByName('no-priv') |
189 | - >>> print cprov.archive.getArchiveDependency(no_priv.archive) |
190 | + >>> print(cprov.archive.getArchiveDependency(no_priv.archive)) |
191 | None |
192 | |
193 | As mentioned above, the archive dependency engine doesn't follow |
194 | @@ -848,12 +848,12 @@ |
195 | the target 'pocket' and a human-readable reference to the components |
196 | involved. |
197 | |
198 | - >>> print primary_dependency.title |
199 | + >>> print(primary_dependency.title) |
200 | Primary Archive for Ubuntu Linux - UPDATES (main, universe) |
201 | |
202 | They also expose the name of the component directly, for use in the API. |
203 | |
204 | - >>> print primary_dependency.component_name |
205 | + >>> print(primary_dependency.component_name) |
206 | universe |
207 | |
208 | See further implications of archive dependencies in |
209 | @@ -879,12 +879,12 @@ |
210 | >>> primary_component_dep = no_priv.archive.addArchiveDependency( |
211 | ... ubuntu.main_archive, PackagePublishingPocket.SECURITY) |
212 | |
213 | - >>> print primary_component_dep.title |
214 | + >>> print(primary_component_dep.title) |
215 | Primary Archive for Ubuntu Linux - SECURITY |
216 | |
217 | In this case the component name is None. |
218 | |
219 | - >>> print primary_component_dep.component_name |
220 | + >>> print(primary_component_dep.component_name) |
221 | None |
222 | |
223 | However only PRIMARY archive dependencies support pockets other than |
224 | @@ -942,7 +942,7 @@ |
225 | ... copy_target) |
226 | >>> pcr = ubuntu.main_archive.requestPackageCopy( |
227 | ... naked_copy_target, requestor) |
228 | - >>> print pcr |
229 | + >>> print(pcr) |
230 | Package copy request |
231 | source = primary/hoary/-/RELEASE |
232 | target = my-copy-archive/hoary/-/RELEASE |
233 | @@ -955,7 +955,7 @@ |
234 | |
235 | >>> package_copy_request = ubuntu.main_archive.requestPackageCopy( |
236 | ... naked_copy_target, requestor, suite="hoary-updates"); |
237 | - >>> print package_copy_request |
238 | + >>> print(package_copy_request) |
239 | Package copy request |
240 | source = primary/hoary/-/UPDATES |
241 | target = my-copy-archive/hoary/-/RELEASE |
242 | @@ -986,7 +986,7 @@ |
243 | |
244 | PPAs are created with the name attribute set to 'ppa' by default. |
245 | |
246 | - >>> print sandbox_archive.name |
247 | + >>> print(sandbox_archive.name) |
248 | ppa |
249 | |
250 | We can take the opportunity to check if the default 'authorized_size' |
251 | @@ -1012,13 +1012,13 @@ |
252 | >>> ubuntutest = getUtility(IDistributionSet)['ubuntutest'] |
253 | >>> partner_archive = getUtility(IArchiveSet).getByDistroPurpose( |
254 | ... ubuntutest, ArchivePurpose.PARTNER) |
255 | - >>> print partner_archive.name |
256 | + >>> print(partner_archive.name) |
257 | partner |
258 | - >>> print partner_archive.is_partner |
259 | + >>> print(partner_archive.is_partner) |
260 | True |
261 | - >>> print partner_archive.is_primary |
262 | + >>> print(partner_archive.is_primary) |
263 | False |
264 | - >>> print partner_archive.is_main |
265 | + >>> print(partner_archive.is_main) |
266 | True |
267 | |
268 | It explicitly fails when purpose is PPA, since such lookup should be |
269 | @@ -1043,14 +1043,14 @@ |
270 | |
271 | >>> partner_archive = getUtility(IArchiveSet).getByDistroAndName( |
272 | ... ubuntutest, 'partner') |
273 | - >>> print partner_archive.displayname |
274 | + >>> print(partner_archive.displayname) |
275 | Partner Archive for Ubuntu Test |
276 | |
277 | Passing an invalid name will cause an empty result set. |
278 | |
279 | >>> bogus = getUtility(IArchiveSet).getByDistroAndName( |
280 | ... ubuntutest, 'bogus') |
281 | - >>> print bogus |
282 | + >>> print(bogus) |
283 | None |
284 | |
285 | IArchive.archive_url will return a URL for the archive that the builder can |
286 | @@ -1058,31 +1058,31 @@ |
287 | PunlisherConfig require us to log in as an admin: |
288 | |
289 | >>> login('admin@canonical.com') |
290 | - >>> print partner_archive.archive_url |
291 | + >>> print(partner_archive.archive_url) |
292 | http://archive.launchpad.dev/ubuntutest-partner |
293 | |
294 | - >>> print sandbox_archive.archive_url |
295 | + >>> print(sandbox_archive.archive_url) |
296 | http://ppa.launchpad.dev/name16/ppa/ubuntu |
297 | |
298 | - >>> print getUtility(IArchiveSet).getByDistroPurpose( |
299 | - ... ubuntutest, ArchivePurpose.PRIMARY).archive_url |
300 | + >>> print(getUtility(IArchiveSet).getByDistroPurpose( |
301 | + ... ubuntutest, ArchivePurpose.PRIMARY).archive_url) |
302 | http://archive.launchpad.dev/ubuntutest |
303 | |
304 | COPY archives use a URL format of <distro-name>-<archive-name>: |
305 | |
306 | - >>> print naked_copy_target.archive.is_copy |
307 | + >>> print(naked_copy_target.archive.is_copy) |
308 | True |
309 | - >>> print naked_copy_target.archive.archive_url |
310 | + >>> print(naked_copy_target.archive.archive_url) |
311 | http://rebuild-test.internal/ubuntu-my-copy-archive/ubuntu |
312 | |
313 | If the archive is private, the url may be different as private PPAs |
314 | are published to a secure location. |
315 | |
316 | >>> login("celso.providelo@canonical.com") |
317 | - >>> print cprov_archive.archive_url |
318 | + >>> print(cprov_archive.archive_url) |
319 | http://ppa.launchpad.dev/cprov/ppa/ubuntu |
320 | |
321 | - >>> print cprov_private_ppa.archive_url |
322 | + >>> print(cprov_private_ppa.archive_url) |
323 | http://private-ppa.launchpad.dev/cprov/myprivateppa/ubuntu |
324 | |
325 | IArchive.allowUpdatesToReleasePocket returns whether the archive is allowed |
326 | @@ -1122,7 +1122,7 @@ |
327 | >>> len(archive_purposes) |
328 | 17 |
329 | |
330 | - >>> print sorted(set(archive_purposes)) |
331 | + >>> print(sorted(set(archive_purposes))) |
332 | ['COPY', 'PARTNER', 'PPA', 'PRIMARY'] |
333 | |
334 | 'getPPAsForUser' returns all the PPAs a given user participates in. It |
335 | @@ -1132,7 +1132,7 @@ |
336 | Celso only participates in his own PPAs. |
337 | |
338 | >>> for ppa in archive_set.getPPAsForUser(cprov): |
339 | - ... print ppa.displayname |
340 | + ... print(ppa.displayname) |
341 | PPA for Celso Providelo |
342 | PPA named myprivateppa for Celso Providelo |
343 | |
344 | @@ -1156,7 +1156,7 @@ |
345 | ... distribution=ubuntu, description='Yo !') |
346 | |
347 | >>> for ppa in archive_set.getPPAsForUser(cprov): |
348 | - ... print ppa.displayname |
349 | + ... print(ppa.displayname) |
350 | PPA for Celso Providelo |
351 | PPA for Launchpad Buildd Admins |
352 | PPA named myprivateppa for Celso Providelo |
353 | @@ -1166,14 +1166,14 @@ |
354 | it gets listed by `getPPAsForUser`. |
355 | |
356 | >>> for ppa in archive_set.getPPAsForUser(no_priv): |
357 | - ... print ppa.displayname |
358 | + ... print(ppa.displayname) |
359 | PPA for No Privileges Person |
360 | |
361 | >>> cprov_archive.newComponentUploader(no_priv, "main") |
362 | <ArchivePermission ...> |
363 | |
364 | >>> for ppa in archive_set.getPPAsForUser(no_priv): |
365 | - ... print ppa.displayname |
366 | + ... print(ppa.displayname) |
367 | PPA for Celso Providelo |
368 | PPA for No Privileges Person |
369 | |
370 | @@ -1188,14 +1188,14 @@ |
371 | 'indirect_uploader' currently can't upload to cprov's PPA: |
372 | |
373 | >>> for ppa in archive_set.getPPAsForUser(indirect_uploader): |
374 | - ... print ppa.displayname |
375 | + ... print(ppa.displayname) |
376 | |
377 | But if we make them part of the uploader_team they'll gain access: |
378 | |
379 | >>> ignored = uploader_team.addMember( |
380 | ... indirect_uploader, indirect_uploader) |
381 | >>> for ppa in archive_set.getPPAsForUser(indirect_uploader): |
382 | - ... print ppa.displayname |
383 | + ... print(ppa.displayname) |
384 | PPA for Celso Providelo |
385 | |
386 | When there is no active PPA for the team a user participates the |
387 | @@ -1213,7 +1213,7 @@ |
388 | |
389 | >>> p3as = archive_set.getPrivatePPAs() |
390 | >>> for p3a in p3as: |
391 | - ... print p3a.displayname |
392 | + ... print(p3a.displayname) |
393 | PPA named myprivateppa for Celso Providelo |
394 | |
395 | 'getLatestPPASourcePublicationsForDistribution' returns up to 5 |
396 | @@ -1233,7 +1233,8 @@ |
397 | ... archive_set.getLatestPPASourcePublicationsForDistribution( |
398 | ... ubuntu)) |
399 | ... for pub in latest_uploads: |
400 | - ... print pub.displayname, pub.status.name, pub.archive.owner.name |
401 | + ... print( |
402 | + ... pub.displayname, pub.status.name, pub.archive.owner.name) |
403 | |
404 | >>> print_latest_uploads() |
405 | cdrkit 1.0 in breezy-autotest SUPERSEDED cprov |
406 | @@ -1320,8 +1321,8 @@ |
407 | ... most_active_ppas = ( |
408 | ... archive_set.getMostActivePPAsForDistribution(ubuntu)) |
409 | ... for most_active in most_active_ppas: |
410 | - ... print most_active[ |
411 | - ... 'archive'].displayname, most_active['uploads'] |
412 | + ... print(most_active[ |
413 | + ... 'archive'].displayname, most_active['uploads']) |
414 | |
415 | >>> print_most_active_ppas() |
416 | PPA for Mark Shuttleworth 2 |
417 | @@ -1382,9 +1383,9 @@ |
418 | its relevant attributes. |
419 | |
420 | >>> def print_archive_names(archives): |
421 | - ... print 'Name Owner Private Enabled' |
422 | + ... print('Name Owner Private Enabled') |
423 | ... for a in archives: |
424 | - ... print a.name, a.owner.name, a.private, a.enabled |
425 | + ... print(a.name, a.owner.name, a.private, a.enabled) |
426 | |
427 | Anonymous lookups return only public and enabled archives for the |
428 | given purpose: |
429 | @@ -1784,7 +1785,7 @@ |
430 | The name is used as provided, so callsites should validate it when |
431 | necessary. |
432 | |
433 | - >>> print rebuild_archive.name |
434 | + >>> print(rebuild_archive.name) |
435 | test-rebuild-one |
436 | |
437 | Another difference is the lookup, we can use getByDistroPurpose(), |
438 | @@ -1802,14 +1803,14 @@ |
439 | |
440 | >>> candidate = getUtility(IArchiveSet).getByDistroPurpose( |
441 | ... ubuntutest, ArchivePurpose.COPY, name="does-not-exist") |
442 | - >>> print candidate |
443 | + >>> print(candidate) |
444 | None |
445 | |
446 | If there is a matching archive it is returned. |
447 | |
448 | >>> candidate = getUtility(IArchiveSet).getByDistroPurpose( |
449 | ... ubuntutest, ArchivePurpose.COPY, name="test-rebuild-one") |
450 | - >>> print candidate.name |
451 | + >>> print(candidate.name) |
452 | test-rebuild-one |
453 | |
454 | |
455 | @@ -1860,10 +1861,10 @@ |
456 | ... sources, cprov.archive, "release", person=mark) |
457 | |
458 | >>> mark_one = mark.archive.getPublishedSources(name=u"package1").one() |
459 | - >>> print mark_one.sourcepackagerelease.version |
460 | + >>> print(mark_one.sourcepackagerelease.version) |
461 | 1.1 |
462 | >>> mark_two = mark.archive.getPublishedSources(name=u"package2").one() |
463 | - >>> print mark_two.sourcepackagerelease.version |
464 | + >>> print(mark_two.sourcepackagerelease.version) |
465 | 1.0 |
466 | |
467 | Notice that the latest version of package_one was copied, ignoring the |
468 | @@ -1938,7 +1939,7 @@ |
469 | ... from_series="hoary", person=mark) |
470 | >>> mark_multiseries = mark.archive.getPublishedSources( |
471 | ... name=u"package-multiseries").one() |
472 | - >>> print mark_multiseries.sourcepackagerelease.version |
473 | + >>> print(mark_multiseries.sourcepackagerelease.version) |
474 | 1.0 |
475 | |
476 | We can also specify a single source to be copied with the `syncSource` |
477 | @@ -1978,7 +1979,7 @@ |
478 | ... person=mark) |
479 | >>> pack = mark.archive.getPublishedSources( |
480 | ... name="pack", exact_match=True).one() |
481 | - >>> print pack.sourcepackagerelease.version |
482 | + >>> print(pack.sourcepackagerelease.version) |
483 | 1.0 |
484 | |
485 | If the supplied package exists but not in the source archive, we get an error: |
486 | @@ -1993,7 +1994,7 @@ |
487 | >>> mark.archive.syncSource("package3", "1.0", cprov.archive, |
488 | ... "release", person=mark) |
489 | >>> mark_three = mark.archive.getPublishedSources(name=u"package3").one() |
490 | - >>> print mark_three.sourcepackagerelease.version |
491 | + >>> print(mark_three.sourcepackagerelease.version) |
492 | 1.0 |
493 | |
494 | It's also possible to copy the source and its binaries at the same time, |
495 | @@ -2053,7 +2054,7 @@ |
496 | >>> source_archive.getPublishedSources(name=u"overridden").count() |
497 | 2 |
498 | |
499 | - >>> print copy_candidate.section.name |
500 | + >>> print(copy_candidate.section.name) |
501 | python |
502 | |
503 | When syncing 'overridden_1.0' to Mark's PPA, the latest publication, |
504 | @@ -2064,5 +2065,5 @@ |
505 | ... from_archive=source_archive, to_pocket='release', person=mark) |
506 | |
507 | >>> copy = mark.archive.getPublishedSources(name=u"overridden").one() |
508 | - >>> print copy.section.name |
509 | + >>> print(copy.section.name) |
510 | python |
511 | |
512 | === modified file 'lib/lp/soyuz/doc/archiveauthtoken.txt' |
513 | --- lib/lp/soyuz/doc/archiveauthtoken.txt 2012-04-10 14:01:17 +0000 |
514 | +++ lib/lp/soyuz/doc/archiveauthtoken.txt 2018-05-27 20:15:09 +0000 |
515 | @@ -51,7 +51,7 @@ |
516 | |
517 | By default the tokens are 20 characters long. |
518 | |
519 | - >>> print len(token_with_random_string.token) |
520 | + >>> print(len(token_with_random_string.token)) |
521 | 20 |
522 | |
523 | It is not possible to create a second token when one already exists: |
524 | @@ -74,16 +74,16 @@ |
525 | |
526 | The new token is returned and reflects the data: |
527 | |
528 | - >>> print new_token.archive.displayname |
529 | + >>> print(new_token.archive.displayname) |
530 | PPA for Joe Smith |
531 | |
532 | - >>> print new_token.person.name |
533 | + >>> print(new_token.person.name) |
534 | bradsmith |
535 | |
536 | - >>> print new_token.token |
537 | + >>> print(new_token.token) |
538 | testtoken |
539 | |
540 | - >>> print new_token.archive_url |
541 | + >>> print(new_token.archive_url) |
542 | http://bradsmith:testtoken@private-ppa.launchpad.dev/joe/ppa/... |
543 | |
544 | Commit the new token to the database. |
545 | @@ -96,7 +96,7 @@ |
546 | >>> new_token.date_created is not None |
547 | True |
548 | |
549 | - >>> print new_token.date_deactivated |
550 | + >>> print(new_token.date_deactivated) |
551 | None |
552 | |
553 | |
554 | @@ -123,26 +123,26 @@ |
555 | |
556 | And retrieve the token by id and by token data: |
557 | |
558 | - >>> print token_set.get(new_token.id).token |
559 | + >>> print(token_set.get(new_token.id).token) |
560 | testtoken |
561 | |
562 | - >>> print token_set.getByToken(u"testtoken").person.name |
563 | + >>> print(token_set.getByToken(u"testtoken").person.name) |
564 | bradsmith |
565 | |
566 | It's also possible to retrieve a set of all the tokens for an archive. |
567 | |
568 | >>> tokens = token_set.getByArchive(joe_private_ppa) |
569 | - >>> print tokens.count() |
570 | + >>> print(tokens.count()) |
571 | 1 |
572 | |
573 | >>> for token in tokens: |
574 | - ... print token.person.name |
575 | + ... print(token.person.name) |
576 | bradsmith |
577 | |
578 | Tokens can also be retreived by archive and person: |
579 | |
580 | - >>> print token_set.getActiveTokenForArchiveAndPerson( |
581 | - ... new_token.archive, new_token.person).token |
582 | + >>> print(token_set.getActiveTokenForArchiveAndPerson( |
583 | + ... new_token.archive, new_token.person).token) |
584 | testtoken |
585 | |
586 | |
587 | @@ -181,7 +181,7 @@ |
588 | The IArchiveAuthTokenSet.getActiveTokenForArchiveAndPerson() method will |
589 | also not return tokens that have been deactivated: |
590 | |
591 | - >>> print token_set.getActiveTokenForArchiveAndPerson( |
592 | - ... new_token.archive, new_token.person) |
593 | + >>> print(token_set.getActiveTokenForArchiveAndPerson( |
594 | + ... new_token.archive, new_token.person)) |
595 | None |
596 | |
597 | |
598 | === modified file 'lib/lp/soyuz/doc/archivepermission.txt' |
599 | --- lib/lp/soyuz/doc/archivepermission.txt 2012-12-26 01:32:19 +0000 |
600 | +++ lib/lp/soyuz/doc/archivepermission.txt 2018-05-27 20:15:09 +0000 |
601 | @@ -48,22 +48,22 @@ |
602 | Ubuntu Team is indeed permissioned to upload to the main archive. It |
603 | has a number of useful properties that can be checked: |
604 | |
605 | - >>> print main_permission.date_created |
606 | + >>> print(main_permission.date_created) |
607 | 2006-10-16... |
608 | |
609 | - >>> print main_permission.archive.displayname |
610 | + >>> print(main_permission.archive.displayname) |
611 | Primary Archive for Ubuntu Linux |
612 | |
613 | >>> main_permission.permission |
614 | <DBItem ArchivePermissionType.UPLOAD, (1) Archive Upload Rights> |
615 | |
616 | - >>> print main_permission.person.name |
617 | + >>> print(main_permission.person.name) |
618 | ubuntu-team |
619 | |
620 | - >>> print main_permission.component_name |
621 | + >>> print(main_permission.component_name) |
622 | main |
623 | |
624 | - >>> print main_permission.source_package_name |
625 | + >>> print(main_permission.source_package_name) |
626 | None |
627 | |
628 | The checkAuthenticated() call is also able to check someone's |
629 | @@ -131,7 +131,7 @@ |
630 | >>> uploaders = permission_set.uploadersForComponent( |
631 | ... ubuntu.main_archive, main_component) |
632 | >>> for uploader in sorted(uploaders, key=operator.attrgetter("id")): |
633 | - ... print uploader.person.name |
634 | + ... print(uploader.person.name) |
635 | ubuntu-team |
636 | |
637 | The component argument can also be a string type and it's converted |
638 | @@ -153,7 +153,7 @@ |
639 | |
640 | >>> uploaders = permission_set.uploadersForComponent(ubuntu.main_archive) |
641 | >>> for uploader in sorted(uploaders, key=operator.attrgetter("id")): |
642 | - ... print uploader.person.name, uploader.component.name |
643 | + ... print(uploader.person.name, uploader.component.name) |
644 | ubuntu-team universe |
645 | ubuntu-team restricted |
646 | ubuntu-team main |
647 | @@ -166,7 +166,7 @@ |
648 | ... archive, person) |
649 | ... for permission in sorted( |
650 | ... permissions, key=operator.attrgetter("id")): |
651 | - ... print permission.component.name |
652 | + ... print(permission.component.name) |
653 | |
654 | >>> showComponentUploaders(ubuntu.main_archive, mark) |
655 | universe |
656 | @@ -202,7 +202,7 @@ |
657 | ... packages = permission_set.packagesForUploader( |
658 | ... archive, person) |
659 | ... for permission in sorted(packages, key=operator.attrgetter("id")): |
660 | - ... print permission.sourcepackagename.name |
661 | + ... print(permission.sourcepackagename.name) |
662 | |
663 | >>> carlos = getUtility(IPersonSet).getByName('carlos') |
664 | >>> showPersonsPackages(ubuntu.main_archive, carlos) |
665 | @@ -228,7 +228,7 @@ |
666 | ... archive, component) |
667 | ... for archive_admin in sorted( |
668 | ... archive_admins, key=operator.attrgetter("id")): |
669 | - ... print archive_admin.person.name |
670 | + ... print(archive_admin.person.name) |
671 | |
672 | >>> showQueueAdmins(ubuntu.main_archive, main_component) |
673 | ubuntu-team |
674 | @@ -243,7 +243,7 @@ |
675 | >>> permissions = permission_set.componentsForQueueAdmin( |
676 | ... ubuntu.main_archive, name12) |
677 | >>> for permission in sorted(permissions, key=operator.attrgetter("id")): |
678 | - ... print permission.component.name |
679 | + ... print(permission.component.name) |
680 | main |
681 | restricted |
682 | universe |
683 | @@ -253,7 +253,7 @@ |
684 | >>> permissions = permission_set.componentsForQueueAdmin( |
685 | ... ubuntu.all_distro_archives, no_team) |
686 | >>> for permission in sorted(permissions, key=operator.attrgetter("id")): |
687 | - ... print permission.component.name |
688 | + ... print(permission.component.name) |
689 | universe |
690 | multiverse |
691 | |
692 | @@ -355,7 +355,7 @@ |
693 | ... archive, pocket, distroseries=distroseries) |
694 | ... for archive_admin in sorted( |
695 | ... archive_admins, key=operator.attrgetter("id")): |
696 | - ... print archive_admin.person.name |
697 | + ... print(archive_admin.person.name) |
698 | |
699 | >>> new_permission = permission_set.newPocketQueueAdmin( |
700 | ... ubuntu.main_archive, carlos, PackagePublishingPocket.SECURITY) |
701 | |
702 | === modified file 'lib/lp/soyuz/doc/archivesubscriber.txt' |
703 | --- lib/lp/soyuz/doc/archivesubscriber.txt 2015-07-21 09:04:01 +0000 |
704 | +++ lib/lp/soyuz/doc/archivesubscriber.txt 2018-05-27 20:15:09 +0000 |
705 | @@ -81,16 +81,16 @@ |
706 | |
707 | The new subscription is returned and reflects the data: |
708 | |
709 | - >>> print new_sub.displayname |
710 | + >>> print(new_sub.displayname) |
711 | Joe Smith's access to PPA named p3a for Celso Providelo |
712 | |
713 | - >>> print new_sub.registrant.name |
714 | + >>> print(new_sub.registrant.name) |
715 | cprov |
716 | |
717 | - >>> print new_sub.description |
718 | + >>> print(new_sub.description) |
719 | subscription for joesmith |
720 | |
721 | - >>> print new_sub.status.name |
722 | + >>> print(new_sub.status.name) |
723 | CURRENT |
724 | |
725 | Subscriptions also contain some date information: |
726 | @@ -98,7 +98,7 @@ |
727 | >>> new_sub.date_created is not None |
728 | True |
729 | |
730 | - >>> print new_sub.date_expires |
731 | + >>> print(new_sub.date_expires) |
732 | None |
733 | |
734 | An email is sent to the subscribed person when the ArchiveSubscriber |
735 | @@ -253,18 +253,18 @@ |
736 | |
737 | And retrieve the subscription by subscriber and archive: |
738 | |
739 | - >>> print sub_set.getBySubscriber( |
740 | - ... new_sub.subscriber)[0].archive.displayname |
741 | + >>> print(sub_set.getBySubscriber( |
742 | + ... new_sub.subscriber)[0].archive.displayname) |
743 | PPA named p3a for Celso Providelo |
744 | |
745 | - >>> print sub_set.getByArchive(new_sub.archive)[1].subscriber.name |
746 | + >>> print(sub_set.getByArchive(new_sub.archive)[1].subscriber.name) |
747 | joesmith |
748 | |
749 | The getBySubscriber() method takes an optional archive parameter for |
750 | finding a subscription for a particular user in a particular archive: |
751 | |
752 | - >>> print sub_set.getBySubscriber( |
753 | - ... new_sub.subscriber, new_sub.archive)[0].archive.displayname |
754 | + >>> print(sub_set.getBySubscriber( |
755 | + ... new_sub.subscriber, new_sub.archive)[0].archive.displayname) |
756 | PPA named p3a for Celso Providelo |
757 | |
758 | By default the getBySubscriber() and getByArchive() methods return |
759 | @@ -272,17 +272,17 @@ |
760 | |
761 | >>> login('mark@example.com') |
762 | >>> for subscription in sub_set.getBySubscriber(new_sub.subscriber): |
763 | - ... print subscription.archive.displayname |
764 | - ... print subscription.date_created.date() |
765 | + ... print(subscription.archive.displayname) |
766 | + ... print(subscription.date_created.date()) |
767 | PPA named p3a for Celso Providelo 2009-02-26 |
768 | PPA named p3a for Mark Shuttleworth 2009-02-22 |
769 | |
770 | getByArchive() sorts by subscriber name. |
771 | |
772 | >>> for subscription in sub_set.getByArchive(mark_private_ppa): |
773 | - ... print subscription.subscriber.name |
774 | - ... print subscription.subscriber.displayname |
775 | - ... print subscription.date_created.date() |
776 | + ... print(subscription.subscriber.name) |
777 | + ... print(subscription.subscriber.displayname) |
778 | + ... print(subscription.date_created.date()) |
779 | joesmith Joe Smith 2009-02-22 |
780 | team-name-... Team Cprov 2009-02-24 |
781 | |
782 | @@ -313,8 +313,8 @@ |
783 | |
784 | >>> joesmith.join(team_cprov) |
785 | >>> for subscription in sub_set.getBySubscriber(joesmith): |
786 | - ... print subscription.archive.displayname |
787 | - ... print subscription.description |
788 | + ... print(subscription.archive.displayname) |
789 | + ... print(subscription.description) |
790 | PPA named p3a for Celso Providelo subscription for joesmith |
791 | PPA named p3a for Mark Shuttleworth Access for cprov team |
792 | |
793 | @@ -336,8 +336,8 @@ |
794 | ... token_text = token.token |
795 | ... else: |
796 | ... token_text = "None" |
797 | - ... print subscription.archive.displayname |
798 | - ... print token_text |
799 | + ... print(subscription.archive.displayname) |
800 | + ... print(token_text) |
801 | >>> print_subscriptions_with_tokens( |
802 | ... sub_set.getBySubscriberWithActiveToken(joesmith)) |
803 | PPA named p3a for Celso Providelo test_token |
804 | @@ -347,7 +347,7 @@ |
805 | for the activated tokens. |
806 | |
807 | >>> for url in joesmith.getArchiveSubscriptionURLs(joesmith): |
808 | - ... print url |
809 | + ... print(url) |
810 | http://joesmith:test_token@private-ppa.launchpad.dev/cprov/p3a/ubuntu |
811 | |
812 | This method can only be used by someone with launchpad.Edit on the context |
813 | @@ -435,10 +435,10 @@ |
814 | >>> new_sub.date_cancelled is not None |
815 | True |
816 | |
817 | - >>> print new_sub.cancelled_by.name |
818 | + >>> print(new_sub.cancelled_by.name) |
819 | cprov |
820 | |
821 | - >>> print new_sub.status.name |
822 | + >>> print(new_sub.status.name) |
823 | CANCELLED |
824 | |
825 | We can do this as an admin too: |
826 | @@ -458,8 +458,8 @@ |
827 | via the cprov_team: |
828 | |
829 | >>> for subscription in sub_set.getBySubscriber(joesmith): |
830 | - ... print subscription.archive.displayname |
831 | - ... print subscription.description |
832 | + ... print(subscription.archive.displayname) |
833 | + ... print(subscription.description) |
834 | PPA named p3a for Mark Shuttleworth Access for cprov team |
835 | |
836 | >>> subscription = sub_set.getBySubscriber(joesmith).first() |
837 | @@ -468,7 +468,7 @@ |
838 | currently include Joe: |
839 | |
840 | >>> for person, email in subscription.getNonActiveSubscribers(): |
841 | - ... print person.displayname, email.email |
842 | + ... print(person.displayname, email.email) |
843 | Celso Providelo celso.providelo@canonical.com |
844 | Joe Smith joe@example.com |
845 | John Smith john@example.com |
846 | @@ -479,7 +479,7 @@ |
847 | |
848 | >>> joesmith_token = mark_private_ppa.newAuthToken(joesmith) |
849 | >>> for person, email in subscription.getNonActiveSubscribers(): |
850 | - ... print person.displayname |
851 | + ... print(person.displayname) |
852 | Celso Providelo |
853 | John Smith |
854 | |
855 | @@ -493,7 +493,7 @@ |
856 | >>> subscription = mark_private_ppa.newSubscription( |
857 | ... harrysmith, mark, description=u"subscription for joesmith") |
858 | >>> for person, email in subscription.getNonActiveSubscribers(): |
859 | - ... print person.displayname |
860 | + ... print(person.displayname) |
861 | Harry Smith |
862 | |
863 | If Harry activates a token for his new subscription then |
864 | @@ -501,7 +501,7 @@ |
865 | "active". |
866 | |
867 | >>> harry_token = mark_private_ppa.newAuthToken(harrysmith) |
868 | - >>> print subscription.getNonActiveSubscribers().count() |
869 | + >>> print(subscription.getNonActiveSubscribers().count()) |
870 | 0 |
871 | |
872 | If the subscription is for a group which itself contains a group, all |
873 | @@ -513,7 +513,7 @@ |
874 | >>> subscription = mark_private_ppa.newSubscription( |
875 | ... launchpad_devs, mark, description=u"LP team too") |
876 | >>> for person, email in subscription.getNonActiveSubscribers(): |
877 | - ... print person.displayname |
878 | + ... print(person.displayname) |
879 | Celso Providelo |
880 | John Smith |
881 | Foo Bar |
882 | |
883 | === modified file 'lib/lp/soyuz/doc/binarypackagerelease.txt' |
884 | --- lib/lp/soyuz/doc/binarypackagerelease.txt 2014-04-24 06:45:51 +0000 |
885 | +++ lib/lp/soyuz/doc/binarypackagerelease.txt 2018-05-27 20:15:09 +0000 |
886 | @@ -50,7 +50,7 @@ |
887 | |
888 | >>> import operator |
889 | >>> for name in sorted(names, key=operator.attrgetter('name')): |
890 | - ... print name.name |
891 | + ... print(name.name) |
892 | mozilla-firefox |
893 | pmount |
894 | |
895 | @@ -135,7 +135,7 @@ |
896 | |
897 | >>> def print_files(binary): |
898 | ... for bin_file in binary.files: |
899 | - ... print bin_file.libraryfile.filename, bin_file.filetype.name |
900 | + ... print(bin_file.libraryfile.filename, bin_file.filetype.name) |
901 | |
902 | >>> print_files(a_binary) |
903 | foo-bin_666_all.deb DEB |
904 | |
905 | === modified file 'lib/lp/soyuz/doc/build-failedtoupload-workflow.txt' |
906 | --- lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2015-07-14 10:57:46 +0000 |
907 | +++ lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2018-05-27 20:15:09 +0000 |
908 | @@ -25,13 +25,13 @@ |
909 | |
910 | >>> failedtoupload_candidate = buildset.getByID(22) |
911 | |
912 | - >>> print failedtoupload_candidate.title |
913 | + >>> print(failedtoupload_candidate.title) |
914 | i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE |
915 | |
916 | - >>> print failedtoupload_candidate.status.name |
917 | + >>> print(failedtoupload_candidate.status.name) |
918 | FAILEDTOUPLOAD |
919 | |
920 | - >>> print failedtoupload_candidate.upload_log.filename |
921 | + >>> print(failedtoupload_candidate.upload_log.filename) |
922 | upload_22_log.txt |
923 | |
924 | FAILEDTOUPLOAD notification requires 'extra_info' argument to be filled: |
925 | @@ -79,7 +79,7 @@ |
926 | 'mark@example.com' |
927 | |
928 | >>> notification_body = build_notification.get_payload() |
929 | - >>> print notification_body #doctest: -NORMALIZE_WHITESPACE |
930 | + >>> print(notification_body) #doctest: -NORMALIZE_WHITESPACE |
931 | <BLANKLINE> |
932 | * Source Package: cdrkit |
933 | * Version: 1.0 |
934 | @@ -109,12 +109,12 @@ |
935 | |
936 | The other notifications are similar except for the footer. |
937 | |
938 | - >>> print notifications[1].get_payload() |
939 | + >>> print(notifications[1].get_payload()) |
940 | <BLANKLINE> |
941 | ... |
942 | You are receiving this email because you are a buildd administrator. |
943 | <BLANKLINE> |
944 | - >>> print notifications[2].get_payload() |
945 | + >>> print(notifications[2].get_payload()) |
946 | <BLANKLINE> |
947 | ... |
948 | You are receiving this email because you created this version of this |
949 | |
950 | === modified file 'lib/lp/soyuz/doc/build-files.txt' |
951 | --- lib/lp/soyuz/doc/build-files.txt 2013-01-22 02:06:59 +0000 |
952 | +++ lib/lp/soyuz/doc/build-files.txt 2018-05-27 20:15:09 +0000 |
953 | @@ -29,7 +29,7 @@ |
954 | * Build upload logs: '_log.txt'; |
955 | * Built files: '*deb'; |
956 | |
957 | - >>> print build.title |
958 | + >>> print(build.title) |
959 | i386 build of test-pkg 1.0 in ubuntutest breezy-autotest RELEASE |
960 | |
961 | Unsupported filename lookups also result in a `NotFoundError`. |
962 | @@ -37,19 +37,19 @@ |
963 | >>> build.getFileByName('biscuit.cookie') |
964 | Traceback (most recent call last): |
965 | ... |
966 | - NotFoundError: 'biscuit.cookie' |
967 | + NotFoundError: u'biscuit.cookie' |
968 | |
969 | And unreachable files in `NotFoundError`. |
970 | |
971 | >>> build.getFileByName('boing.changes') |
972 | Traceback (most recent call last): |
973 | ... |
974 | - NotFoundError: 'boing.changes' |
975 | + NotFoundError: u'boing.changes' |
976 | |
977 | Retrieving a binary changesfile. "test_1.0_i386.changes" is created when |
978 | SoyuzTestPublisher creates the "test" binary publication. |
979 | |
980 | - >>> print build.upload_changesfile.filename |
981 | + >>> print(build.upload_changesfile.filename) |
982 | test_1.0_i386.changes |
983 | |
984 | >>> build.upload_changesfile == build.getFileByName( |
985 | |
986 | === modified file 'lib/lp/soyuz/doc/buildd-mass-retry.txt' |
987 | --- lib/lp/soyuz/doc/buildd-mass-retry.txt 2011-12-29 05:29:36 +0000 |
988 | +++ lib/lp/soyuz/doc/buildd-mass-retry.txt 2018-05-27 20:15:09 +0000 |
989 | @@ -36,7 +36,7 @@ |
990 | >>> stdout, stderr = process.communicate() |
991 | >>> process.returncode |
992 | 0 |
993 | - >>> print stderr |
994 | + >>> print(stderr) |
995 | INFO Creating lockfile: ... |
996 | INFO Initializing Build Mass-Retry for |
997 | 'The Hoary Hedgehog Release/RELEASE' |
998 | @@ -62,7 +62,7 @@ |
999 | Let's mark the build from the previous run superseded. |
1000 | |
1001 | >>> pub.status = PackagePublishingStatus.SUPERSEDED |
1002 | - >>> print build.current_source_publication |
1003 | + >>> print(build.current_source_publication) |
1004 | None |
1005 | >>> transaction.commit() |
1006 | |
1007 | @@ -74,7 +74,7 @@ |
1008 | >>> stdout, stderr = process.communicate() |
1009 | >>> process.returncode |
1010 | 0 |
1011 | - >>> print stderr |
1012 | + >>> print(stderr) |
1013 | INFO Creating lockfile: ... |
1014 | INFO Initializing Build Mass-Retry for |
1015 | 'The Hoary Hedgehog Release/RELEASE' |
1016 | @@ -103,7 +103,7 @@ |
1017 | >>> stdout, stderr = process.communicate() |
1018 | >>> process.returncode |
1019 | 0 |
1020 | - >>> print stderr |
1021 | + >>> print(stderr) |
1022 | INFO Creating lockfile: ... |
1023 | INFO Initializing Build Mass-Retry for |
1024 | 'The Hoary Hedgehog Release for hppa (hppa)/RELEASE' |
1025 | @@ -124,7 +124,7 @@ |
1026 | >>> stdout, stderr = process.communicate() |
1027 | >>> process.returncode |
1028 | 0 |
1029 | - >>> print stderr |
1030 | + >>> print(stderr) |
1031 | INFO Creating lockfile: ... |
1032 | INFO Initializing Build Mass-Retry for |
1033 | 'The Hoary Hedgehog Release/RELEASE' |
1034 | |
1035 | === modified file 'lib/lp/soyuz/doc/closing-bugs-from-changelogs.txt' |
1036 | --- lib/lp/soyuz/doc/closing-bugs-from-changelogs.txt 2015-07-21 09:04:01 +0000 |
1037 | +++ lib/lp/soyuz/doc/closing-bugs-from-changelogs.txt 2018-05-27 20:15:09 +0000 |
1038 | @@ -115,12 +115,12 @@ |
1039 | >>> switch_dbuser('launchpad') |
1040 | >>> pmount_bug = getUtility(IBugSet).get(pmount_bug_id) |
1041 | >>> last_comment = pmount_bug.messages[-1] |
1042 | - >>> print pmount_release.creator.displayname |
1043 | + >>> print(pmount_release.creator.displayname) |
1044 | Mark Shuttleworth |
1045 | - >>> print last_comment.owner.displayname |
1046 | + >>> print(last_comment.owner.displayname) |
1047 | Launchpad Janitor |
1048 | |
1049 | - >>> print pmount_release.changelog_entry |
1050 | + >>> print(pmount_release.changelog_entry) |
1051 | pmount (0.1-1) hoary; urgency=low |
1052 | <BLANKLINE> |
1053 | * Fix description (Malone #1) |
1054 | @@ -129,7 +129,7 @@ |
1055 | <BLANKLINE> |
1056 | -- Sample Person <test@canonical.com> Tue, 7 Feb 2006 12:10:08 +0300 |
1057 | |
1058 | - >>> print last_comment.text_contents |
1059 | + >>> print(last_comment.text_contents) |
1060 | This bug was fixed in the package pmount - 0.1-1 |
1061 | <BLANKLINE> |
1062 | --------------- |
1063 | @@ -148,9 +148,9 @@ |
1064 | >>> from lp.bugs.model.bugnotification import BugNotification |
1065 | >>> notifications = BugNotification.select(orderBy='id') |
1066 | >>> for notification in notifications[-2:]: |
1067 | - ... print "From %s:\n%s\n" % ( |
1068 | + ... print("From %s:\n%s\n" % ( |
1069 | ... notification.message.owner.displayname, |
1070 | - ... notification.message.text_contents) |
1071 | + ... notification.message.text_contents)) |
1072 | From Launchpad Janitor: |
1073 | ** Changed in: pmount (Ubuntu) |
1074 | Status: New => Fix Released |
1075 | @@ -190,15 +190,15 @@ |
1076 | |
1077 | >>> def close_bugs_and_check_status(bug_id_list, queue_item): |
1078 | ... """Close bugs, reporting status before and after.""" |
1079 | - ... print "Before:" |
1080 | + ... print("Before:") |
1081 | ... for bug_id in bug_id_list: |
1082 | - ... print print_single_task_status(bug_id) |
1083 | + ... print(print_single_task_status(bug_id)) |
1084 | ... switch_dbuser(test_dbuser) |
1085 | ... close_bugs_for_queue_item(queue_item) |
1086 | ... switch_dbuser('launchpad') |
1087 | - ... print "After:" |
1088 | + ... print("After:") |
1089 | ... for bug_id in bug_id_list: |
1090 | - ... print print_single_task_status(bug_id) |
1091 | + ... print(print_single_task_status(bug_id)) |
1092 | |
1093 | |
1094 | Uploads to pocket PROPOSED should not close bugs, see bug #125279 for |
1095 | |
1096 | === modified file 'lib/lp/soyuz/doc/components-and-sections.txt' |
1097 | --- lib/lp/soyuz/doc/components-and-sections.txt 2015-10-05 06:34:17 +0000 |
1098 | +++ lib/lp/soyuz/doc/components-and-sections.txt 2018-05-27 20:15:09 +0000 |
1099 | @@ -20,7 +20,7 @@ |
1100 | |
1101 | Test some attributes: |
1102 | |
1103 | - >>> print main.id, main.name |
1104 | + >>> print(main.id, main.name) |
1105 | 1 main |
1106 | |
1107 | Check if the instance corresponds to the declared interface: |
1108 | @@ -41,7 +41,7 @@ |
1109 | Test iteration over the sampledata default components: |
1110 | |
1111 | >>> for c in component_set: |
1112 | - ... print c.name |
1113 | + ... print(c.name) |
1114 | main |
1115 | restricted |
1116 | universe |
1117 | @@ -52,18 +52,18 @@ |
1118 | |
1119 | Test __getitem__ method, retrieving a component by name: |
1120 | |
1121 | - >>> print component_set['universe'].name |
1122 | + >>> print(component_set['universe'].name) |
1123 | universe |
1124 | |
1125 | Test get method, retrieving a component by its id: |
1126 | |
1127 | - >>> print component_set.get(2).name |
1128 | + >>> print(component_set.get(2).name) |
1129 | restricted |
1130 | |
1131 | New component creation for a given name: |
1132 | |
1133 | >>> new_comp = component_set.new('test') |
1134 | - >>> print new_comp.name |
1135 | + >>> print(new_comp.name) |
1136 | test |
1137 | |
1138 | Ensuring a component (if not found, create it): |
1139 | @@ -86,7 +86,7 @@ |
1140 | |
1141 | Test some attributes: |
1142 | |
1143 | - >>> print base.id, base.name |
1144 | + >>> print(base.id, base.name) |
1145 | 1 base |
1146 | |
1147 | Check if the instance corresponds to the declared interface: |
1148 | @@ -107,7 +107,7 @@ |
1149 | Test iteration over the sampledata default sections: |
1150 | |
1151 | >>> for s in section_set: |
1152 | - ... print s.name |
1153 | + ... print(s.name) |
1154 | base |
1155 | web |
1156 | editors |
1157 | @@ -148,18 +148,18 @@ |
1158 | |
1159 | Test __getitem__ method, retrieving a section by name: |
1160 | |
1161 | - >>> print section_set['science'].name |
1162 | + >>> print(section_set['science'].name) |
1163 | science |
1164 | |
1165 | Test get method, retrieving a section by its id: |
1166 | |
1167 | - >>> print section_set.get(2).name |
1168 | + >>> print(section_set.get(2).name) |
1169 | web |
1170 | |
1171 | New section creation for a given name: |
1172 | |
1173 | >>> new_sec = section_set.new('test') |
1174 | - >>> print new_sec.name |
1175 | + >>> print(new_sec.name) |
1176 | test |
1177 | |
1178 | Ensuring a section (if not found, create it): |
1179 | |
1180 | === modified file 'lib/lp/soyuz/doc/distribution.txt' |
1181 | --- lib/lp/soyuz/doc/distribution.txt 2015-01-07 00:35:41 +0000 |
1182 | +++ lib/lp/soyuz/doc/distribution.txt 2018-05-27 20:15:09 +0000 |
1183 | @@ -72,7 +72,7 @@ |
1184 | and its 'contents description' (see package-cache.txt). |
1185 | |
1186 | >>> for owner in [cprov, mark, no_priv]: |
1187 | - ... print "%s: %s" % (owner.name, owner.archive.description) |
1188 | + ... print("%s: %s" % (owner.name, owner.archive.description)) |
1189 | cprov: packages to help my friends. |
1190 | mark: packages to help the humanity (you know, ubuntu) |
1191 | no-priv: I am not allowed to say, I have no privs. |
1192 | @@ -198,7 +198,7 @@ |
1193 | >>> warty = ubuntu['warty'] |
1194 | >>> pocket_release = PackagePublishingPocket.RELEASE |
1195 | >>> src_pub = cprov_src.copyTo(warty, pocket_release, cprov.archive) |
1196 | - >>> print src_pub.status.name |
1197 | + >>> print(src_pub.status.name) |
1198 | PENDING |
1199 | |
1200 | >>> [pending_ppa] = ubuntu.getPendingPublicationPPAs() |
1201 | @@ -255,7 +255,7 @@ |
1202 | listed in the PPA pending-publication results. |
1203 | |
1204 | >>> for pub in pending_binaries: |
1205 | - ... print pub.status.name |
1206 | + ... print(pub.status.name) |
1207 | PENDING |
1208 | PENDING |
1209 | |
1210 | @@ -302,7 +302,7 @@ |
1211 | |
1212 | >>> ubuntutest = getUtility(IDistributionSet)['ubuntutest'] |
1213 | >>> for archive in ubuntutest.all_distro_archives: |
1214 | - ... print archive.purpose.title |
1215 | + ... print(archive.purpose.title) |
1216 | Primary Archive |
1217 | Partner Archive |
1218 | |
1219 | @@ -310,11 +310,11 @@ |
1220 | component name. If the component is unknown, None is returned. |
1221 | |
1222 | >>> partner_archive = ubuntutest.getArchiveByComponent('partner') |
1223 | - >>> print partner_archive.displayname |
1224 | + >>> print(partner_archive.displayname) |
1225 | Partner Archive for Ubuntu Test |
1226 | |
1227 | >>> other_archive = ubuntutest.getArchiveByComponent('dodgycomponent') |
1228 | - >>> print other_archive |
1229 | + >>> print(other_archive) |
1230 | None |
1231 | |
1232 | Multiple components, specially the debian-compatibility ones points to |
1233 | @@ -322,13 +322,13 @@ |
1234 | their packages in the correct archive. |
1235 | |
1236 | >>> main_archive = ubuntutest.getArchiveByComponent('main') |
1237 | - >>> print main_archive.displayname |
1238 | + >>> print(main_archive.displayname) |
1239 | Primary Archive for Ubuntu Test |
1240 | |
1241 | >>> non_free_archive = ubuntutest.getArchiveByComponent('non-free') |
1242 | - >>> print non_free_archive.displayname |
1243 | + >>> print(non_free_archive.displayname) |
1244 | Primary Archive for Ubuntu Test |
1245 | |
1246 | >>> contrib_archive = ubuntutest.getArchiveByComponent('contrib') |
1247 | - >>> print contrib_archive.displayname |
1248 | + >>> print(contrib_archive.displayname) |
1249 | Primary Archive for Ubuntu Test |
1250 | |
1251 | === modified file 'lib/lp/soyuz/doc/distroarchseries.txt' |
1252 | --- lib/lp/soyuz/doc/distroarchseries.txt 2015-04-20 15:59:52 +0000 |
1253 | +++ lib/lp/soyuz/doc/distroarchseries.txt 2018-05-27 20:15:09 +0000 |
1254 | @@ -34,7 +34,7 @@ |
1255 | Enabled is a boolean flag that says whether the arch will receive new builds |
1256 | and publish them. |
1257 | |
1258 | - >>> print hoary_i386.enabled |
1259 | + >>> print(hoary_i386.enabled) |
1260 | True |
1261 | |
1262 | `DistroSeries.enabled_architectures` is a `ResultSet` containing the |
1263 | @@ -70,7 +70,7 @@ |
1264 | 1 |
1265 | >>> results = warty_i386.searchBinaryPackages(text=u'a') |
1266 | >>> for dasbp in results: |
1267 | - ... print "%s: %s" % (dasbp.__class__.__name__, dasbp.name) |
1268 | + ... print("%s: %s" % (dasbp.__class__.__name__, dasbp.name)) |
1269 | DistroArchSeriesBinaryPackageRelease: at |
1270 | DistroArchSeriesBinaryPackageRelease: mozilla-firefox |
1271 | DistroArchSeriesBinaryPackageRelease: mozilla-firefox |
1272 | @@ -142,7 +142,7 @@ |
1273 | >>> pmount_hoary_i386.publishing_history.count() |
1274 | 3 |
1275 | |
1276 | - >>> print pub.status.name, pub.datesuperseded is not None |
1277 | + >>> print(pub.status.name, pub.datesuperseded is not None) |
1278 | SUPERSEDED True |
1279 | |
1280 | |
1281 | @@ -169,7 +169,7 @@ |
1282 | ... result += 'ppa' |
1283 | ... if arch.official or arch.supports_virtualized: |
1284 | ... result += ')' |
1285 | - ... print result |
1286 | + ... print(result) |
1287 | |
1288 | >>> print_architectures(warty.architectures) |
1289 | The Warty Warthog Release for hppa (hppa) |
1290 | @@ -228,7 +228,7 @@ |
1291 | The architecture also has a 'chroot_url' attribute directly referencing |
1292 | the file. |
1293 | |
1294 | - >>> print hoary.getDistroArchSeries('hppa').chroot_url |
1295 | + >>> print(hoary.getDistroArchSeries('hppa').chroot_url) |
1296 | http://.../filename... |
1297 | >>> hoary.getDistroArchSeries('hppa').chroot_url == \ |
1298 | ... chroot.http_url |
1299 | @@ -236,7 +236,7 @@ |
1300 | |
1301 | If there is no chroot, chroot_url will be None. |
1302 | |
1303 | - >>> print hoary.getDistroArchSeries('i386').chroot_url |
1304 | + >>> print(hoary.getDistroArchSeries('i386').chroot_url) |
1305 | None |
1306 | |
1307 | `DistroSeries.buildable_architectures` results are ordered |
1308 | |
1309 | === modified file 'lib/lp/soyuz/doc/distroarchseriesbinarypackage.txt' |
1310 | --- lib/lp/soyuz/doc/distroarchseriesbinarypackage.txt 2016-05-19 05:02:59 +0000 |
1311 | +++ lib/lp/soyuz/doc/distroarchseriesbinarypackage.txt 2018-05-27 20:15:09 +0000 |
1312 | @@ -16,7 +16,7 @@ |
1313 | |
1314 | `DistroArchSeriesBinaryPackage`s have a title property: |
1315 | |
1316 | - >>> print pmount_hoary_i386.title |
1317 | + >>> print(pmount_hoary_i386.title) |
1318 | pmount binary package in Ubuntu Hoary i386 |
1319 | |
1320 | First, we create a new version of pmount, and a version of mozilla- |
1321 | @@ -160,7 +160,7 @@ |
1322 | ... getUtility(IDistributionSet)['debian']['woody']['i386']) |
1323 | >>> pmount_woody_i386 = DistroArchSeriesBinaryPackage( |
1324 | ... deb_wdy_i386, pmount_name) |
1325 | - >>> print pmount_woody_i386.distro_source_package |
1326 | + >>> print(pmount_woody_i386.distro_source_package) |
1327 | None |
1328 | |
1329 | Check the publishing record of packages returned by 'currentrelease' and |
1330 | |
1331 | === modified file 'lib/lp/soyuz/doc/distroarchseriesbinarypackagerelease.txt' |
1332 | --- lib/lp/soyuz/doc/distroarchseriesbinarypackagerelease.txt 2014-11-27 22:13:36 +0000 |
1333 | +++ lib/lp/soyuz/doc/distroarchseriesbinarypackagerelease.txt 2018-05-27 20:15:09 +0000 |
1334 | @@ -11,18 +11,18 @@ |
1335 | Grab the relevant DARs and BPRs: |
1336 | |
1337 | >>> warty = DistroArchSeries.get(1) |
1338 | - >>> print warty.distroseries.name |
1339 | + >>> print(warty.distroseries.name) |
1340 | warty |
1341 | >>> hoary = DistroArchSeries.get(6) |
1342 | - >>> print hoary.distroseries.name |
1343 | + >>> print(hoary.distroseries.name) |
1344 | hoary |
1345 | |
1346 | >>> mf = BinaryPackageRelease.get(12) |
1347 | - >>> print mf.binarypackagename.name |
1348 | + >>> print(mf.binarypackagename.name) |
1349 | mozilla-firefox |
1350 | |
1351 | >>> pm = BinaryPackageRelease.get(15) |
1352 | - >>> print pm.binarypackagename.name |
1353 | + >>> print(pm.binarypackagename.name) |
1354 | pmount |
1355 | |
1356 | Assemble our DARBPRs for fun and profit: |
1357 | @@ -33,13 +33,16 @@ |
1358 | >>> pm_hoary = DARBPR(hoary, pm) |
1359 | |
1360 | >>> for darbpr in [mf_warty, mf_hoary, pm_warty, pm_hoary]: |
1361 | - ... print darbpr.name, darbpr.version, darbpr._latest_publishing_record() |
1362 | + ... print( |
1363 | + ... darbpr.name, darbpr.version, darbpr._latest_publishing_record()) |
1364 | mozilla-firefox 0.9 <BinaryPackagePublishingHistory at 0x...> |
1365 | mozilla-firefox 0.9 None |
1366 | pmount 0.1-1 <BinaryPackagePublishingHistory at 0x...> |
1367 | pmount 0.1-1 <BinaryPackagePublishingHistory at 0x...> |
1368 | |
1369 | - >>> print mf_warty.status.title, pm_warty.status.title, pm_hoary.status.title |
1370 | + >>> print( |
1371 | + ... mf_warty.status.title, pm_warty.status.title, |
1372 | + ... pm_hoary.status.title) |
1373 | Published Superseded Published |
1374 | |
1375 | |
1376 | @@ -51,15 +54,15 @@ |
1377 | >>> warty_i386 = getUtility(IDistributionSet)['ubuntu']['warty']['i386'] |
1378 | |
1379 | >>> warty_i386_pmount = warty_i386.getBinaryPackage('pmount') |
1380 | - >>> print warty_i386_pmount.title |
1381 | + >>> print(warty_i386_pmount.title) |
1382 | pmount binary package in Ubuntu Warty i386 |
1383 | |
1384 | >>> pmount_release_in_warty = warty_i386_pmount['0.1-1'] |
1385 | - >>> print pmount_release_in_warty.title |
1386 | + >>> print(pmount_release_in_warty.title) |
1387 | pmount 0.1-1 (i386 binary) in ubuntu warty |
1388 | |
1389 | >>> parent = ( |
1390 | ... pmount_release_in_warty.distroarchseriesbinarypackage) |
1391 | - >>> print parent.title |
1392 | + >>> print(parent.title) |
1393 | pmount binary package in Ubuntu Warty i386 |
1394 | |
1395 | |
1396 | === modified file 'lib/lp/soyuz/doc/distroseriesbinarypackage.txt' |
1397 | --- lib/lp/soyuz/doc/distroseriesbinarypackage.txt 2014-11-27 22:13:36 +0000 |
1398 | +++ lib/lp/soyuz/doc/distroseriesbinarypackage.txt 2018-05-27 20:15:09 +0000 |
1399 | @@ -21,29 +21,29 @@ |
1400 | |
1401 | It has a name, summary, description and title: |
1402 | |
1403 | - >>> print firefox_dsbp.name |
1404 | + >>> print(firefox_dsbp.name) |
1405 | mozilla-firefox |
1406 | |
1407 | - >>> print firefox_dsbp.summary |
1408 | + >>> print(firefox_dsbp.summary) |
1409 | Mozilla Firefox Web Browser |
1410 | |
1411 | - >>> print firefox_dsbp.description |
1412 | + >>> print(firefox_dsbp.description) |
1413 | Mozilla Firefox Web Browser is ..... |
1414 | |
1415 | - >>> print firefox_dsbp.title |
1416 | + >>> print(firefox_dsbp.title) |
1417 | Binary package "mozilla-firefox" in ubuntu warty |
1418 | |
1419 | - >>> print firefox_dsbp.distribution.name |
1420 | + >>> print(firefox_dsbp.distribution.name) |
1421 | ubuntu |
1422 | |
1423 | It provides the current publishings for the binary package in the |
1424 | distro series (ordered by architecture then datecreated): |
1425 | |
1426 | >>> for published in firefox_dsbp.current_publishings: |
1427 | - ... print "%s %s in %s" % ( |
1428 | + ... print("%s %s in %s" % ( |
1429 | ... published.distroarchseriesbinarypackagerelease.name, |
1430 | ... published.distroarchseriesbinarypackagerelease.version, |
1431 | - ... published.distroarchseries.architecturetag) |
1432 | + ... published.distroarchseries.architecturetag)) |
1433 | mozilla-firefox 0.9 in hppa |
1434 | mozilla-firefox 0.9 in i386 |
1435 | mozilla-firefox 1.0 in i386 |
1436 | @@ -56,7 +56,7 @@ |
1437 | |
1438 | It also provides access to the last DistributionSourcePackageRelease: |
1439 | |
1440 | - >>> print firefox_dsbp.last_sourcepackagerelease.title |
1441 | + >>> print(firefox_dsbp.last_sourcepackagerelease.title) |
1442 | iceweasel 1.0 source package in Ubuntu |
1443 | |
1444 | If a DistroSeriesBinaryPackage doesn't have a DistroSeriesPackageCache, |
1445 | @@ -75,11 +75,11 @@ |
1446 | >>> firefox_hoary_dsbp.current_publishings |
1447 | [] |
1448 | |
1449 | - >>> print firefox_hoary_dsbp.last_published |
1450 | + >>> print(firefox_hoary_dsbp.last_published) |
1451 | None |
1452 | |
1453 | In this case, the last DistributionSourcePackageRelease will also be None: |
1454 | |
1455 | - >>> print firefox_hoary_dsbp.last_sourcepackagerelease |
1456 | + >>> print(firefox_hoary_dsbp.last_sourcepackagerelease) |
1457 | None |
1458 | |
1459 | |
1460 | === modified file 'lib/lp/soyuz/doc/distroseriesqueue-notify.txt' |
1461 | --- lib/lp/soyuz/doc/distroseriesqueue-notify.txt 2016-03-03 16:16:16 +0000 |
1462 | +++ lib/lp/soyuz/doc/distroseriesqueue-notify.txt 2018-05-27 20:15:09 +0000 |
1463 | @@ -10,7 +10,7 @@ |
1464 | |
1465 | >>> from lp.soyuz.interfaces.queue import IPackageUploadSet |
1466 | >>> netapplet_upload = getUtility(IPackageUploadSet)[3] |
1467 | - >>> print netapplet_upload.displayname |
1468 | + >>> print(netapplet_upload.displayname) |
1469 | netapplet |
1470 | |
1471 | Set up some library files for the netapplet source package. These are |
1472 | @@ -84,7 +84,7 @@ |
1473 | |
1474 | The mail body contains a list of files that were accepted: |
1475 | |
1476 | - >>> print notification.get_payload(0) # doctest: -NORMALIZE_WHITESPACE |
1477 | + >>> print(notification.get_payload(0)) # doctest: -NORMALIZE_WHITESPACE |
1478 | From nobody ... |
1479 | ... |
1480 | NEW: netapplet_1.0-1.dsc |
1481 | @@ -156,7 +156,7 @@ |
1482 | ... key=to_lower) |
1483 | |
1484 | >>> for msg in msgs: |
1485 | - ... print msg['To'] |
1486 | + ... print(msg['To']) |
1487 | Daniel Silverstone <daniel.silverstone@canonical.com> |
1488 | Foo Bar <foo.bar@canonical.com> |
1489 | autotest_changes@ubuntu.com |
1490 | @@ -165,26 +165,26 @@ |
1491 | uploader and the Debian derivatives address for the package uploaded. |
1492 | |
1493 | >>> for msg in msgs: |
1494 | - ... print extract_addresses(msg['Bcc']) |
1495 | - ['Root <root@localhost>'] |
1496 | - ['Root <root@localhost>'] |
1497 | - ['netapplet_derivatives@packages.qa.debian.org', 'Root <root@localhost>'] |
1498 | + ... print(extract_addresses(msg['Bcc'])) |
1499 | + [u'Root <root@localhost>'] |
1500 | + [u'Root <root@localhost>'] |
1501 | + [u'netapplet_derivatives@packages.qa.debian.org', u'Root <root@localhost>'] |
1502 | |
1503 | The mail 'From:' addresses are the uploader (for acknowledgements sent to |
1504 | the uploader and the changer) and the changer. |
1505 | |
1506 | >>> for msg in msgs: |
1507 | - ... print msg['From'] |
1508 | + ... print(msg['From']) |
1509 | Root <root@localhost> |
1510 | Root <root@localhost> |
1511 | Daniel Silverstone <daniel.silverstone@canonical.com> |
1512 | |
1513 | - >>> print msgs[0]['Subject'] |
1514 | + >>> print(msgs[0]['Subject']) |
1515 | [ubuntu/breezy-autotest] netapplet 0.99.6-1 (Accepted) |
1516 | |
1517 | The mail body contains the same list of files again: |
1518 | |
1519 | - >>> print msgs[0].get_payload(0) # doctest: -NORMALIZE_WHITESPACE |
1520 | + >>> print(msgs[0].get_payload(0)) # doctest: -NORMALIZE_WHITESPACE |
1521 | From nobody ... |
1522 | ... |
1523 | OK: netapplet_1.0-1.dsc |
1524 | @@ -237,24 +237,24 @@ |
1525 | |
1526 | The mail headers are the same as before: |
1527 | |
1528 | - >>> print changer_notification['To'] |
1529 | + >>> print(changer_notification['To']) |
1530 | Daniel Silverstone <daniel.silverstone@canonical.com> |
1531 | - >>> print signer_notification['To'] |
1532 | + >>> print(signer_notification['To']) |
1533 | Foo Bar <foo.bar@canonical.com> |
1534 | |
1535 | - >>> print changer_notification['Bcc'] |
1536 | + >>> print(changer_notification['Bcc']) |
1537 | Root <root@localhost> |
1538 | - >>> print signer_notification['Bcc'] |
1539 | + >>> print(signer_notification['Bcc']) |
1540 | Root <root@localhost> |
1541 | |
1542 | - >>> print changer_notification['Subject'] |
1543 | + >>> print(changer_notification['Subject']) |
1544 | [ubuntu/breezy-autotest] netapplet 0.99.6-1 (New) |
1545 | - >>> print signer_notification['Subject'] |
1546 | + >>> print(signer_notification['Subject']) |
1547 | [ubuntu/breezy-autotest] netapplet 0.99.6-1 (New) |
1548 | |
1549 | The mail body contains the same list of files again: |
1550 | |
1551 | - >>> print changer_notification.get_payload(0) |
1552 | + >>> print(changer_notification.get_payload(0)) |
1553 | ... # doctest: -NORMALIZE_WHITESPACE |
1554 | From nobody ... |
1555 | ... |
1556 | @@ -271,7 +271,7 @@ |
1557 | You are receiving this email because you are the most recent person |
1558 | listed in this package's changelog. |
1559 | <BLANKLINE> |
1560 | - >>> print signer_notification.get_payload(0) |
1561 | + >>> print(signer_notification.get_payload(0)) |
1562 | ... # doctest: -NORMALIZE_WHITESPACE |
1563 | From nobody ... |
1564 | ... |
1565 | |
1566 | === modified file 'lib/lp/soyuz/doc/distroseriesqueue-translations.txt' |
1567 | --- lib/lp/soyuz/doc/distroseriesqueue-translations.txt 2016-11-17 16:46:04 +0000 |
1568 | +++ lib/lp/soyuz/doc/distroseriesqueue-translations.txt 2018-05-27 20:15:09 +0000 |
1569 | @@ -108,7 +108,7 @@ |
1570 | |
1571 | >>> from lp.registry.model.sourcepackage import SourcePackage |
1572 | >>> dapper_pmount = SourcePackage(pmount_sourcepackagename, dapper) |
1573 | - >>> print len(dapper_pmount.getLatestTranslationsUploads()) |
1574 | + >>> print(len(dapper_pmount.getLatestTranslationsUploads())) |
1575 | 0 |
1576 | |
1577 | >>> success = pmount_upload.do_accept(build=build) |
1578 | @@ -124,7 +124,7 @@ |
1579 | # no longer exists and this content is impossible to check at the moment |
1580 | # since no email is generated because the recipients are not LP Persons. |
1581 | # (So why is it being checked in the first place?) |
1582 | -#>>> print pmount_upload.getNotificationSummary() |
1583 | +#>>> print(pmount_upload.getNotificationSummary()) |
1584 | #NEW: pmount_0.9.7-2ubuntu2_amd64.deb |
1585 | #OK: pmount_0.9.7-2ubuntu2_amd64_translations.tar.gz |
1586 | |
1587 | @@ -133,7 +133,7 @@ |
1588 | |
1589 | >>> latest_translations_uploads = list( |
1590 | ... dapper_pmount.getLatestTranslationsUploads()) |
1591 | - >>> print len(latest_translations_uploads) |
1592 | + >>> print(len(latest_translations_uploads)) |
1593 | 1 |
1594 | |
1595 | We'll get back to that uploaded file later. |
1596 | @@ -184,9 +184,9 @@ |
1597 | the spph creator is the requester. |
1598 | |
1599 | >>> for entry in translation_import_queue.getAllEntries(target=ubuntu): |
1600 | - ... print '%s/%s by %s: %s' % ( |
1601 | + ... print('%s/%s by %s: %s' % ( |
1602 | ... entry.distroseries.name, entry.sourcepackagename.name, |
1603 | - ... entry.importer.name, entry.path) |
1604 | + ... entry.importer.name, entry.path)) |
1605 | dapper/pmount by john-doe: po/es_ES.po |
1606 | dapper/pmount by john-doe: po/ca.po |
1607 | dapper/pmount by john-doe: po/de.po |
1608 | @@ -243,13 +243,13 @@ |
1609 | As we can see from the translation import queue content, as the publication |
1610 | has no creator specified, it falls back to rosetta-admins as the requester. |
1611 | |
1612 | - >>> print spph.creator |
1613 | + >>> print(spph.creator) |
1614 | None |
1615 | |
1616 | >>> for entry in translation_import_queue.getAllEntries(target=ubuntu): |
1617 | - ... print '%s/%s by %s: %s' % ( |
1618 | + ... print('%s/%s by %s: %s' % ( |
1619 | ... entry.distroseries.name, entry.sourcepackagename.name, |
1620 | - ... entry.importer.name, entry.path) |
1621 | + ... entry.importer.name, entry.path)) |
1622 | dapper/pmount by rosetta-admins: po/es_ES.po |
1623 | dapper/pmount by rosetta-admins: po/ca.po |
1624 | dapper/pmount by rosetta-admins: po/de.po |
1625 | @@ -281,9 +281,9 @@ |
1626 | As we can see from the translation import queue content. |
1627 | |
1628 | >>> for entry in translation_import_queue.getAllEntries(target=ubuntu): |
1629 | - ... print '%s/%s by %s: %s' % ( |
1630 | + ... print('%s/%s by %s: %s' % ( |
1631 | ... entry.distroseries.name, entry.sourcepackagename.name, |
1632 | - ... entry.importer.name, entry.path) |
1633 | + ... entry.importer.name, entry.path)) |
1634 | dapper/pmount by rosetta-admins: po/es_ES.po |
1635 | dapper/pmount by rosetta-admins: po/ca.po |
1636 | dapper/pmount by rosetta-admins: po/de.po |
1637 | @@ -394,7 +394,7 @@ |
1638 | ... if entry.name.endswith('.po') or entry.name.endswith('.pot') |
1639 | ... ]) |
1640 | >>> for filename in translation_files: |
1641 | - ... print filename |
1642 | + ... print(filename) |
1643 | ./source/po/ca.po |
1644 | ./source/po/cs.po |
1645 | ./source/po/de.po |
1646 | |
1647 | === modified file 'lib/lp/soyuz/doc/distroseriesqueue.txt' |
1648 | --- lib/lp/soyuz/doc/distroseriesqueue.txt 2015-09-04 12:19:07 +0000 |
1649 | +++ lib/lp/soyuz/doc/distroseriesqueue.txt 2018-05-27 20:15:09 +0000 |
1650 | @@ -90,14 +90,14 @@ |
1651 | ... item.setAccepted() |
1652 | ... item.syncUpdate() |
1653 | ... except QueueInconsistentStateError as info: |
1654 | - ... print info |
1655 | + ... print(info) |
1656 | |
1657 | >>> accepted_queue = hoary.getPackageUploads(PackageUploadStatus.ACCEPTED) |
1658 | |
1659 | >>> from lp.services.log.logger import FakeLogger |
1660 | >>> for item in accepted_queue: |
1661 | ... for source in item.sources: |
1662 | - ... print source.sourcepackagerelease.name |
1663 | + ... print(source.sourcepackagerelease.name) |
1664 | ... pub_records = item.realiseUpload(FakeLogger()) |
1665 | ed |
1666 | DEBUG Publishing source ed/0.2-20 to ubuntu/hoary in ubuntu |
1667 | @@ -110,7 +110,7 @@ |
1668 | >>> for release in SourcePackagePublishingHistory.selectBy( |
1669 | ... distroseries=hoary, status=PackagePublishingStatus.PENDING): |
1670 | ... if release.sourcepackagerelease.sourcepackagename.name == "ed": |
1671 | - ... print release.sourcepackagerelease.version |
1672 | + ... print(release.sourcepackagerelease.version) |
1673 | 0.2-20 |
1674 | |
1675 | |
1676 | @@ -177,19 +177,19 @@ |
1677 | >>> qitem.date_created |
1678 | datetime.datetime(...) |
1679 | |
1680 | - >>> print qitem.changesfile.filename |
1681 | + >>> print(qitem.changesfile.filename) |
1682 | mozilla-firefox_0.9_i386.changes |
1683 | |
1684 | - >>> print qitem.sourcepackagerelease.name |
1685 | - mozilla-firefox |
1686 | - |
1687 | - >>> print qitem.displayname |
1688 | - mozilla-firefox |
1689 | - |
1690 | - >>> print qitem.displayversion |
1691 | + >>> print(qitem.sourcepackagerelease.name) |
1692 | + mozilla-firefox |
1693 | + |
1694 | + >>> print(qitem.displayname) |
1695 | + mozilla-firefox |
1696 | + |
1697 | + >>> print(qitem.displayversion) |
1698 | 0.9 |
1699 | |
1700 | - >>> print qitem.displayarchs |
1701 | + >>> print(qitem.displayarchs) |
1702 | i386 |
1703 | |
1704 | >>> qitem.sourcepackagerelease |
1705 | @@ -203,19 +203,19 @@ |
1706 | >>> custom_item.date_created |
1707 | datetime.datetime(...) |
1708 | |
1709 | - >>> print custom_item.changesfile.filename |
1710 | - netapplet-1.0.0.tar.gz |
1711 | - |
1712 | - >>> print custom_item.displayname |
1713 | - netapplet-1.0.0.tar.gz |
1714 | - |
1715 | - >>> print custom_item.displayversion |
1716 | + >>> print(custom_item.changesfile.filename) |
1717 | + netapplet-1.0.0.tar.gz |
1718 | + |
1719 | + >>> print(custom_item.displayname) |
1720 | + netapplet-1.0.0.tar.gz |
1721 | + |
1722 | + >>> print(custom_item.displayversion) |
1723 | - |
1724 | |
1725 | - >>> print custom_item.displayarchs |
1726 | + >>> print(custom_item.displayarchs) |
1727 | raw-translations |
1728 | |
1729 | - >>> print custom_item.sourcepackagerelease |
1730 | + >>> print(custom_item.sourcepackagerelease) |
1731 | None |
1732 | |
1733 | The method getBuildByBuildIDs() will return all the PackageUploadBuild |
1734 | @@ -223,7 +223,7 @@ |
1735 | |
1736 | >>> ids = (18,19) |
1737 | >>> for package_upload_build in qset.getBuildByBuildIDs(ids): |
1738 | - ... print package_upload_build.packageupload.displayname |
1739 | + ... print(package_upload_build.packageupload.displayname) |
1740 | mozilla-firefox |
1741 | pmount |
1742 | |
1743 | @@ -299,7 +299,7 @@ |
1744 | >>> items = breezy_autotest.getPackageUploads(PackageUploadStatus.NEW) |
1745 | >>> for item in items: |
1746 | ... item.setAccepted() |
1747 | - ... print item.displayname, item.status.name |
1748 | + ... print(item.displayname, item.status.name) |
1749 | netapplet-1.0.0.tar.gz ACCEPTED |
1750 | netapplet-1.0.0.tar.gz ACCEPTED |
1751 | alsa-utils ACCEPTED |
1752 | @@ -314,7 +314,7 @@ |
1753 | ... PackageUploadStatus.ACCEPTED) |
1754 | >>> for item in items: |
1755 | ... item.status = PassthroughStatusValue(PackageUploadStatus.NEW) |
1756 | - ... print item.displayname, item.status.name |
1757 | + ... print(item.displayname, item.status.name) |
1758 | netapplet-1.0.0.tar.gz NEW |
1759 | netapplet-1.0.0.tar.gz NEW |
1760 | alsa-utils NEW |
1761 | @@ -353,9 +353,9 @@ |
1762 | >>> try: |
1763 | ... item.setAccepted() |
1764 | ... except QueueInconsistentStateError as e: |
1765 | - ... print item.displayname, e |
1766 | + ... print(item.displayname, e) |
1767 | ... else: |
1768 | - ... print item.displayname, 'ACCEPTED' |
1769 | + ... print(item.displayname, 'ACCEPTED') |
1770 | mozilla-firefox Component "hell" is not allowed in breezy-autotest |
1771 | |
1772 | Check how we treat source upload duplications in UNAPPROVED queue (NEW |
1773 | @@ -447,7 +447,7 @@ |
1774 | normally accepted. |
1775 | |
1776 | >>> item.setAccepted() |
1777 | - >>> print item.status.name |
1778 | + >>> print(item.status.name) |
1779 | ACCEPTED |
1780 | |
1781 | Roll back modified data: |
1782 | @@ -467,9 +467,9 @@ |
1783 | >>> from operator import attrgetter |
1784 | >>> def print_queue_items(queue_items): |
1785 | ... for queue_item in queue_items: |
1786 | - ... print "%s %s %s" % ( |
1787 | + ... print("%s %s %s" % ( |
1788 | ... queue_item.displayname, queue_item.displayversion, |
1789 | - ... queue_item.displayarchs) |
1790 | + ... queue_item.displayarchs)) |
1791 | |
1792 | >>> queue_items = breezy_autotest.getPackageUploads( |
1793 | ... PackageUploadStatus.NEW, name=u'pmount', version=u'0.1-1', |
1794 | @@ -577,9 +577,9 @@ |
1795 | >>> [item] = breezy_autotest.getPackageUploads( |
1796 | ... PackageUploadStatus.NEW, name=u'alsa-utils') |
1797 | >>> [source] = item.sources |
1798 | - >>> print "%s/%s" % ( |
1799 | + >>> print("%s/%s" % ( |
1800 | ... source.sourcepackagerelease.component.name, |
1801 | - ... source.sourcepackagerelease.section.name) |
1802 | + ... source.sourcepackagerelease.section.name)) |
1803 | main/base |
1804 | |
1805 | Overriding to a component not in the allowed_components list results in |
1806 | @@ -589,9 +589,9 @@ |
1807 | >>> universe = getUtility(IComponentSet)['universe'] |
1808 | >>> main = getUtility(IComponentSet)['main'] |
1809 | >>> web = getUtility(ISectionSet)['web'] |
1810 | - >>> print item.overrideSource( |
1811 | + >>> print(item.overrideSource( |
1812 | ... new_component=restricted, new_section=web, |
1813 | - ... allowed_components=(universe,)) |
1814 | + ... allowed_components=(universe,))) |
1815 | Traceback (most recent call last): |
1816 | ... |
1817 | QueueAdminUnauthorizedError: No rights to override to restricted |
1818 | @@ -599,9 +599,9 @@ |
1819 | Allowing "restricted" still won't work because the original component |
1820 | is "main": |
1821 | |
1822 | - >>> print item.overrideSource( |
1823 | + >>> print(item.overrideSource( |
1824 | ... new_component=restricted, new_section=web, |
1825 | - ... allowed_components=(restricted,)) |
1826 | + ... allowed_components=(restricted,))) |
1827 | Traceback (most recent call last): |
1828 | ... |
1829 | QueueAdminUnauthorizedError: No rights to override from main |
1830 | @@ -609,13 +609,13 @@ |
1831 | Specifying both main and restricted allows the override to restricted/web. |
1832 | overrideSource() returns True if it completed the task. |
1833 | |
1834 | - >>> print item.overrideSource( |
1835 | + >>> print(item.overrideSource( |
1836 | ... new_component=restricted, new_section=web, |
1837 | - ... allowed_components=(main,restricted)) |
1838 | + ... allowed_components=(main,restricted))) |
1839 | True |
1840 | - >>> print "%s/%s" % ( |
1841 | + >>> print("%s/%s" % ( |
1842 | ... source.sourcepackagerelease.component.name, |
1843 | - ... source.sourcepackagerelease.section.name) |
1844 | + ... source.sourcepackagerelease.section.name)) |
1845 | restricted/web |
1846 | |
1847 | Similarly for binaries: |
1848 | @@ -624,10 +624,10 @@ |
1849 | ... PackageUploadStatus.NEW, name=u'pmount') |
1850 | >>> [build] = item.builds |
1851 | >>> [binary_package] = build.build.binarypackages |
1852 | - >>> print "%s/%s/%s" % ( |
1853 | + >>> print("%s/%s/%s" % ( |
1854 | ... binary_package.component.name, |
1855 | ... binary_package.section.name, |
1856 | - ... binary_package.priority.title) |
1857 | + ... binary_package.priority.title)) |
1858 | main/base/Important |
1859 | |
1860 | >>> from lp.soyuz.enums import PackagePublishingPriority |
1861 | @@ -636,25 +636,25 @@ |
1862 | ... "section": web, |
1863 | ... "priority": PackagePublishingPriority.EXTRA, |
1864 | ... }] |
1865 | - >>> print item.overrideBinaries( |
1866 | - ... binary_changes, allowed_components=(universe,)) |
1867 | + >>> print(item.overrideBinaries( |
1868 | + ... binary_changes, allowed_components=(universe,))) |
1869 | Traceback (most recent call last): |
1870 | ... |
1871 | QueueAdminUnauthorizedError: No rights to override to restricted |
1872 | |
1873 | - >>> print item.overrideBinaries( |
1874 | - ... binary_changes, allowed_components=(restricted,)) |
1875 | + >>> print(item.overrideBinaries( |
1876 | + ... binary_changes, allowed_components=(restricted,))) |
1877 | Traceback (most recent call last): |
1878 | ... |
1879 | QueueAdminUnauthorizedError: No rights to override from main |
1880 | |
1881 | - >>> print item.overrideBinaries( |
1882 | - ... binary_changes, allowed_components=(main,restricted)) |
1883 | + >>> print(item.overrideBinaries( |
1884 | + ... binary_changes, allowed_components=(main, restricted))) |
1885 | True |
1886 | - >>> print "%s/%s/%s" % ( |
1887 | + >>> print("%s/%s/%s" % ( |
1888 | ... binary_package.component.name, |
1889 | ... binary_package.section.name, |
1890 | - ... binary_package.priority.title) |
1891 | + ... binary_package.priority.title)) |
1892 | restricted/web/Extra |
1893 | |
1894 | |
1895 | @@ -703,8 +703,8 @@ |
1896 | |
1897 | >>> add_static_xlat_upload() |
1898 | |
1899 | - >>> print warty.getPackageUploads( |
1900 | - ... custom_type=static_xlat).count() |
1901 | + >>> print(warty.getPackageUploads( |
1902 | + ... custom_type=static_xlat).count()) |
1903 | 1 |
1904 | |
1905 | There is also a created_since_date filter that will only return packages |
1906 | @@ -720,7 +720,7 @@ |
1907 | |
1908 | >>> add_static_xlat_upload() |
1909 | >>> uploads = warty.getPackageUploads(custom_type=static_xlat) |
1910 | - >>> print uploads.count() |
1911 | + >>> print(uploads.count()) |
1912 | 2 |
1913 | |
1914 | Commit a transaction to ensure new DB objects get a later timestamp. |
1915 | @@ -783,13 +783,13 @@ |
1916 | >>> queue_source = items[1].sources[0] |
1917 | >>> [build] = queue_source.sourcepackagerelease.builds |
1918 | |
1919 | - >>> print build.title |
1920 | + >>> print(build.title) |
1921 | i386 build of alsa-utils 1.0.9a-4ubuntu1 in ubuntu hoary RELEASE |
1922 | |
1923 | - >>> print build.status.name |
1924 | + >>> print(build.status.name) |
1925 | NEEDSBUILD |
1926 | |
1927 | - >>> print build.buildqueue_record.lastscore |
1928 | + >>> print(build.buildqueue_record.lastscore) |
1929 | 10 |
1930 | |
1931 | Let's reject something in the queue: |
1932 | |
1933 | === modified file 'lib/lp/soyuz/doc/fakepackager.txt' |
1934 | --- lib/lp/soyuz/doc/fakepackager.txt 2018-02-09 17:35:14 +0000 |
1935 | +++ lib/lp/soyuz/doc/fakepackager.txt 2018-05-27 20:15:09 +0000 |
1936 | @@ -14,7 +14,7 @@ |
1937 | |
1938 | >>> packager = FakePackager('biscuit', '1.0') |
1939 | |
1940 | - >>> print packager.sandbox_path |
1941 | + >>> print(packager.sandbox_path) |
1942 | /tmp/fakepackager-... |
1943 | |
1944 | >>> os.path.exists(packager.sandbox_path) |
1945 | @@ -23,18 +23,18 @@ |
1946 | Source 'name' and 'version' and 'gpg_key_fingerprint' are set according to |
1947 | the arguments passed in the initialization. |
1948 | |
1949 | - >>> print packager.name |
1950 | + >>> print(packager.name) |
1951 | biscuit |
1952 | |
1953 | - >>> print packager.version |
1954 | + >>> print(packager.version) |
1955 | 1.0 |
1956 | |
1957 | - >>> print packager.gpg_key_fingerprint |
1958 | + >>> print(packager.gpg_key_fingerprint) |
1959 | None |
1960 | |
1961 | The upstream directory is known but not yet created. |
1962 | |
1963 | - >>> print packager.upstream_directory |
1964 | + >>> print(packager.upstream_directory) |
1965 | /tmp/fakepackager-.../biscuit-1.0 |
1966 | |
1967 | >>> os.path.exists(packager.upstream_directory) |
1968 | @@ -87,12 +87,12 @@ |
1969 | >>> packager.buildSource(signed=False) |
1970 | |
1971 | >>> for changesfile in packager.listAvailableUploads(): |
1972 | - ... print changesfile |
1973 | + ... print(changesfile) |
1974 | /tmp/fakepackager-.../biscuit_1.0-1_source.changes |
1975 | |
1976 | >>> changesfile_path = packager.listAvailableUploads()[0] |
1977 | >>> changesfile = open(changesfile_path) |
1978 | - >>> print changesfile.read() |
1979 | + >>> print(changesfile.read()) |
1980 | Format: ... |
1981 | Date: ... |
1982 | Source: biscuit |
1983 | @@ -135,7 +135,7 @@ |
1984 | |
1985 | The error was raised because no signing key was set. |
1986 | |
1987 | - >>> print packager.gpg_key_fingerprint |
1988 | + >>> print(packager.gpg_key_fingerprint) |
1989 | None |
1990 | |
1991 | A GPG key can only be set on initialization so we will have to create a |
1992 | @@ -148,7 +148,7 @@ |
1993 | |
1994 | GPG key set, now we are able to build a signed version. |
1995 | |
1996 | - >>> print packager.gpg_key_fingerprint |
1997 | + >>> print(packager.gpg_key_fingerprint) |
1998 | 0xFD311613D941C6DE55737D310E3498675D147547 |
1999 | |
2000 | FakePackager also allows us to include as many versions it needs |
2001 | @@ -164,7 +164,7 @@ |
2002 | basically checking we pass the right arguments to it. |
2003 | |
2004 | >>> changesfile_path = packager.listAvailableUploads()[1] |
2005 | - >>> print os.path.basename(changesfile_path) |
2006 | + >>> print(os.path.basename(changesfile_path)) |
2007 | biscuit_1.0-3_source.changes |
2008 | |
2009 | >>> content = open(changesfile_path).read() |
2010 | @@ -202,14 +202,14 @@ |
2011 | corresponding sandbox directory. |
2012 | |
2013 | >>> for changesfile in packager.listAvailableUploads(): |
2014 | - ... print changesfile |
2015 | + ... print(changesfile) |
2016 | /tmp/fakepackager-.../biscuit_1.0-1_source.changes |
2017 | /tmp/fakepackager-.../biscuit_1.0-3_source.changes |
2018 | /tmp/fakepackager-.../biscuit_1.0-4_source.changes |
2019 | /tmp/fakepackager-.../biscuit_1.0-5_source.changes |
2020 | |
2021 | >>> for changesfile in zeca_packager.listAvailableUploads(): |
2022 | - ... print changesfile |
2023 | + ... print(changesfile) |
2024 | /tmp/fakepackager-.../zeca_1.0-1_source.changes |
2025 | /tmp/fakepackager-.../zeca_1.0-2_source.changes |
2026 | |
2027 | @@ -272,18 +272,18 @@ |
2028 | automatically accepted, builds are created, the upload is published and |
2029 | the source publishing record created are returned. |
2030 | |
2031 | - >>> print ubuntu.getSourcePackage('zeca') |
2032 | + >>> print(ubuntu.getSourcePackage('zeca')) |
2033 | None |
2034 | |
2035 | >>> zeca_pub = zeca_packager.uploadSourceVersion('1.0-1') |
2036 | |
2037 | - >>> print zeca_pub.displayname, zeca_pub.status.name |
2038 | + >>> print(zeca_pub.displayname, zeca_pub.status.name) |
2039 | zeca 1.0-1 in hoary PENDING |
2040 | |
2041 | >>> len(zeca_pub.getBuilds()) |
2042 | 2 |
2043 | |
2044 | - >>> print ubuntu.getSourcePackage('zeca').currentrelease.version |
2045 | + >>> print(ubuntu.getSourcePackage('zeca').currentrelease.version) |
2046 | 1.0-1 |
2047 | |
2048 | New uploaded versions will immediately show up as the current |
2049 | @@ -294,7 +294,7 @@ |
2050 | >>> len(zeca_pub.getBuilds()) |
2051 | 2 |
2052 | |
2053 | - >>> print ubuntu.getSourcePackage('zeca').currentrelease.version |
2054 | + >>> print(ubuntu.getSourcePackage('zeca').currentrelease.version) |
2055 | 1.0-2 |
2056 | |
2057 | We can change the upload policy for a specific upload, for instance to |
2058 | @@ -305,7 +305,7 @@ |
2059 | >>> len(biscuit_pub.getBuilds()) |
2060 | 2 |
2061 | |
2062 | - >>> print ubuntu.getSourcePackage('biscuit').currentrelease.version |
2063 | + >>> print(ubuntu.getSourcePackage('biscuit').currentrelease.version) |
2064 | 1.0-1 |
2065 | |
2066 | Since we are using Foo Bar's GPG key to sign packages, in order to test |
2067 | @@ -313,7 +313,7 @@ |
2068 | |
2069 | >>> from lp.registry.interfaces.person import IPersonSet |
2070 | >>> foobar = getUtility(IPersonSet).getByName('name16') |
2071 | - >>> print foobar.archive |
2072 | + >>> print(foobar.archive) |
2073 | None |
2074 | |
2075 | >>> from lp.soyuz.enums import ArchivePurpose |
2076 | @@ -326,10 +326,10 @@ |
2077 | >>> ppa_pub = packager.uploadSourceVersion( |
2078 | ... '1.0-5', archive=foobar.archive) |
2079 | |
2080 | - >>> print ppa_pub.archive.displayname |
2081 | + >>> print(ppa_pub.archive.displayname) |
2082 | PPA for Foo Bar |
2083 | |
2084 | - >>> print ppa_pub.displayname, ppa_pub.status.name |
2085 | + >>> print(ppa_pub.displayname, ppa_pub.status.name) |
2086 | biscuit 1.0-5 in hoary PENDING |
2087 | |
2088 | >>> len(ppa_pub.getBuilds()) |
2089 | |
2090 | === modified file 'lib/lp/soyuz/doc/gina-multiple-arch.txt' |
2091 | --- lib/lp/soyuz/doc/gina-multiple-arch.txt 2016-01-26 15:47:37 +0000 |
2092 | +++ lib/lp/soyuz/doc/gina-multiple-arch.txt 2018-05-27 20:15:09 +0000 |
2093 | @@ -80,7 +80,7 @@ |
2094 | >>> gina_proc = [sys.executable, 'scripts/gina.py', '-q', |
2095 | ... 'dapper', 'dapper-updates'] |
2096 | >>> proc = subprocess.Popen(gina_proc, stderr=subprocess.PIPE) |
2097 | - >>> print proc.stderr.read() |
2098 | + >>> print(proc.stderr.read()) |
2099 | WARNING ... |
2100 | WARNING No source package bdftopcf (0.99.0-1) listed for bdftopcf (0.99.0-1), scrubbing archive... |
2101 | WARNING The archive for dapper-updates/universe doesn't contain a directory for powerpc, skipping |
2102 | @@ -104,17 +104,17 @@ |
2103 | |
2104 | >>> SourcePackageRelease.select().count() - orig_spr_count |
2105 | 2 |
2106 | - >>> print SSPPH.select().count() - orig_sspph_count |
2107 | + >>> print(SSPPH.select().count() - orig_sspph_count) |
2108 | 2 |
2109 | |
2110 | Each source package has its own maintainer (in this case, fabbione and |
2111 | porridge): |
2112 | |
2113 | - >>> print Person.select().count() - orig_person_count |
2114 | - 2 |
2115 | - >>> print TeamParticipation.select().count() - orig_tp_count |
2116 | - 2 |
2117 | - >>> print EmailAddress.select().count() - orig_email_count |
2118 | + >>> print(Person.select().count() - orig_person_count) |
2119 | + 2 |
2120 | + >>> print(TeamParticipation.select().count() - orig_tp_count) |
2121 | + 2 |
2122 | + >>> print(EmailAddress.select().count() - orig_email_count) |
2123 | 2 |
2124 | |
2125 | There are 4 binary packages generated by the two builds of the two |
2126 | @@ -135,9 +135,9 @@ |
2127 | >>> n = SourcePackageName.selectOneBy(name="ekg") |
2128 | >>> ekg = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id, |
2129 | ... version="1:1.5-4ubuntu1.2") |
2130 | - >>> print ekg.section.name |
2131 | + >>> print(ekg.section.name) |
2132 | net |
2133 | - >>> print ekg.component.name |
2134 | + >>> print(ekg.component.name) |
2135 | main |
2136 | |
2137 | And that one of the packages in main is here too: |
2138 | @@ -145,13 +145,13 @@ |
2139 | >>> n = BinaryPackageName.selectOneBy(name="libgadu-dev") |
2140 | >>> ekg = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id, |
2141 | ... version="1:1.5-4ubuntu1.2") |
2142 | - >>> print ekg.section.name |
2143 | + >>> print(ekg.section.name) |
2144 | libdevel |
2145 | - >>> print ekg.component.name |
2146 | + >>> print(ekg.component.name) |
2147 | main |
2148 | - >>> print ekg.architecturespecific |
2149 | + >>> print(ekg.architecturespecific) |
2150 | True |
2151 | - >>> print ekg.build.processor.name |
2152 | + >>> print(ekg.build.processor.name) |
2153 | 386 |
2154 | |
2155 | Check that the package it generates in universe was successfully |
2156 | @@ -162,11 +162,11 @@ |
2157 | >>> n = BinaryPackageName.selectOneBy(name="ekg") |
2158 | >>> ekg = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id, |
2159 | ... version="1:1.5-4ubuntu1.2") |
2160 | - >>> print ekg.section.name |
2161 | + >>> print(ekg.section.name) |
2162 | net |
2163 | - >>> print ekg.component.name |
2164 | + >>> print(ekg.component.name) |
2165 | universe |
2166 | - >>> print ekg.priority == PackagePublishingPriority.OPTIONAL |
2167 | + >>> print(ekg.priority == PackagePublishingPriority.OPTIONAL) |
2168 | True |
2169 | |
2170 | The bdftopcf package is in a bit of a fix. Its binary package is present |
2171 | @@ -178,15 +178,15 @@ |
2172 | >>> n = BinaryPackageName.selectOneBy(name="bdftopcf") |
2173 | >>> ekg = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id, |
2174 | ... version="0.99.0-1") |
2175 | - >>> print ekg.section.name |
2176 | + >>> print(ekg.section.name) |
2177 | x11 |
2178 | - >>> print ekg.component.name |
2179 | + >>> print(ekg.component.name) |
2180 | universe |
2181 | - >>> print ekg.build.source_package_release.sourcepackagename.name |
2182 | + >>> print(ekg.build.source_package_release.sourcepackagename.name) |
2183 | bdftopcf |
2184 | - >>> print ekg.build.source_package_release.component.name |
2185 | + >>> print(ekg.build.source_package_release.component.name) |
2186 | main |
2187 | - >>> print ekg.build.source_package_release.version |
2188 | + >>> print(ekg.build.source_package_release.version) |
2189 | 0.99.0-1 |
2190 | |
2191 | Check that we publishing bdftopcf into the correct distroarchseries: |
2192 | @@ -195,12 +195,12 @@ |
2193 | >>> dar = DistroArchSeries.selectOneBy(distroseriesID=dapper.id, |
2194 | ... processor_id=processor.id, architecturetag="i386", |
2195 | ... official=True, ownerID=celebs.launchpad_developers.id) |
2196 | - >>> print dar.architecturetag |
2197 | + >>> print(dar.architecturetag) |
2198 | i386 |
2199 | >>> for entry in SBPPH.selectBy(distroarchseriesID=dar.id, |
2200 | ... orderBy="binarypackagerelease"): |
2201 | ... package = entry.binarypackagerelease |
2202 | - ... print package.binarypackagename.name, package.version |
2203 | + ... print(package.binarypackagename.name, package.version) |
2204 | bdftopcf 0.99.0-1 |
2205 | ekg 1:1.5-4ubuntu1.2 |
2206 | libgadu-dev 1:1.5-4ubuntu1.2 |
2207 | |
2208 | === modified file 'lib/lp/soyuz/doc/gina.txt' |
2209 | --- lib/lp/soyuz/doc/gina.txt 2017-01-13 12:24:45 +0000 |
2210 | +++ lib/lp/soyuz/doc/gina.txt 2018-05-27 20:15:09 +0000 |
2211 | @@ -129,7 +129,7 @@ |
2212 | |
2213 | Check STDERR for the errors we expected: |
2214 | |
2215 | - >>> print proc.stderr.read() |
2216 | + >>> print(proc.stderr.read()) |
2217 | ERROR Error processing package files for clearlooks |
2218 | ... |
2219 | ExecutionError: Error 2 unpacking source |
2220 | @@ -210,12 +210,12 @@ |
2221 | >>> x11p = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id, |
2222 | ... version="6.8.99.7-2") |
2223 | |
2224 | - >>> print x11p.builddependsindep |
2225 | + >>> print(x11p.builddependsindep) |
2226 | debhelper (>= 4.0.0) |
2227 | |
2228 | Check if the changelog message was stored correcly: |
2229 | |
2230 | - >>> print x11p.changelog_entry #doctest: -NORMALIZE_WHITESPACE |
2231 | + >>> print(x11p.changelog_entry) #doctest: -NORMALIZE_WHITESPACE |
2232 | x11proto-damage (6.8.99.7-2) breezy; urgency=low |
2233 | <BLANKLINE> |
2234 | * Add dependency on x11proto-fixes-dev. |
2235 | @@ -228,7 +228,7 @@ |
2236 | |
2237 | Check that the changelog was uploaded to the librarian correctly: |
2238 | |
2239 | - >>> print x11p.changelog.read() |
2240 | + >>> print(x11p.changelog.read()) |
2241 | x11proto-damage (6.8.99.7-2) breezy; urgency=low |
2242 | <BLANKLINE> |
2243 | * Add dependency on x11proto-fixes-dev. |
2244 | @@ -243,7 +243,7 @@ |
2245 | |
2246 | Same for the copyright: |
2247 | |
2248 | - >>> print x11p.copyright |
2249 | + >>> print(x11p.copyright) |
2250 | $Id: COPYING,v 1.2 2003/11/05 05:39:58 keithp Exp $ |
2251 | <BLANKLINE> |
2252 | Copyright ... 2003 Keith Packard |
2253 | @@ -255,7 +255,7 @@ |
2254 | |
2255 | >>> n = SourcePackageName.selectOneBy(name="libcap") |
2256 | >>> cap = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id) |
2257 | - >>> print cap.dsc |
2258 | + >>> print(cap.dsc) |
2259 | -----BEGIN PGP SIGNED MESSAGE----- |
2260 | Hash: SHA1 |
2261 | <BLANKLINE> |
2262 | @@ -278,9 +278,9 @@ |
2263 | FVJMGmGr+2YLZfF+oRUKcug= |
2264 | =bw+A |
2265 | -----END PGP SIGNATURE----- |
2266 | - >>> print cap.maintainer.displayname |
2267 | + >>> print(cap.maintainer.displayname) |
2268 | Michael Vogt |
2269 | - >>> print cap.dsc_binaries |
2270 | + >>> print(cap.dsc_binaries) |
2271 | libcap-dev, libcap-bin, libcap1 |
2272 | |
2273 | Test ubuntu-meta in breezy, which was forcefully imported. |
2274 | @@ -288,8 +288,8 @@ |
2275 | >>> n = SourcePackageName.selectOneBy(name="ubuntu-meta") |
2276 | >>> um = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id, |
2277 | ... version="0.80") |
2278 | - >>> print um.section.name, um.architecturehintlist, \ |
2279 | - ... um.upload_distroseries.name |
2280 | + >>> print(um.section.name, um.architecturehintlist, |
2281 | + ... um.upload_distroseries.name) |
2282 | base any breezy |
2283 | |
2284 | And check that its files actually ended up in the librarian (these sha1sums |
2285 | @@ -299,7 +299,7 @@ |
2286 | >>> files = SourcePackageReleaseFile.selectBy( |
2287 | ... sourcepackagereleaseID=cap.id, orderBy="libraryfile") |
2288 | >>> for f in files: |
2289 | - ... print f.libraryfile.content.sha1 |
2290 | + ... print(f.libraryfile.content.sha1) |
2291 | 107d5478e72385f714523bad5359efedb5dcc8b2 |
2292 | 0083da007d44c02fd861c1d21579f716490cab02 |
2293 | e6661aec051ccb201061839d275f2282968d8b93 |
2294 | @@ -309,20 +309,20 @@ |
2295 | |
2296 | >>> n = SourcePackageName.selectOneBy(name="python-pam") |
2297 | >>> pp = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id) |
2298 | - >>> print pp.component.name |
2299 | + >>> print(pp.component.name) |
2300 | main |
2301 | |
2302 | In the hoary Sources, its section is listed as underworld/python. Ensure |
2303 | this is cut up correctly: |
2304 | |
2305 | - >>> print pp.section.name |
2306 | + >>> print(pp.section.name) |
2307 | python |
2308 | |
2309 | Make sure that we only imported one db1-compat source package. |
2310 | |
2311 | >>> n = SourcePackageName.selectOneBy(name="db1-compat") |
2312 | >>> db1 = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id) |
2313 | - >>> print db1.section.name |
2314 | + >>> print(db1.section.name) |
2315 | libs |
2316 | |
2317 | |
2318 | @@ -333,9 +333,9 @@ |
2319 | |
2320 | >>> for pub in SSPPH.selectBy( |
2321 | ... sourcepackagereleaseID=db1.id, orderBy='distroseries'): |
2322 | - ... print "%s %s %s" % ( |
2323 | + ... print("%s %s %s" % ( |
2324 | ... pub.distroseries.name, pub.section.name, |
2325 | - ... pub.archive.purpose.name) |
2326 | + ... pub.archive.purpose.name)) |
2327 | hoary libs PRIMARY |
2328 | breezy oldlibs PRIMARY |
2329 | |
2330 | @@ -348,13 +348,13 @@ |
2331 | - We had 2 errors (out of 10 Sources stanzas) in breezy: python-sqllite |
2332 | and util-linux (again, poor thing). |
2333 | |
2334 | - >>> print SSPPH.select().count() - orig_sspph_count |
2335 | + >>> print(SSPPH.select().count() - orig_sspph_count) |
2336 | 21 |
2337 | |
2338 | >>> new_count = SSPPH.selectBy( |
2339 | ... componentID=1, |
2340 | ... pocket=PackagePublishingPocket.RELEASE).count() |
2341 | - >>> print new_count - orig_sspph_main_count |
2342 | + >>> print(new_count - orig_sspph_main_count) |
2343 | 21 |
2344 | |
2345 | |
2346 | @@ -382,11 +382,11 @@ |
2347 | >>> from lp.soyuz.model.binarypackagename import BinaryPackageName |
2348 | >>> n = BinaryPackageName.selectOneBy(name="rioutil") |
2349 | >>> rio = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id) |
2350 | - >>> print rio.shlibdeps |
2351 | + >>> print(rio.shlibdeps) |
2352 | librioutil 1 rioutil |
2353 | - >>> print rio.version |
2354 | + >>> print(rio.version) |
2355 | 1.4.4-1.0.1 |
2356 | - >>> print rio.build.source_package_release.version |
2357 | + >>> print(rio.build.source_package_release.version) |
2358 | 1.4.4-1 |
2359 | |
2360 | Test all the data got to the ed BPR intact, and that the missing |
2361 | @@ -394,28 +394,28 @@ |
2362 | |
2363 | >>> n = BinaryPackageName.selectOneBy(name="ed") |
2364 | >>> ed = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id) |
2365 | - >>> print ed.version |
2366 | + >>> print(ed.version) |
2367 | 0.2-20 |
2368 | - >>> print ed.build.processor.name |
2369 | + >>> print(ed.build.processor.name) |
2370 | 386 |
2371 | - >>> print ed.build.status |
2372 | + >>> print(ed.build.status) |
2373 | Successfully built |
2374 | - >>> print ed.build.distro_arch_series.processor.name |
2375 | + >>> print(ed.build.distro_arch_series.processor.name) |
2376 | 386 |
2377 | - >>> print ed.build.distro_arch_series.architecturetag |
2378 | + >>> print(ed.build.distro_arch_series.architecturetag) |
2379 | i386 |
2380 | - >>> print ed.priority |
2381 | + >>> print(ed.priority) |
2382 | Extra |
2383 | - >>> print ed.section.name |
2384 | + >>> print(ed.section.name) |
2385 | editors |
2386 | - >>> print ed.summary |
2387 | + >>> print(ed.summary) |
2388 | The classic unix line editor. |
2389 | |
2390 | We now check if the Breezy publication record has the correct priority: |
2391 | |
2392 | >>> ed_pub = SBPPH.selectOneBy(binarypackagereleaseID=ed.id, |
2393 | ... distroarchseriesID=breezy_i386.id) |
2394 | - >>> print ed_pub.priority |
2395 | + >>> print(ed_pub.priority) |
2396 | Standard |
2397 | |
2398 | Check binary package libgcj-dev in Breezy. Its version number must differ from |
2399 | @@ -424,11 +424,11 @@ |
2400 | >>> n = BinaryPackageName.selectOneBy(name="libgcj-dev") |
2401 | >>> lib = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id, |
2402 | ... version="4:4.0.1-3") |
2403 | - >>> print lib.version |
2404 | + >>> print(lib.version) |
2405 | 4:4.0.1-3 |
2406 | - >>> print lib.build.source_package_release.version |
2407 | + >>> print(lib.build.source_package_release.version) |
2408 | 1.28 |
2409 | - >>> print lib.build.source_package_release.maintainer.displayname |
2410 | + >>> print(lib.build.source_package_release.maintainer.displayname) |
2411 | Debian GCC maintainers |
2412 | |
2413 | Check if the udeb was properly parsed and identified: |
2414 | @@ -436,17 +436,17 @@ |
2415 | >>> n = BinaryPackageName.selectOneBy(name="archive-copier") |
2416 | >>> ac = BinaryPackageRelease.selectOneBy(binarypackagenameID=n.id, |
2417 | ... version="0.1.5") |
2418 | - >>> print ac.version |
2419 | + >>> print(ac.version) |
2420 | 0.1.5 |
2421 | - >>> print ac.priority |
2422 | + >>> print(ac.priority) |
2423 | Standard |
2424 | - >>> print ac.section.name |
2425 | + >>> print(ac.section.name) |
2426 | debian-installer |
2427 | - >>> print ac.build.source_package_release.version |
2428 | + >>> print(ac.build.source_package_release.version) |
2429 | 0.1.5 |
2430 | - >>> print ac.build.source_package_release.maintainer.name |
2431 | + >>> print(ac.build.source_package_release.maintainer.name) |
2432 | cjwatson |
2433 | - >>> print ac.build.processor.name |
2434 | + >>> print(ac.build.processor.name) |
2435 | 386 |
2436 | |
2437 | We check that the binary package publishing override facility works: |
2438 | @@ -456,9 +456,9 @@ |
2439 | ... version="2.1.3-7") |
2440 | >>> for pub in BinaryPackagePublishingHistory.selectBy( |
2441 | ... binarypackagereleaseID=db1.id, orderBy='distroarchseries'): |
2442 | - ... print "%s %s %s" % ( |
2443 | + ... print("%s %s %s" % ( |
2444 | ... pub.distroarchseries.distroseries.name, pub.priority, |
2445 | - ... pub.archive.purpose.name) |
2446 | + ... pub.archive.purpose.name)) |
2447 | hoary Required PRIMARY |
2448 | breezy Optional PRIMARY |
2449 | |
2450 | @@ -475,13 +475,13 @@ |
2451 | |
2452 | >>> from sqlobject import LIKE |
2453 | >>> p = Person.selectOne(LIKE(Person.q.name, u"cjwatson%")) |
2454 | - >>> print p.name |
2455 | + >>> print(p.name) |
2456 | cjwatson |
2457 | - >>> print Person.select().count() - orig_person_count |
2458 | - 13 |
2459 | - >>> print TeamParticipation.select().count() - orig_tp_count |
2460 | - 13 |
2461 | - >>> print EmailAddress.select().count() - orig_email_count |
2462 | + >>> print(Person.select().count() - orig_person_count) |
2463 | + 13 |
2464 | + >>> print(TeamParticipation.select().count() - orig_tp_count) |
2465 | + 13 |
2466 | + >>> print(EmailAddress.select().count() - orig_email_count) |
2467 | 13 |
2468 | |
2469 | |
2470 | @@ -514,7 +514,7 @@ |
2471 | >>> gina_proc = [sys.executable, 'scripts/gina.py', '-q', |
2472 | ... 'hoary', 'breezy'] |
2473 | >>> proc = subprocess.Popen(gina_proc, stderr=subprocess.PIPE) |
2474 | - >>> print proc.stderr.read() |
2475 | + >>> print(proc.stderr.read()) |
2476 | ERROR Error processing package files for clearlooks |
2477 | ... |
2478 | ExecutionError: Error 2 unpacking source |
2479 | @@ -566,11 +566,11 @@ |
2480 | |
2481 | >>> SourcePackageRelease.select().count() - orig_spr_count |
2482 | 17 |
2483 | - >>> print Person.select().count() - orig_person_count |
2484 | - 13 |
2485 | - >>> print TeamParticipation.select().count() - orig_tp_count |
2486 | - 13 |
2487 | - >>> print EmailAddress.select().count() - orig_email_count |
2488 | + >>> print(Person.select().count() - orig_person_count) |
2489 | + 13 |
2490 | + >>> print(TeamParticipation.select().count() - orig_tp_count) |
2491 | + 13 |
2492 | + >>> print(EmailAddress.select().count() - orig_email_count) |
2493 | 13 |
2494 | >>> BinaryPackageRelease.select().count() - orig_bpr_count |
2495 | 40 |
2496 | @@ -581,7 +581,7 @@ |
2497 | |
2498 | >>> SBPPH.select().count() - orig_sbpph_count |
2499 | 47 |
2500 | - >>> print SSPPH.select().count() - orig_sspph_count |
2501 | + >>> print(SSPPH.select().count() - orig_sspph_count) |
2502 | 23 |
2503 | |
2504 | Check that the overrides we did were correctly issued. We can't use |
2505 | @@ -596,7 +596,7 @@ |
2506 | ... """ % sqlvalues( |
2507 | ... x11p, breezy, active_publishing_status), |
2508 | ... orderBy=["-datecreated"])[0] |
2509 | - >>> print x11_pub.section.name |
2510 | + >>> print(x11_pub.section.name) |
2511 | net |
2512 | >>> ed_pub = SBPPH.select(""" |
2513 | ... binarypackagerelease = %s AND |
2514 | @@ -605,7 +605,7 @@ |
2515 | ... """ % sqlvalues( |
2516 | ... ed, breezy_i386, active_publishing_status), |
2517 | ... orderBy=["-datecreated"])[0] |
2518 | - >>> print ed_pub.priority |
2519 | + >>> print(ed_pub.priority) |
2520 | Extra |
2521 | >>> n = SourcePackageName.selectOneBy(name="archive-copier") |
2522 | >>> ac = SourcePackageRelease.selectOneBy(sourcepackagenameID=n.id, |
2523 | @@ -617,7 +617,7 @@ |
2524 | ... """ % sqlvalues( |
2525 | ... ac, breezy, active_publishing_status), |
2526 | ... orderBy=["-datecreated"])[0] |
2527 | - >>> print ac_pub.component.name |
2528 | + >>> print(ac_pub.component.name) |
2529 | universe |
2530 | |
2531 | |
2532 | @@ -666,16 +666,16 @@ |
2533 | All the publishings will also have the 'partner' component and the |
2534 | partner archive: |
2535 | |
2536 | - >>> print set(sspph.component.name for sspph in source_difference) |
2537 | - set([u'partner']) |
2538 | - |
2539 | - >>> print set(sbpph.component.name for sbpph in binary_difference) |
2540 | - set([u'partner']) |
2541 | - |
2542 | - >>> print set(sspph.archive.purpose.name for sspph in source_difference) |
2543 | + >>> print(set(sspph.component.name for sspph in source_difference)) |
2544 | + set([u'partner']) |
2545 | + |
2546 | + >>> print(set(sbpph.component.name for sbpph in binary_difference)) |
2547 | + set([u'partner']) |
2548 | + |
2549 | + >>> print(set(sspph.archive.purpose.name for sspph in source_difference)) |
2550 | set(['PARTNER']) |
2551 | |
2552 | - >>> print set(sbpph.archive.purpose.name for sbpph in binary_difference) |
2553 | + >>> print(set(sbpph.archive.purpose.name for sbpph in binary_difference)) |
2554 | set(['PARTNER']) |
2555 | |
2556 | |
2557 | @@ -752,7 +752,7 @@ |
2558 | >>> lenny_sources.count() |
2559 | 12 |
2560 | |
2561 | - >>> print set([pub.status.name for pub in lenny_sources]) |
2562 | + >>> print(set([pub.status.name for pub in lenny_sources])) |
2563 | set(['PUBLISHED']) |
2564 | |
2565 | As mentioned before, lenny/i386 is empty, no binaries were imported. |
2566 | @@ -778,7 +778,7 @@ |
2567 | ... sys.executable, 'scripts/gina.py', 'lenny', 'partner'] |
2568 | >>> proc = subprocess.Popen(gina_proc, stderr=subprocess.PIPE) |
2569 | |
2570 | - >>> print proc.stderr.read() |
2571 | + >>> print(proc.stderr.read()) |
2572 | INFO Creating lockfile: /var/lock/launchpad-gina.lock |
2573 | ... |
2574 | INFO === Processing debian/lenny/release === |
2575 | @@ -797,7 +797,7 @@ |
2576 | |
2577 | >>> gina_proc = [sys.executable, 'scripts/gina.py', '-q', 'bogus'] |
2578 | >>> proc = subprocess.Popen(gina_proc, stderr=subprocess.PIPE) |
2579 | - >>> print proc.stderr.read() |
2580 | + >>> print(proc.stderr.read()) |
2581 | ERROR Failed to analyze archive for bogoland |
2582 | ... |
2583 | MangledArchiveError: No archive directory for bogoland/main |
2584 | |
2585 | === modified file 'lib/lp/soyuz/doc/hasbuildrecords.txt' |
2586 | --- lib/lp/soyuz/doc/hasbuildrecords.txt 2012-09-27 02:53:00 +0000 |
2587 | +++ lib/lp/soyuz/doc/hasbuildrecords.txt 2018-05-27 20:15:09 +0000 |
2588 | @@ -43,7 +43,7 @@ |
2589 | >>> builds = ubuntu['warty'].getBuildRecords(name=u'firefox', |
2590 | ... arch_tag='i386') |
2591 | >>> for build in builds: |
2592 | - ... print build.title |
2593 | + ... print(build.title) |
2594 | i386 build of mozilla-firefox 0.9 in ubuntu warty RELEASE |
2595 | |
2596 | |
2597 | |
2598 | === modified file 'lib/lp/soyuz/doc/package-cache-script.txt' |
2599 | --- lib/lp/soyuz/doc/package-cache-script.txt 2016-06-02 11:37:23 +0000 |
2600 | +++ lib/lp/soyuz/doc/package-cache-script.txt 2018-05-27 20:15:09 +0000 |
2601 | @@ -43,9 +43,9 @@ |
2602 | >>> process.returncode |
2603 | 0 |
2604 | |
2605 | - >>> print stdout |
2606 | + >>> print(stdout) |
2607 | |
2608 | - >>> print stderr |
2609 | + >>> print(stderr) |
2610 | INFO Creating lockfile: /var/lock/launchpad-update-cache.lock |
2611 | INFO Updating ubuntu package counters |
2612 | INFO Updating ubuntu main archives |
2613 | |
2614 | === modified file 'lib/lp/soyuz/doc/package-cache.txt' |
2615 | --- lib/lp/soyuz/doc/package-cache.txt 2016-05-26 16:25:52 +0000 |
2616 | +++ lib/lp/soyuz/doc/package-cache.txt 2018-05-27 20:15:09 +0000 |
2617 | @@ -33,7 +33,7 @@ |
2618 | 10 |
2619 | |
2620 | >>> for name in sorted([cache.name for cache in ubuntu_caches]): |
2621 | - ... print name |
2622 | + ... print(name) |
2623 | alsa-utils |
2624 | cnews |
2625 | commercialpackage |
2626 | @@ -67,7 +67,7 @@ |
2627 | 5 |
2628 | |
2629 | >>> for name in sorted([cache.name for cache in warty_caches]): |
2630 | - ... print name |
2631 | + ... print(name) |
2632 | at |
2633 | foobar |
2634 | linux-2.6.12 |
2635 | @@ -168,7 +168,7 @@ |
2636 | DEBUG Considering sources linux-source-2.6.15, mozilla-firefox, netapplet |
2637 | ... |
2638 | |
2639 | - >>> print updates |
2640 | + >>> print(updates) |
2641 | 10 |
2642 | |
2643 | Now we see that the 'cdrkit' source is part of the caches and can be |
2644 | @@ -250,7 +250,7 @@ |
2645 | DEBUG Considering binaries mozilla-firefox, mozilla-firefox-data, pmount |
2646 | DEBUG Committing |
2647 | |
2648 | - >>> print updates |
2649 | + >>> print(updates) |
2650 | 6 |
2651 | |
2652 | Transaction behaves exactly the same as for Source Caches, except that |
2653 | @@ -285,15 +285,15 @@ |
2654 | With empty cache contents in Archive table we can't even find a PPA by |
2655 | owner name. |
2656 | |
2657 | - >>> print ubuntu.searchPPAs(text=u'cprov').count() |
2658 | + >>> print(ubuntu.searchPPAs(text=u'cprov').count()) |
2659 | 0 |
2660 | |
2661 | Sampledata contains stub counters. |
2662 | |
2663 | - >>> print cprov.archive.sources_cached |
2664 | + >>> print(cprov.archive.sources_cached) |
2665 | 3 |
2666 | |
2667 | - >>> print cprov.archive.binaries_cached |
2668 | + >>> print(cprov.archive.binaries_cached) |
2669 | 3 |
2670 | |
2671 | We have to issue 'updateArchiveCache' to include the owner 'name' and |
2672 | @@ -304,13 +304,13 @@ |
2673 | Now Celso's PPA can be found via searches and the package counters got |
2674 | reset, reflecting that nothing is cached in the database yet. |
2675 | |
2676 | - >>> print ubuntu.searchPPAs(text=u'cprov')[0].displayname |
2677 | + >>> print(ubuntu.searchPPAs(text=u'cprov')[0].displayname) |
2678 | PPA for Celso Providelo |
2679 | |
2680 | - >>> print cprov.archive.sources_cached |
2681 | + >>> print(cprov.archive.sources_cached) |
2682 | 0 |
2683 | |
2684 | - >>> print cprov.archive.binaries_cached |
2685 | + >>> print(cprov.archive.binaries_cached) |
2686 | 0 |
2687 | |
2688 | The sampledata contains no package caches, so attempts to find 'pmount' |
2689 | @@ -348,28 +348,28 @@ |
2690 | >>> cprov.archive.sources_cached == source_updates |
2691 | True |
2692 | |
2693 | - >>> print cprov.archive.sources_cached |
2694 | + >>> print(cprov.archive.sources_cached) |
2695 | 3 |
2696 | |
2697 | >>> cprov.archive.binaries_cached == binary_updates |
2698 | True |
2699 | |
2700 | - >>> print cprov.archive.binaries_cached |
2701 | + >>> print(cprov.archive.binaries_cached) |
2702 | 2 |
2703 | |
2704 | - >>> print ubuntu.searchPPAs(text=u'cprov')[0].displayname |
2705 | - PPA for Celso Providelo |
2706 | - |
2707 | - >>> print ubuntu.searchPPAs(text=u'pmount')[0].displayname |
2708 | - PPA for Celso Providelo |
2709 | - |
2710 | - >>> print ubuntu.searchPPAs(text=u'firefox')[0].displayname |
2711 | - PPA for Celso Providelo |
2712 | - |
2713 | - >>> print ubuntu.searchPPAs(text=u'warty')[0].displayname |
2714 | - PPA for Celso Providelo |
2715 | - |
2716 | - >>> print ubuntu.searchPPAs(text=u'shortdesc')[0].displayname |
2717 | + >>> print(ubuntu.searchPPAs(text=u'cprov')[0].displayname) |
2718 | + PPA for Celso Providelo |
2719 | + |
2720 | + >>> print(ubuntu.searchPPAs(text=u'pmount')[0].displayname) |
2721 | + PPA for Celso Providelo |
2722 | + |
2723 | + >>> print(ubuntu.searchPPAs(text=u'firefox')[0].displayname) |
2724 | + PPA for Celso Providelo |
2725 | + |
2726 | + >>> print(ubuntu.searchPPAs(text=u'warty')[0].displayname) |
2727 | + PPA for Celso Providelo |
2728 | + |
2729 | + >>> print(ubuntu.searchPPAs(text=u'shortdesc')[0].displayname) |
2730 | PPA for Celso Providelo |
2731 | |
2732 | The method which populates the archive caches also cleans the texts up |
2733 | @@ -401,13 +401,13 @@ |
2734 | |
2735 | >>> cprov.archive.updateArchiveCache() |
2736 | |
2737 | - >>> print cprov.archive.sources_cached |
2738 | - 0 |
2739 | - |
2740 | - >>> print cprov.archive.binaries_cached |
2741 | - 0 |
2742 | - |
2743 | - >>> print cprov.archive.package_description_cache |
2744 | + >>> print(cprov.archive.sources_cached) |
2745 | + 0 |
2746 | + |
2747 | + >>> print(cprov.archive.binaries_cached) |
2748 | + 0 |
2749 | + |
2750 | + >>> print(cprov.archive.package_description_cache) |
2751 | celso providelo cprov |
2752 | |
2753 | We insert a new source cache with texts containing punctuation and |
2754 | @@ -435,13 +435,13 @@ |
2755 | Only one source cached and the 'package_description_cache' only contains |
2756 | unique and lowercase words free of any punctuation. |
2757 | |
2758 | - >>> print cprov.archive.sources_cached |
2759 | + >>> print(cprov.archive.sources_cached) |
2760 | 1 |
2761 | |
2762 | - >>> print cprov.archive.binaries_cached |
2763 | + >>> print(cprov.archive.binaries_cached) |
2764 | 0 |
2765 | |
2766 | - >>> print cprov.archive.package_description_cache |
2767 | + >>> print(cprov.archive.package_description_cache) |
2768 | ding providelo celso cdrkit cdrkit-bin dong ubuntu cdrkit-extra cprov |
2769 | |
2770 | Let's remove the unclean cache and update Celso's PPA cache, so |
2771 | @@ -484,7 +484,7 @@ |
2772 | ... commit_chunk=3) |
2773 | DEBUG Considering sources unique-from-factory-... |
2774 | ... |
2775 | - >>> print updates |
2776 | + >>> print(updates) |
2777 | 1 |
2778 | >>> ubuntu.searchSourcePackages(branch.sourcepackagename.name).count() |
2779 | 1 |
2780 | @@ -552,8 +552,8 @@ |
2781 | ... binaryname="pending-binary", pub_source=pending_source, |
2782 | ... status=PackagePublishingStatus.PENDING) |
2783 | |
2784 | - >>> print len( |
2785 | - ... set(pub.binarypackagerelease.name for pub in pending_binaries)) |
2786 | + >>> print(len( |
2787 | + ... set(pub.binarypackagerelease.name for pub in pending_binaries))) |
2788 | 1 |
2789 | |
2790 | And one source with a single binary in PUBLISHED status. |
2791 | @@ -566,8 +566,8 @@ |
2792 | ... binaryname="published-binary", pub_source=published_source, |
2793 | ... status=PackagePublishingStatus.PUBLISHED) |
2794 | |
2795 | - >>> print len( |
2796 | - ... set(pub.binarypackagerelease.name for pub in published_binaries)) |
2797 | + >>> print(len(set( |
2798 | + ... pub.binarypackagerelease.name for pub in published_binaries))) |
2799 | 1 |
2800 | |
2801 | >>> switch_dbuser(test_dbuser) |
2802 | @@ -593,8 +593,8 @@ |
2803 | ... binaryname="pending-binary", pub_source=deleted_source, |
2804 | ... status=PackagePublishingStatus.DELETED) |
2805 | |
2806 | - >>> print len( |
2807 | - ... set(pub.binarypackagerelease.name for pub in deleted_binaries)) |
2808 | + >>> print(len( |
2809 | + ... set(pub.binarypackagerelease.name for pub in deleted_binaries))) |
2810 | 1 |
2811 | |
2812 | >>> switch_dbuser(test_dbuser) |
2813 | @@ -649,10 +649,10 @@ |
2814 | >>> foobar_binary.cache == primary_cache |
2815 | True |
2816 | |
2817 | - >>> print foobar_binary.summary |
2818 | + >>> print(foobar_binary.summary) |
2819 | main foobar |
2820 | |
2821 | - >>> print foobar_binary.description |
2822 | + >>> print(foobar_binary.description) |
2823 | main foobar description |
2824 | |
2825 | The DistroArchSeriesBinaryPackage. |
2826 | @@ -663,10 +663,10 @@ |
2827 | >>> foobar_arch_binary.cache == primary_cache |
2828 | True |
2829 | |
2830 | - >>> print foobar_arch_binary.summary |
2831 | + >>> print(foobar_arch_binary.summary) |
2832 | main foobar |
2833 | |
2834 | - >>> print foobar_arch_binary.description |
2835 | + >>> print(foobar_arch_binary.description) |
2836 | main foobar description |
2837 | |
2838 | This lookup mechanism will continue to work even after we have added a |
2839 | @@ -737,14 +737,14 @@ |
2840 | ... archive=archive) |
2841 | ... binary_caches = DistroSeriesPackageCache.selectBy( |
2842 | ... archive=archive) |
2843 | - ... print '%d sources cached [%d]' % ( |
2844 | - ... archive.sources_cached, source_caches.count()) |
2845 | - ... print '%d binaries cached [%d]' % ( |
2846 | - ... archive.binaries_cached, binary_caches.count()) |
2847 | + ... print('%d sources cached [%d]' % ( |
2848 | + ... archive.sources_cached, source_caches.count())) |
2849 | + ... print('%d binaries cached [%d]' % ( |
2850 | + ... archive.binaries_cached, binary_caches.count())) |
2851 | >>> def print_search_results(text, user=None): |
2852 | ... with lp_dbuser(): |
2853 | ... for ppa in ubuntu.searchPPAs(text, user=user): |
2854 | - ... print ppa.displayname |
2855 | + ... print(ppa.displayname) |
2856 | |
2857 | >>> rebuild_caches(cprov.archive) |
2858 | |
2859 | |
2860 | === modified file 'lib/lp/soyuz/doc/package-diff.txt' |
2861 | --- lib/lp/soyuz/doc/package-diff.txt 2017-01-13 13:33:17 +0000 |
2862 | +++ lib/lp/soyuz/doc/package-diff.txt 2018-05-27 20:15:09 +0000 |
2863 | @@ -50,7 +50,7 @@ |
2864 | >>> verifyObject(IPerson, package_diff.requester) |
2865 | True |
2866 | |
2867 | - >>> print package_diff.requester.displayname |
2868 | + >>> print(package_diff.requester.displayname) |
2869 | Celso Providelo |
2870 | |
2871 | * 'from_source', which maps to a `ISourcePackageRelease`, the base |
2872 | @@ -61,7 +61,7 @@ |
2873 | >>> verifyObject(ISourcePackageRelease, package_diff.from_source) |
2874 | True |
2875 | |
2876 | - >>> print package_diff.from_source.title |
2877 | + >>> print(package_diff.from_source.title) |
2878 | pmount - 0.1-1 |
2879 | |
2880 | * 'to_source', which maps to a `ISourcePackageRelease`, the result |
2881 | @@ -70,31 +70,31 @@ |
2882 | >>> verifyObject(ISourcePackageRelease, package_diff.to_source) |
2883 | True |
2884 | |
2885 | - >>> print package_diff.to_source.title |
2886 | + >>> print(package_diff.to_source.title) |
2887 | pmount - 0.1-2 |
2888 | |
2889 | The PackageDiff record is not yet 'performed', so 'status' is PENDING |
2890 | and both 'date_fulfilled' and 'diff_content' fields are empty. |
2891 | |
2892 | - >>> print package_diff.date_fulfilled |
2893 | - None |
2894 | - |
2895 | - >>> print package_diff.diff_content |
2896 | - None |
2897 | - |
2898 | - >>> print package_diff.status.name |
2899 | + >>> print(package_diff.date_fulfilled) |
2900 | + None |
2901 | + |
2902 | + >>> print(package_diff.diff_content) |
2903 | + None |
2904 | + |
2905 | + >>> print(package_diff.status.name) |
2906 | PENDING |
2907 | |
2908 | IPackageDiff offers a property that return the 'title' of the diff |
2909 | request. |
2910 | |
2911 | - >>> print package_diff.title |
2912 | + >>> print(package_diff.title) |
2913 | diff from 0.1-1 to 0.1-2 |
2914 | |
2915 | IPackageDiff has a property which indicates whether a diff content |
2916 | should be private or not. See section 'PackageDiff privacy' below. |
2917 | |
2918 | - >>> print package_diff.private |
2919 | + >>> print(package_diff.private) |
2920 | False |
2921 | |
2922 | An attempt to record an already recorded DiffRequest will result in an |
2923 | @@ -167,7 +167,7 @@ |
2924 | ... '1.0-8', policy='sync') |
2925 | |
2926 | >>> [diff] = biscuit_eight_pub.sourcepackagerelease.package_diffs |
2927 | - >>> print diff.title |
2928 | + >>> print(diff.title) |
2929 | diff from 1.0-1 to 1.0-8 |
2930 | |
2931 | We will release ubuntu/hoary so we can upload to post-RELEASE pockets |
2932 | @@ -185,7 +185,7 @@ |
2933 | ... '1.0-9', policy='sync', suite='hoary-updates') |
2934 | |
2935 | >>> [diff] = biscuit_nine_pub.sourcepackagerelease.package_diffs |
2936 | - >>> print diff.title |
2937 | + >>> print(diff.title) |
2938 | diff from 1.0-8 to 1.0-9 |
2939 | |
2940 | Now version 1.0-12 gets uploaded to the just opened distroseries. It |
2941 | @@ -199,7 +199,7 @@ |
2942 | ... '1.0-12', policy='sync', suite='breezy-autotest') |
2943 | |
2944 | >>> [diff] = biscuit_twelve_pub.sourcepackagerelease.package_diffs |
2945 | - >>> print diff.title |
2946 | + >>> print(diff.title) |
2947 | diff from 1.0-8 to 1.0-12 |
2948 | |
2949 | The subsequent version uploaded to hoary-updates will get a diff |
2950 | @@ -211,7 +211,7 @@ |
2951 | ... '1.0-10', policy='sync', suite='hoary-updates') |
2952 | |
2953 | >>> [diff] = biscuit_ten_pub.sourcepackagerelease.package_diffs |
2954 | - >>> print diff.title |
2955 | + >>> print(diff.title) |
2956 | diff from 1.0-9 to 1.0-10 |
2957 | |
2958 | An upload to another pocket, in this case hoary-proposed, will get a diff |
2959 | @@ -223,7 +223,7 @@ |
2960 | ... '1.0-11', policy='sync', suite='hoary-proposed') |
2961 | |
2962 | >>> [diff] = biscuit_eleven_pub.sourcepackagerelease.package_diffs |
2963 | - >>> print diff.title |
2964 | + >>> print(diff.title) |
2965 | diff from 1.0-8 to 1.0-11 |
2966 | |
2967 | For testing diffs in the PPA context we need to activate the PPA for |
2968 | @@ -245,7 +245,7 @@ |
2969 | ... '1.0-2', archive=foobar.archive) |
2970 | |
2971 | >>> [diff] = biscuit_two_pub.sourcepackagerelease.package_diffs |
2972 | - >>> print diff.title |
2973 | + >>> print(diff.title) |
2974 | diff from 1.0-8 (in Ubuntu) to 1.0-2 |
2975 | |
2976 | A subsequent upload in the PPA context will get a diff against 1.0-2, |
2977 | @@ -257,7 +257,7 @@ |
2978 | ... '1.0-3', archive=foobar.archive) |
2979 | |
2980 | >>> [diff] = biscuit_three_pub.sourcepackagerelease.package_diffs |
2981 | - >>> print diff.title |
2982 | + >>> print(diff.title) |
2983 | diff from 1.0-2 to 1.0-3 |
2984 | |
2985 | |
2986 | @@ -318,13 +318,13 @@ |
2987 | 'date_fulfilled' and 'diff_content' fields are empty and 'status' is |
2988 | PENDING. |
2989 | |
2990 | - >>> print diff.status.name |
2991 | + >>> print(diff.status.name) |
2992 | PENDING |
2993 | |
2994 | - >>> print diff.date_fulfilled |
2995 | + >>> print(diff.date_fulfilled) |
2996 | None |
2997 | |
2998 | - >>> print diff.diff_content |
2999 | + >>> print(diff.diff_content) |
3000 | None |
3001 | |
3002 | Performing the diff. |
3003 | @@ -338,16 +338,16 @@ |
3004 | >>> diff.date_fulfilled is not None |
3005 | True |
3006 | |
3007 | - >>> print diff.status.name |
3008 | + >>> print(diff.status.name) |
3009 | COMPLETED |
3010 | |
3011 | - >>> print diff.diff_content.filename |
3012 | + >>> print(diff.diff_content.filename) |
3013 | biscuit_1.0-1_1.0-8.diff.gz |
3014 | |
3015 | - >>> print diff.diff_content.mimetype |
3016 | + >>> print(diff.diff_content.mimetype) |
3017 | application/gzipped-patch |
3018 | |
3019 | - >>> print diff.diff_content.restricted |
3020 | + >>> print(diff.diff_content.restricted) |
3021 | False |
3022 | |
3023 | Since it stores the diff results in the librarian we need to commit the |
3024 | @@ -358,7 +358,7 @@ |
3025 | Now we can compare the package diff outcome to the debdiff output |
3026 | (obtained manually on the shell) for the packages in question. |
3027 | |
3028 | - >>> print get_diff_results(diff) |
3029 | + >>> print(get_diff_results(diff)) |
3030 | --- biscuit-1.0/contents |
3031 | +++ biscuit-1.0/contents |
3032 | @@ -2,0 +3 @@ |
3033 | @@ -392,10 +392,10 @@ |
3034 | >>> resp = con.getresponse() |
3035 | >>> headers = dict(resp.getheaders()) |
3036 | |
3037 | - >>> print headers['content-encoding'] |
3038 | + >>> print(headers['content-encoding']) |
3039 | gzip |
3040 | |
3041 | - >>> print headers['content-type'] |
3042 | + >>> print(headers['content-type']) |
3043 | text/plain |
3044 | |
3045 | |
3046 | @@ -424,8 +424,8 @@ |
3047 | ... diff_first_id = diffs[0].id |
3048 | ... for diff in diff_set: |
3049 | ... id_diff = diff.id - diff_first_id |
3050 | - ... print diff.from_source.name, diff.title, \ |
3051 | - ... diff.date_fulfilled is not None, id_diff |
3052 | + ... print(diff.from_source.name, diff.title, |
3053 | + ... diff.date_fulfilled is not None, id_diff) |
3054 | |
3055 | >>> print_diffs(packagediff_set) |
3056 | biscuit diff from 1.0-2 to 1.0-3 False 0 |
3057 | @@ -492,7 +492,7 @@ |
3058 | >>> staging_ppa_pub = packager.uploadSourceVersion( |
3059 | ... '1.0-1', archive=foobar.archive) |
3060 | >>> [diff] = staging_ppa_pub.sourcepackagerelease.package_diffs |
3061 | - >>> print diff.title |
3062 | + >>> print(diff.title) |
3063 | diff from 1.0-1 (in Ubuntu) to 1.0-1 |
3064 | |
3065 | Commit the transaction to make the uploaded files available in |
3066 | @@ -509,17 +509,17 @@ |
3067 | The PackageDiff request was correctly performed and the result is an |
3068 | empty library file, which is what the user expects. |
3069 | |
3070 | - >>> print diff.status.name |
3071 | + >>> print(diff.status.name) |
3072 | COMPLETED |
3073 | |
3074 | >>> diff.date_fulfilled is not None |
3075 | True |
3076 | |
3077 | - >>> print diff.diff_content.filename |
3078 | + >>> print(diff.diff_content.filename) |
3079 | staging_1.0-1_1.0-1.diff.gz |
3080 | |
3081 | >>> get_diff_results(diff) |
3082 | - '' |
3083 | + u'' |
3084 | |
3085 | Now we will simulate a version collision when generating the diff. |
3086 | |
3087 | @@ -545,7 +545,7 @@ |
3088 | >>> collision_ppa_pub = packager.uploadSourceVersion( |
3089 | ... '1.0-1', archive=foobar.archive) |
3090 | >>> [diff] = collision_ppa_pub.sourcepackagerelease.package_diffs |
3091 | - >>> print diff.title |
3092 | + >>> print(diff.title) |
3093 | diff from 1.0-1 (in Ubuntu) to 1.0-1 |
3094 | |
3095 | Note that, despite having the same name and version, the diff.gz |
3096 | @@ -563,7 +563,7 @@ |
3097 | |
3098 | >>> distinct_files = [filename for filename, md5 in file_set] |
3099 | >>> for filename in sorted(distinct_files): |
3100 | - ... print filename |
3101 | + ... print(filename) |
3102 | collision_1.0-1.diff.gz |
3103 | collision_1.0-1.diff.gz |
3104 | collision_1.0-1.dsc |
3105 | @@ -586,16 +586,16 @@ |
3106 | The package-diff subsystem has dealt with the filename conflicts and |
3107 | the diff was properly generated. |
3108 | |
3109 | - >>> print diff.status.name |
3110 | + >>> print(diff.status.name) |
3111 | COMPLETED |
3112 | |
3113 | >>> diff.date_fulfilled is not None |
3114 | True |
3115 | |
3116 | - >>> print diff.diff_content.filename |
3117 | + >>> print(diff.diff_content.filename) |
3118 | collision_1.0-1_1.0-1.diff.gz |
3119 | |
3120 | - >>> print get_diff_results(diff) |
3121 | + >>> print(get_diff_results(diff)) |
3122 | --- collision-1.0/contents |
3123 | +++ collision-1.0/contents |
3124 | @@ -2,0 +3 @@ |
3125 | @@ -633,7 +633,7 @@ |
3126 | >>> fillLibrarianFile(dsc.libraryfile.id) |
3127 | |
3128 | >>> [broken_diff] = pub.sourcepackagerelease.package_diffs |
3129 | - >>> print broken_diff.title |
3130 | + >>> print(broken_diff.title) |
3131 | diff from 1.0-1 (in Ubuntu) to 1.0-2 |
3132 | |
3133 | With a tainted DSC 'debdiff' cannot do much and fails, resulting in a |
3134 | @@ -642,13 +642,13 @@ |
3135 | >>> broken_diff.performDiff() |
3136 | >>> transaction.commit() |
3137 | |
3138 | - >>> print broken_diff.status.name |
3139 | + >>> print(broken_diff.status.name) |
3140 | FAILED |
3141 | |
3142 | >>> broken_diff.date_fulfilled is None |
3143 | True |
3144 | |
3145 | - >>> print broken_diff.diff_content |
3146 | + >>> print(broken_diff.diff_content) |
3147 | None |
3148 | |
3149 | |
3150 | @@ -664,27 +664,27 @@ |
3151 | explain how this mechanism works. |
3152 | |
3153 | >>> [diff] = biscuit_two_pub.sourcepackagerelease.package_diffs |
3154 | - >>> print diff.title |
3155 | + >>> print(diff.title) |
3156 | diff from 1.0-8 (in Ubuntu) to 1.0-2 |
3157 | |
3158 | The chosen diff is for a source uploaded to a public PPA. |
3159 | |
3160 | - >>> print diff.to_source.upload_archive.displayname |
3161 | + >>> print(diff.to_source.upload_archive.displayname) |
3162 | PPA for Foo Bar |
3163 | |
3164 | - >>> print diff.to_source.upload_archive.private |
3165 | + >>> print(diff.to_source.upload_archive.private) |
3166 | False |
3167 | |
3168 | Thus it's also considered public and the generated 'diff_content' is |
3169 | stored in the public librarian. |
3170 | |
3171 | - >>> print diff.private |
3172 | + >>> print(diff.private) |
3173 | False |
3174 | |
3175 | >>> diff.performDiff() |
3176 | >>> transaction.commit() |
3177 | |
3178 | - >>> print diff.diff_content.restricted |
3179 | + >>> print(diff.diff_content.restricted) |
3180 | False |
3181 | |
3182 | If the diff is attached to a private PPA, the diff becomes 'private' and |
3183 | @@ -695,11 +695,11 @@ |
3184 | >>> removeSecurityProxy(diff.to_source).upload_archive = private_ppa |
3185 | >>> removeSecurityProxy(biscuit_two_pub).archive = private_ppa |
3186 | |
3187 | - >>> print diff.private |
3188 | + >>> print(diff.private) |
3189 | True |
3190 | |
3191 | >>> diff.performDiff() |
3192 | >>> transaction.commit() |
3193 | |
3194 | - >>> print diff.diff_content.restricted |
3195 | + >>> print(diff.diff_content.restricted) |
3196 | True |
3197 | |
3198 | === modified file 'lib/lp/soyuz/doc/package-meta-classes.txt' |
3199 | --- lib/lp/soyuz/doc/package-meta-classes.txt 2014-11-10 00:53:02 +0000 |
3200 | +++ lib/lp/soyuz/doc/package-meta-classes.txt 2018-05-27 20:15:09 +0000 |
3201 | @@ -87,8 +87,8 @@ |
3202 | |
3203 | >>> def print_builds(builds): |
3204 | ... for build in builds: |
3205 | - ... print "%s in %s" % (build.source_package_release.name, |
3206 | - ... build.archive.displayname) |
3207 | + ... print("%s in %s" % (build.source_package_release.name, |
3208 | + ... build.archive.displayname)) |
3209 | |
3210 | Now we can query the builds: |
3211 | |
3212 | |
3213 | === modified file 'lib/lp/soyuz/doc/package-relationship-pages.txt' |
3214 | --- lib/lp/soyuz/doc/package-relationship-pages.txt 2012-01-06 11:08:30 +0000 |
3215 | +++ lib/lp/soyuz/doc/package-relationship-pages.txt 2018-05-27 20:15:09 +0000 |
3216 | @@ -28,8 +28,10 @@ |
3217 | Note that iterations over PackageRelationshipSet are sorted |
3218 | alphabetically according to the relationship 'name': |
3219 | |
3220 | - >>> [relationship.name for relationship in relationship_set] |
3221 | - ['foobar', 'test'] |
3222 | + >>> for relationship in relationship_set: |
3223 | + ... print(relationship.name) |
3224 | + foobar |
3225 | + test |
3226 | |
3227 | It will cause all the relationship contents to be rendered in this order. |
3228 | |
3229 | |
3230 | === modified file 'lib/lp/soyuz/doc/packagecopyrequest.txt' |
3231 | --- lib/lp/soyuz/doc/packagecopyrequest.txt 2012-12-26 01:32:19 +0000 |
3232 | +++ lib/lp/soyuz/doc/packagecopyrequest.txt 2018-05-27 20:15:09 +0000 |
3233 | @@ -57,7 +57,7 @@ |
3234 | components are not set by default. Also, the date started and completed are |
3235 | not set either since this is a new package copy request. |
3236 | |
3237 | - >>> print new_pcr |
3238 | + >>> print(new_pcr) |
3239 | Package copy request |
3240 | source = primary/breezy-autotest/-/RELEASE |
3241 | target = our-sample-copy-archive/breezy-autotest/-/RELEASE |
3242 | |
3243 | === modified file 'lib/lp/soyuz/doc/packageupload-lookups.txt' |
3244 | --- lib/lp/soyuz/doc/packageupload-lookups.txt 2014-07-14 16:47:15 +0000 |
3245 | +++ lib/lp/soyuz/doc/packageupload-lookups.txt 2018-05-27 20:15:09 +0000 |
3246 | @@ -47,10 +47,10 @@ |
3247 | ... changesfile = build.upload_changesfile |
3248 | ... if package_upload is None or changesfile is None: |
3249 | ... builds_missing_upload.append(builds) |
3250 | - ... print '* %s' % archive.displayname |
3251 | - ... print '%d of %d sources and %d of %d builds missing uploads' % ( |
3252 | + ... print('* %s' % archive.displayname) |
3253 | + ... print('%d of %d sources and %d of %d builds missing uploads' % ( |
3254 | ... len(sources_missing_upload), len(sources), |
3255 | - ... len(builds_missing_upload), len(builds)) |
3256 | + ... len(builds_missing_upload), len(builds))) |
3257 | |
3258 | As we can see from the results below, most of our sampledata are |
3259 | sources and builds directly imported into the system, not |
3260 | @@ -110,27 +110,27 @@ |
3261 | The `SourcePackageRelease` 'package_upload' and 'upload_changesfile' |
3262 | |
3263 | >>> original_source_upload = source.sourcepackagerelease.package_upload |
3264 | - >>> print original_source_upload |
3265 | + >>> print(original_source_upload) |
3266 | <PackageUpload ...> |
3267 | |
3268 | >>> source_changesfile = source.sourcepackagerelease.upload_changesfile |
3269 | >>> original_source_upload.changesfile == source_changesfile |
3270 | True |
3271 | |
3272 | - >>> print source_changesfile.filename |
3273 | + >>> print(source_changesfile.filename) |
3274 | testing_1.0_source.changes |
3275 | |
3276 | The `Build` 'package_upload' and 'upload_changesfile' |
3277 | |
3278 | >>> original_build_upload = build.package_upload |
3279 | - >>> print original_build_upload |
3280 | + >>> print(original_build_upload) |
3281 | <...PackageUpload ...> |
3282 | |
3283 | >>> build_changesfile = build.upload_changesfile |
3284 | >>> original_build_upload.changesfile == build_changesfile |
3285 | True |
3286 | |
3287 | - >>> print build_changesfile.filename |
3288 | + >>> print(build_changesfile.filename) |
3289 | testing-bin_1.0_i386.changes |
3290 | |
3291 | The `PackageUpload` lookups are not restricted to the status of the |
3292 | |
3293 | === modified file 'lib/lp/soyuz/doc/pocketchroot.txt' |
3294 | --- lib/lp/soyuz/doc/pocketchroot.txt 2015-10-06 06:48:01 +0000 |
3295 | +++ lib/lp/soyuz/doc/pocketchroot.txt 2018-05-27 20:15:09 +0000 |
3296 | @@ -25,30 +25,30 @@ |
3297 | Check if getPocketChroot returns None for unknown chroots: |
3298 | |
3299 | >>> p_chroot = hoary_i386.getPocketChroot() |
3300 | - >>> print p_chroot |
3301 | + >>> print(p_chroot) |
3302 | None |
3303 | |
3304 | Check if getChroot returns the 'default' argument on not found chroots: |
3305 | |
3306 | - >>> hoary_i386.getChroot(default='duuuuh') |
3307 | - 'duuuuh' |
3308 | + >>> print(hoary_i386.getChroot(default='duuuuh')) |
3309 | + duuuuh |
3310 | |
3311 | Invoke addOrUpdateChroot for missing chroot, so it will insert a new |
3312 | record in PocketChroot: |
3313 | |
3314 | >>> p_chroot1 = hoary_i386.addOrUpdateChroot(chroot=chroot1) |
3315 | - >>> print p_chroot1.distroarchseries.architecturetag |
3316 | + >>> print(p_chroot1.distroarchseries.architecturetag) |
3317 | i386 |
3318 | - >>> print p_chroot1.chroot.id, |
3319 | + >>> print(p_chroot1.chroot.id) |
3320 | 1 |
3321 | |
3322 | Invoke addOrUpdateChroot on an existing PocketChroot, it will update |
3323 | the chroot: |
3324 | |
3325 | >>> p_chroot2 = hoary_i386.addOrUpdateChroot(chroot=chroot2) |
3326 | - >>> print p_chroot2.distroarchseries.architecturetag |
3327 | + >>> print(p_chroot2.distroarchseries.architecturetag) |
3328 | i386 |
3329 | - >>> print p_chroot2.chroot.id, |
3330 | + >>> print(p_chroot2.chroot.id) |
3331 | 2 |
3332 | >>> p_chroot2 == p_chroot1 |
3333 | True |
3334 | |
3335 | === modified file 'lib/lp/soyuz/doc/publishing-security.txt' |
3336 | --- lib/lp/soyuz/doc/publishing-security.txt 2012-04-10 14:01:17 +0000 |
3337 | +++ lib/lp/soyuz/doc/publishing-security.txt 2018-05-27 20:15:09 +0000 |
3338 | @@ -25,21 +25,21 @@ |
3339 | the public PPA: |
3340 | |
3341 | >>> login(ANONYMOUS) |
3342 | - >>> print public_ppa.getPublishedSources().first().displayname |
3343 | + >>> print(public_ppa.getPublishedSources().first().displayname) |
3344 | foo 666 in breezy-autotest |
3345 | |
3346 | >>> binary_pub = public_ppa.getAllPublishedBinaries()[0] |
3347 | - >>> print binary_pub.displayname |
3348 | + >>> print(binary_pub.displayname) |
3349 | foo-bin 666 in breezy-autotest i386 |
3350 | |
3351 | A regular user can see them too: |
3352 | |
3353 | >>> login('no-priv@canonical.com') |
3354 | - >>> print public_ppa.getPublishedSources().first().displayname |
3355 | + >>> print(public_ppa.getPublishedSources().first().displayname) |
3356 | foo 666 in breezy-autotest |
3357 | |
3358 | >>> binary_pub = public_ppa.getAllPublishedBinaries()[0] |
3359 | - >>> print binary_pub.displayname |
3360 | + >>> print(binary_pub.displayname) |
3361 | foo-bin 666 in breezy-autotest i386 |
3362 | |
3363 | But when querying the private PPA, anonymous access will be refused: |
3364 | @@ -71,20 +71,20 @@ |
3365 | But the owner can see them. |
3366 | |
3367 | >>> ignored = login_person(private_ppa.owner) |
3368 | - >>> print public_ppa.getPublishedSources().first().displayname |
3369 | + >>> print(public_ppa.getPublishedSources().first().displayname) |
3370 | foo 666 in breezy-autotest |
3371 | |
3372 | >>> binary_pub = private_ppa.getAllPublishedBinaries()[0] |
3373 | - >>> print binary_pub.displayname |
3374 | + >>> print(binary_pub.displayname) |
3375 | foo-bin 666 in breezy-autotest i386 |
3376 | |
3377 | As can an administrator. |
3378 | |
3379 | >>> login('admin@canonical.com') |
3380 | - >>> print public_ppa.getPublishedSources().first().displayname |
3381 | + >>> print(public_ppa.getPublishedSources().first().displayname) |
3382 | foo 666 in breezy-autotest |
3383 | |
3384 | >>> binary_pub = private_ppa.getAllPublishedBinaries()[0] |
3385 | - >>> print binary_pub.displayname |
3386 | + >>> print(binary_pub.displayname) |
3387 | foo-bin 666 in breezy-autotest i386 |
3388 | |
3389 | |
3390 | === modified file 'lib/lp/soyuz/doc/publishing.txt' |
3391 | --- lib/lp/soyuz/doc/publishing.txt 2018-04-19 00:02:19 +0000 |
3392 | +++ lib/lp/soyuz/doc/publishing.txt 2018-05-27 20:15:09 +0000 |
3393 | @@ -63,27 +63,27 @@ |
3394 | This is mostly as a convenience to API users so that we don't need to export |
3395 | tiny 2-column content classes and force the users to retrieve those. |
3396 | |
3397 | - >>> print spph.source_package_name |
3398 | + >>> print(spph.source_package_name) |
3399 | pmount |
3400 | |
3401 | - >>> print spph.source_package_version |
3402 | + >>> print(spph.source_package_version) |
3403 | 0.1-1 |
3404 | |
3405 | - >>> print spph.component_name |
3406 | + >>> print(spph.component_name) |
3407 | main |
3408 | |
3409 | - >>> print spph.section_name |
3410 | + >>> print(spph.section_name) |
3411 | base |
3412 | |
3413 | Other properties are shortcuts to the source package's properties: |
3414 | |
3415 | - >>> print spph.package_creator |
3416 | - <Person at ... mark (Mark Shuttleworth)> |
3417 | - |
3418 | - >>> print spph.package_maintainer |
3419 | - <Person at ... mark (Mark Shuttleworth)> |
3420 | - |
3421 | - >>> print spph.package_signer |
3422 | + >>> print(spph.package_creator) |
3423 | + <Person at ... mark (Mark Shuttleworth)> |
3424 | + |
3425 | + >>> print(spph.package_maintainer) |
3426 | + <Person at ... mark (Mark Shuttleworth)> |
3427 | + |
3428 | + >>> print(spph.package_signer) |
3429 | <Person at ... name16 (Foo Bar)> |
3430 | |
3431 | The signer can also be None for packages that were synced (e.g. from Debian): |
3432 | @@ -91,7 +91,7 @@ |
3433 | >>> from lp.services.propertycache import get_property_cache |
3434 | >>> spph.sourcepackagerelease.signing_key_owner = None |
3435 | >>> spph.sourcepackagerelease.signing_key_fingerprint = None |
3436 | - >>> print spph.package_signer |
3437 | + >>> print(spph.package_signer) |
3438 | None |
3439 | |
3440 | There is also a method that returns the .changes file URL. This is proxied |
3441 | @@ -104,13 +104,13 @@ |
3442 | |
3443 | The pmount source has no packageupload in the sampledata: |
3444 | |
3445 | - >>> print spph.changesFileUrl() |
3446 | + >>> print(spph.changesFileUrl()) |
3447 | None |
3448 | |
3449 | The iceweasel source has good data: |
3450 | |
3451 | >>> pub = spph.archive.getPublishedSources(name=u"iceweasel").first() |
3452 | - >>> print pub.changesFileUrl() |
3453 | + >>> print(pub.changesFileUrl()) |
3454 | http://.../ubuntu/+archive/primary/+files/mozilla-firefox_0.9_i386.changes |
3455 | |
3456 | There is also a helper property to determine whether the current release for |
3457 | @@ -118,7 +118,7 @@ |
3458 | returned if there is no package in the distroseries primary archive with a |
3459 | later version. |
3460 | |
3461 | - >>> print pub.newer_distroseries_version |
3462 | + >>> print(pub.newer_distroseries_version) |
3463 | None |
3464 | |
3465 | If we publish iceweasel 1.1 in the same distroseries, then the distroseries |
3466 | @@ -133,14 +133,14 @@ |
3467 | ... sourcename='iceweasel') |
3468 | |
3469 | >>> del get_property_cache(pub).newer_distroseries_version |
3470 | - >>> print pub.newer_distroseries_version.title |
3471 | + >>> print(pub.newer_distroseries_version.title) |
3472 | iceweasel 1.1 source package in Ubuntu |
3473 | |
3474 | We can calculate the newer_distroseries_version for many spph objects at once. |
3475 | |
3476 | >>> del get_property_cache(pub).newer_distroseries_version |
3477 | >>> pub.distroseries.setNewerDistroSeriesVersions([pub]) |
3478 | - >>> print get_property_cache(pub).newer_distroseries_version.title |
3479 | + >>> print(get_property_cache(pub).newer_distroseries_version.title) |
3480 | iceweasel 1.1 source package in Ubuntu |
3481 | |
3482 | A helper is also included to create a summary of the build statuses for |
3483 | @@ -159,10 +159,10 @@ |
3484 | |
3485 | >>> import operator |
3486 | >>> def print_build_status_summary(summary): |
3487 | - ... print summary['status'].title |
3488 | + ... print(summary['status'].title) |
3489 | ... for build in sorted( |
3490 | ... summary['builds'], key=operator.attrgetter('title')): |
3491 | - ... print build.title |
3492 | + ... print(build.title) |
3493 | >>> build_status_summary = spph.getStatusSummaryForBuilds() |
3494 | >>> print_build_status_summary(build_status_summary) |
3495 | FULLYBUILT_PENDING |
3496 | @@ -176,7 +176,7 @@ |
3497 | >>> ps = getUtility(IPublishingSet) |
3498 | >>> unpublished_builds = ps.getUnpublishedBuildsForSources([spph]) |
3499 | >>> for _, b, _ in sorted(unpublished_builds, key=lambda b:b[1].title): |
3500 | - ... print b.title |
3501 | + ... print(b.title) |
3502 | hppa build of abc 666 in ubuntutest breezy-autotest RELEASE |
3503 | i386 build of abc 666 in ubuntutest breezy-autotest RELEASE |
3504 | |
3505 | @@ -209,7 +209,7 @@ |
3506 | Nor will it be included in the unpublished builds: |
3507 | |
3508 | >>> for _, build, _ in ps.getUnpublishedBuildsForSources([spph]): |
3509 | - ... print build.title |
3510 | + ... print(build.title) |
3511 | i386 build of abc 666 in ubuntutest breezy-autotest RELEASE |
3512 | |
3513 | By default, only FULLYBUILT builds are included in the returned |
3514 | @@ -218,14 +218,14 @@ |
3515 | >>> builds[1].updateStatus( |
3516 | ... BuildStatus.SUPERSEDED, force_invalid_transition=True) |
3517 | >>> for _, build, _ in ps.getUnpublishedBuildsForSources([spph]): |
3518 | - ... print build.title |
3519 | + ... print(build.title) |
3520 | |
3521 | But the returned build-states can be set explicitly: |
3522 | |
3523 | >>> for _, build, _ in ps.getUnpublishedBuildsForSources( |
3524 | ... [spph], |
3525 | ... build_states=[BuildStatus.FULLYBUILT, BuildStatus.SUPERSEDED]): |
3526 | - ... print build.title |
3527 | + ... print(build.title) |
3528 | i386 build of abc 666 in ubuntutest breezy-autotest RELEASE |
3529 | |
3530 | Just switch it back to FULLYBUILT before continuing: |
3531 | @@ -248,7 +248,7 @@ |
3532 | There are no longer any unpublished builds for the source package: |
3533 | |
3534 | >>> for _, build, _ in ps.getUnpublishedBuildsForSources([spph]): |
3535 | - ... print build.title |
3536 | + ... print(build.title) |
3537 | |
3538 | If a build is deleted, it does not cause the build status summary to change: |
3539 | |
3540 | @@ -293,16 +293,16 @@ |
3541 | >>> verifyObject(IBinaryPackagePublishingHistory, bpph) |
3542 | True |
3543 | |
3544 | - >>> print bpph.binary_package_name |
3545 | + >>> print(bpph.binary_package_name) |
3546 | def-bin |
3547 | |
3548 | - >>> print bpph.binary_package_version |
3549 | + >>> print(bpph.binary_package_version) |
3550 | 666 |
3551 | |
3552 | - >>> print bpph.component_name |
3553 | + >>> print(bpph.component_name) |
3554 | main |
3555 | |
3556 | - >>> print bpph.section_name |
3557 | + >>> print(bpph.section_name) |
3558 | base |
3559 | |
3560 | |
3561 | @@ -314,7 +314,7 @@ |
3562 | ... IBinaryPackagePublishingHistory) |
3563 | >>> spph = SourcePackagePublishingHistory.get(10) |
3564 | |
3565 | - >>> print spph.displayname |
3566 | + >>> print(spph.displayname) |
3567 | alsa-utils 1.0.8-1ubuntu1 in warty |
3568 | |
3569 | |
3570 | @@ -358,10 +358,10 @@ |
3571 | >>> modified_spph.datesuperseded == transaction_timestamp |
3572 | True |
3573 | |
3574 | - >>> print modified_spph.removed_by.name |
3575 | + >>> print(modified_spph.removed_by.name) |
3576 | mark |
3577 | |
3578 | - >>> print modified_spph.removal_comment |
3579 | + >>> print(modified_spph.removal_comment) |
3580 | testing deletion |
3581 | |
3582 | requestObsolescence takes no additional arguments: |
3583 | @@ -405,11 +405,11 @@ |
3584 | ... status=PackagePublishingStatus.PUBLISHED, |
3585 | ... pocket=PackagePublishingPocket.PROPOSED) |
3586 | |
3587 | - >>> print source.displayname |
3588 | + >>> print(source.displayname) |
3589 | ghi 666 in breezy-autotest |
3590 | |
3591 | >>> for bin in binaries: |
3592 | - ... print bin.displayname |
3593 | + ... print(bin.displayname) |
3594 | ghi-bin 666 in breezy-autotest i386 |
3595 | ghi-bin 666 in breezy-autotest hppa |
3596 | |
3597 | @@ -425,7 +425,7 @@ |
3598 | without retrieving its binaries. |
3599 | |
3600 | >>> for build in source.getBuilds(): |
3601 | - ... print build.title |
3602 | + ... print(build.title) |
3603 | hppa build of ghi 666 in ubuntutest breezy-autotest PROPOSED |
3604 | i386 build of ghi 666 in ubuntutest breezy-autotest PROPOSED |
3605 | |
3606 | @@ -472,11 +472,11 @@ |
3607 | a chance to verify its contents and include it in the destination |
3608 | archive index. |
3609 | |
3610 | - >>> print copied_source.status.name |
3611 | + >>> print(copied_source.status.name) |
3612 | PENDING |
3613 | |
3614 | >>> for bin in copied_binaries: |
3615 | - ... print bin.status.name |
3616 | + ... print(bin.status.name) |
3617 | PENDING |
3618 | PENDING |
3619 | |
3620 | @@ -499,12 +499,12 @@ |
3621 | previous broken implementation in this area. |
3622 | |
3623 | >>> for bin in source.getPublishedBinaries(): |
3624 | - ... print bin.displayname, bin.pocket.name, bin.status.name |
3625 | + ... print(bin.displayname, bin.pocket.name, bin.status.name) |
3626 | ghi-bin 666 in breezy-autotest hppa PROPOSED PUBLISHED |
3627 | ghi-bin 666 in breezy-autotest i386 PROPOSED PUBLISHED |
3628 | |
3629 | >>> for bin in copied_source.getPublishedBinaries(): |
3630 | - ... print bin.displayname, bin.pocket.name, bin.status.name |
3631 | + ... print(bin.displayname, bin.pocket.name, bin.status.name) |
3632 | ghi-bin 666 in breezy-autotest hppa UPDATES PENDING |
3633 | ghi-bin 666 in breezy-autotest i386 UPDATES PENDING |
3634 | |
3635 | @@ -588,7 +588,7 @@ |
3636 | but also the override just done. |
3637 | |
3638 | >>> for pub in copied_source.getPublishedBinaries(): |
3639 | - ... print pub.displayname, pub.component.name |
3640 | + ... print(pub.displayname, pub.component.name) |
3641 | ghi-bin 666 in breezy-autotest hppa universe |
3642 | ghi-bin 666 in breezy-autotest hppa main |
3643 | ghi-bin 666 in breezy-autotest i386 main |
3644 | @@ -601,7 +601,7 @@ |
3645 | publications and the hppa one is the overridden one. |
3646 | |
3647 | >>> for pub in copied_source.getBuiltBinaries(): |
3648 | - ... print pub.displayname, pub.component.name |
3649 | + ... print(pub.displayname, pub.component.name) |
3650 | ghi-bin 666 in breezy-autotest hppa universe |
3651 | ghi-bin 666 in breezy-autotest i386 main |
3652 | |
3653 | @@ -630,11 +630,11 @@ |
3654 | ... status=PackagePublishingStatus.PUBLISHED, |
3655 | ... pocket=PackagePublishingPocket.PROPOSED) |
3656 | |
3657 | - >>> print source_all.displayname |
3658 | + >>> print(source_all.displayname) |
3659 | pirulito 666 in breezy-autotest |
3660 | |
3661 | >>> for bin in binaries_all: |
3662 | - ... print bin.displayname |
3663 | + ... print(bin.displayname) |
3664 | pirulito 666 in breezy-autotest i386 |
3665 | pirulito 666 in breezy-autotest hppa |
3666 | |
3667 | @@ -643,7 +643,7 @@ |
3668 | |
3669 | >>> copied_source_all = source_all.copyTo(distroseries, pocket, archive) |
3670 | |
3671 | - >>> print copied_source_all.displayname |
3672 | + >>> print(copied_source_all.displayname) |
3673 | pirulito 666 in breezy-autotest |
3674 | |
3675 | Architecture independent binaries, however, when copied results in |
3676 | @@ -664,7 +664,7 @@ |
3677 | The same binary is published in both supported architectures. |
3678 | |
3679 | >>> for bin in binary_copies: |
3680 | - ... print bin.displayname |
3681 | + ... print(bin.displayname) |
3682 | pirulito 666 in breezy-autotest hppa |
3683 | pirulito 666 in breezy-autotest i386 |
3684 | |
3685 | @@ -675,7 +675,7 @@ |
3686 | >>> copied_binaries_all = copied_source_all.getPublishedBinaries() |
3687 | |
3688 | >>> for bin in copied_binaries_all: |
3689 | - ... print bin.displayname |
3690 | + ... print(bin.displayname) |
3691 | pirulito 666 in breezy-autotest hppa |
3692 | pirulito 666 in breezy-autotest i386 |
3693 | |
3694 | @@ -690,7 +690,7 @@ |
3695 | |
3696 | >>> [built_binary] = copied_source_all.getBuiltBinaries() |
3697 | |
3698 | - >>> print built_binary.displayname |
3699 | + >>> print(built_binary.displayname) |
3700 | pirulito 666 in breezy-autotest i386 |
3701 | |
3702 | |
3703 | @@ -711,11 +711,11 @@ |
3704 | ... pub_source=ppa_source, |
3705 | ... status=PackagePublishingStatus.PUBLISHED) |
3706 | |
3707 | - >>> print ppa_source.displayname, ppa_source.archive.displayname |
3708 | + >>> print(ppa_source.displayname, ppa_source.archive.displayname) |
3709 | jkl 666 in breezy-autotest PPA for Celso Providelo |
3710 | |
3711 | >>> for bin in ppa_binaries: |
3712 | - ... print bin.displayname, bin.archive.displayname |
3713 | + ... print(bin.displayname, bin.archive.displayname) |
3714 | jkl-bin 666 in breezy-autotest i386 PPA for Celso Providelo |
3715 | jkl-bin 666 in breezy-autotest hppa PPA for Celso Providelo |
3716 | |
3717 | @@ -803,7 +803,7 @@ |
3718 | >>> copied_source = ppa_source.copyTo(series, pocket, archive) |
3719 | |
3720 | >>> ppa_binary_i386 = ppa_binaries[0] |
3721 | - >>> print ppa_binary_i386.displayname |
3722 | + >>> print(ppa_binary_i386.displayname) |
3723 | mno-bin 999 in breezy-autotest i386 |
3724 | |
3725 | >>> copied_binary = ppa_binary_i386.copyTo(series, pocket, archive) |
3726 | @@ -811,11 +811,11 @@ |
3727 | The source and binary are present in hoary-test: |
3728 | |
3729 | >>> copied_source = SourcePackagePublishingHistory.get(copied_source.id) |
3730 | - >>> print copied_source.displayname |
3731 | + >>> print(copied_source.displayname) |
3732 | mno 999 in hoary-test |
3733 | |
3734 | >>> for bin in copied_source.getPublishedBinaries(): |
3735 | - ... print bin.displayname |
3736 | + ... print(bin.displayname) |
3737 | mno-bin 999 in hoary-test amd64 |
3738 | mno-bin 999 in hoary-test i386 |
3739 | |
3740 | @@ -834,7 +834,7 @@ |
3741 | Using the same Ubuntu source publishing example as above: |
3742 | |
3743 | >>> for file in source.getSourceAndBinaryLibraryFiles(): |
3744 | - ... print file.filename |
3745 | + ... print(file.filename) |
3746 | ghi-bin_666_hppa.deb |
3747 | ghi-bin_666_i386.deb |
3748 | ghi_666.dsc |
3749 | @@ -851,7 +851,7 @@ |
3750 | ... status=PackagePublishingStatus.PUBLISHED) |
3751 | |
3752 | >>> for file in ppa_source.getSourceAndBinaryLibraryFiles(): |
3753 | - ... print file.filename |
3754 | + ... print(file.filename) |
3755 | pqr-bin_666_all.deb |
3756 | pqr_666.dsc |
3757 | |
3758 | @@ -880,7 +880,7 @@ |
3759 | ... IBinaryPackageFile) |
3760 | |
3761 | >>> bpph = BinaryPackagePublishingHistory.get(15) |
3762 | - >>> print bpph.displayname |
3763 | + >>> print(bpph.displayname) |
3764 | mozilla-firefox 0.9 in woody i386 |
3765 | |
3766 | >>> IBinaryPackagePublishingHistory.providedBy(bpph) |
3767 | @@ -896,7 +896,7 @@ |
3768 | Binary publishing records also have a download count, which contains |
3769 | the number of downloads of this binary package release in this archive. |
3770 | |
3771 | - >>> print bpph.getDownloadCount() |
3772 | + >>> print(bpph.getDownloadCount()) |
3773 | 0 |
3774 | |
3775 | >>> from datetime import date |
3776 | @@ -911,7 +911,7 @@ |
3777 | >>> bpph.archive.updatePackageDownloadCount( |
3778 | ... bpph.binarypackagerelease, date(2010, 2, 21), uk, 4) |
3779 | |
3780 | - >>> print bpph.getDownloadCount() |
3781 | + >>> print(bpph.getDownloadCount()) |
3782 | 16 |
3783 | |
3784 | We can also use getDownloadCounts to find the raw download counts per |
3785 | @@ -1005,7 +1005,7 @@ |
3786 | ... component=test_source_pub.component, |
3787 | ... section=test_source_pub.section, |
3788 | ... pocket=test_source_pub.pocket) |
3789 | - >>> print ppa_pub.component.name |
3790 | + >>> print(ppa_pub.component.name) |
3791 | main |
3792 | |
3793 | IPublishingSet is an essential component for |
3794 | @@ -1019,7 +1019,7 @@ |
3795 | >>> len(cprov_sources) |
3796 | 8 |
3797 | >>> for spph in cprov_sources: |
3798 | - ... print spph.displayname |
3799 | + ... print(spph.displayname) |
3800 | cdrkit 1.0 in breezy-autotest |
3801 | iceweasel 1.0 in warty |
3802 | jkl 666 in hoary-test |
3803 | @@ -1071,7 +1071,7 @@ |
3804 | |
3805 | >>> build = binaries[0].binarypackagerelease.build |
3806 | >>> source_pub = build.source_package_release.publishings[0] |
3807 | - >>> print build.archive.name |
3808 | + >>> print(build.archive.name) |
3809 | otherppa |
3810 | |
3811 | # Copy the source into Celso's PPA, ensuring that the binaries |
3812 | @@ -1148,19 +1148,19 @@ |
3813 | >>> (source_pub, binary_pub, binary, binary_name, |
3814 | ... arch) = cprov_binaries.last() |
3815 | |
3816 | - >>> print source_pub.displayname |
3817 | + >>> print(source_pub.displayname) |
3818 | pqr 666 in breezy-autotest |
3819 | |
3820 | - >>> print binary_pub.displayname |
3821 | + >>> print(binary_pub.displayname) |
3822 | pqr-bin 666 in breezy-autotest i386 |
3823 | |
3824 | - >>> print binary.title |
3825 | + >>> print(binary.title) |
3826 | pqr-bin-666 |
3827 | |
3828 | - >>> print binary_name.name |
3829 | + >>> print(binary_name.name) |
3830 | pqr-bin |
3831 | |
3832 | - >>> print arch.displayname |
3833 | + >>> print(arch.displayname) |
3834 | ubuntutest Breezy Badger Autotest i386 |
3835 | |
3836 | We can retrieve all files related with Celso's PPA publications. |
3837 | @@ -1181,7 +1181,7 @@ |
3838 | >>> ordered_filenames = sorted( |
3839 | ... file.filename for source, file, content in cprov_files) |
3840 | |
3841 | - >>> print ordered_filenames[0] |
3842 | + >>> print(ordered_filenames[0]) |
3843 | firefox_0.9.2.orig.tar.gz |
3844 | |
3845 | We can also retrieve just the binary files related with Celso's PPA |
3846 | @@ -1201,7 +1201,7 @@ |
3847 | True |
3848 | |
3849 | >>> for source, file, content in binary_files: |
3850 | - ... print file.filename |
3851 | + ... print(file.filename) |
3852 | mozilla-firefox_0.9_i386.deb |
3853 | jkl-bin_666_all.deb |
3854 | jkl-bin_666_all.deb |
3855 | @@ -1231,19 +1231,19 @@ |
3856 | |
3857 | >>> source_pub, upload, source, file, content = a_change |
3858 | |
3859 | - >>> print source_pub.displayname |
3860 | + >>> print(source_pub.displayname) |
3861 | iceweasel 1.0 in warty |
3862 | |
3863 | - >>> print upload.displayname |
3864 | + >>> print(upload.displayname) |
3865 | iceweasel |
3866 | |
3867 | - >>> print source.title |
3868 | + >>> print(source.title) |
3869 | iceweasel - 1.0 |
3870 | |
3871 | - >>> print file.filename |
3872 | + >>> print(file.filename) |
3873 | mozilla-firefox_0.9_i386.changes |
3874 | |
3875 | - >>> print content.md5 |
3876 | + >>> print(content.md5) |
3877 | e4a7193a8f72fa2755e2162512069093 |
3878 | |
3879 | Last but not least the publishing set class allows for the bulk deletion |
3880 | @@ -1253,7 +1253,7 @@ |
3881 | ... cprov.archive.getPublishedSources( |
3882 | ... status=PackagePublishingStatus.PUBLISHED), |
3883 | ... key=operator.attrgetter('id')) |
3884 | - >>> print len(cprov_sources) |
3885 | + >>> print(len(cprov_sources)) |
3886 | 6 |
3887 | |
3888 | We will delete the first two source publishing history records and |
3889 | @@ -1283,7 +1283,7 @@ |
3890 | >>> cprov_sources = list( |
3891 | ... cprov.archive.getPublishedSources( |
3892 | ... status=PackagePublishingStatus.PUBLISHED)) |
3893 | - >>> print len(cprov_sources) |
3894 | + >>> print(len(cprov_sources)) |
3895 | 4 |
3896 | |
3897 | Analogously, the number of associated published binaries will be less |
3898 | @@ -1315,7 +1315,7 @@ |
3899 | >>> cprov_published_sources = cprov.archive.getPublishedSources( |
3900 | ... status=PackagePublishingStatus.PUBLISHED) |
3901 | >>> for spph in cprov_published_sources: |
3902 | - ... print spph.displayname |
3903 | + ... print(spph.displayname) |
3904 | jkl 666 in breezy-autotest |
3905 | mno 999 in breezy-autotest |
3906 | pmount 0.1-1 in warty |
3907 | @@ -1363,9 +1363,9 @@ |
3908 | ... given_ids = [obj.id for obj in given] |
3909 | ... returned_ids = [obj.id for obj in returned] |
3910 | ... if given_ids == returned_ids: |
3911 | - ... print 'Matches' |
3912 | + ... print('Matches') |
3913 | ... else: |
3914 | - ... print 'Mismatch:', given_ids, returned_ids |
3915 | + ... print('Mismatch:', given_ids, returned_ids) |
3916 | |
3917 | >>> compare_ids(cprov_published_sources, decorated_set) |
3918 | Matches |
3919 | @@ -1413,7 +1413,7 @@ |
3920 | |
3921 | >>> all_cprov_sources = cprov.archive.getPublishedSources() |
3922 | >>> for spph in all_cprov_sources: |
3923 | - ... print spph.displayname |
3924 | + ... print(spph.displayname) |
3925 | cdrkit 1.0 in breezy-autotest |
3926 | foo 666 in breezy-autotest |
3927 | iceweasel 1.0 in warty |
3928 | @@ -1430,7 +1430,7 @@ |
3929 | >>> pub_with_changes = all_cprov_sources[2] |
3930 | >>> the_source = pub_with_changes.sourcepackagerelease |
3931 | >>> the_change = the_source.upload_changesfile |
3932 | - >>> print the_change.filename |
3933 | + >>> print(the_change.filename) |
3934 | mozilla-firefox_0.9_i386.changes |
3935 | |
3936 | The same control-publication is reachable in the dictionary returned |
3937 | @@ -1439,7 +1439,7 @@ |
3938 | >>> decorated_changes = ArchiveSourcePublications(all_cprov_sources) |
3939 | >>> changes_by_source = decorated_changes.getChangesFileBySource() |
3940 | >>> decorated_change = changes_by_source.get(pub_with_changes) |
3941 | - >>> print decorated_change.filename |
3942 | + >>> print(decorated_change.filename) |
3943 | mozilla-firefox_0.9_i386.changes |
3944 | |
3945 | Enough internals! What really matters for callsites is that, when |
3946 | @@ -1451,7 +1451,7 @@ |
3947 | |
3948 | >>> decorated_pub = list(decorated_set)[1] |
3949 | |
3950 | - >>> print decorated_pub |
3951 | + >>> print(decorated_pub) |
3952 | <...ArchiveSourcePublication ...> |
3953 | |
3954 | >>> verifyObject(ISourcePackagePublishingHistory, decorated_pub) |
3955 | @@ -1471,13 +1471,13 @@ |
3956 | >>> pub_with_changes = cprov_published_sources[1] |
3957 | >>> the_source = pub_with_changes.sourcepackagerelease |
3958 | >>> changesfile = the_source.upload_changesfile |
3959 | - >>> print '%s (%s)' % (changesfile.filename, changesfile.content.md5) |
3960 | + >>> print('%s (%s)' % (changesfile.filename, changesfile.content.md5)) |
3961 | mno_999_source.changes (6168e17ba012fc3db6dc77e255243bd1) |
3962 | |
3963 | >>> decorated_pub_with_changes = list(decorated_set)[1] |
3964 | >>> decorated_source = decorated_pub_with_changes.sourcepackagerelease |
3965 | >>> changesfile = decorated_source.upload_changesfile |
3966 | - >>> print '%s (%s)' % (changesfile.filename, changesfile.content.md5) |
3967 | + >>> print('%s (%s)' % (changesfile.filename, changesfile.content.md5)) |
3968 | mno_999_source.changes (6168e17ba012fc3db6dc77e255243bd1) |
3969 | |
3970 | `ArchiveSourcePublication` also has a decorated version of the |
3971 | @@ -1517,18 +1517,18 @@ |
3972 | Create a small function for displaying the results: |
3973 | |
3974 | >>> def print_build_summary(summary): |
3975 | - ... print "%s\n%s\nRelevant builds:\n%s" % ( |
3976 | + ... print("%s\n%s\nRelevant builds:\n%s" % ( |
3977 | ... summary['status'].title, |
3978 | ... summary['status'].description, |
3979 | ... "\n".join( |
3980 | ... " - %s" % build.title for build in summary['builds']) |
3981 | - ... ) |
3982 | + ... )) |
3983 | |
3984 | >>> def print_build_summaries(summaries): |
3985 | ... count = 0 |
3986 | ... for source_id, summary in sorted(summaries.items()): |
3987 | ... count += 1 |
3988 | - ... print "Source number: %s" % count |
3989 | + ... print("Source number: %s" % count) |
3990 | ... print_build_summary(summary) |
3991 | |
3992 | And then grab the build summaries for firefox and foo: |
3993 | |
3994 | === modified file 'lib/lp/soyuz/doc/sampledata-setup.txt' |
3995 | --- lib/lp/soyuz/doc/sampledata-setup.txt 2011-12-28 17:03:06 +0000 |
3996 | +++ lib/lp/soyuz/doc/sampledata-setup.txt 2018-05-27 20:15:09 +0000 |
3997 | @@ -14,10 +14,10 @@ |
3998 | >>> return_code, output, error = run_script( |
3999 | ... 'utilities/soyuz-sampledata-setup.py') |
4000 | |
4001 | - >>> print return_code |
4002 | + >>> print(return_code) |
4003 | 0 |
4004 | |
4005 | - >>> print error |
4006 | + >>> print(error) |
4007 | INFO ... |
4008 | INFO Done. |
4009 | |
4010 | |
4011 | === modified file 'lib/lp/soyuz/doc/sourcepackagerelease.txt' |
4012 | --- lib/lp/soyuz/doc/sourcepackagerelease.txt 2016-02-29 18:48:23 +0000 |
4013 | +++ lib/lp/soyuz/doc/sourcepackagerelease.txt 2018-05-27 20:15:09 +0000 |
4014 | @@ -27,7 +27,7 @@ |
4015 | SourcePackageRelease is published in. |
4016 | |
4017 | >>> for archive in spr.published_archives: |
4018 | - ... print archive.displayname |
4019 | + ... print(archive.displayname) |
4020 | Primary Archive for Ubuntu Linux |
4021 | PPA for Celso Providelo |
4022 | |
4023 | @@ -241,12 +241,12 @@ |
4024 | ... archive=cprov_private_ppa) |
4025 | |
4026 | >>> test_sourcepackagerelease = private_publication.sourcepackagerelease |
4027 | - >>> print test_sourcepackagerelease.title |
4028 | + >>> print(test_sourcepackagerelease.title) |
4029 | foo - 666 |
4030 | |
4031 | >>> published_archives = test_sourcepackagerelease.published_archives |
4032 | >>> for archive in published_archives: |
4033 | - ... print archive.displayname |
4034 | + ... print(archive.displayname) |
4035 | PPA named pppa for Celso Providelo |
4036 | |
4037 | 'foo - 666' sourcepackagerelease is only published in Celso's Private |
4038 | @@ -282,7 +282,7 @@ |
4039 | |
4040 | >>> published_archives = test_sourcepackagerelease.published_archives |
4041 | >>> for archive in published_archives: |
4042 | - ... print archive.displayname |
4043 | + ... print(archive.displayname) |
4044 | Primary Archive for Ubuntu Linux |
4045 | PPA named pppa for Celso Providelo |
4046 | |
4047 | @@ -328,6 +328,6 @@ |
4048 | |
4049 | >>> published_archives = test_sourcepackagerelease.published_archives |
4050 | >>> for archive in published_archives: |
4051 | - ... print archive.displayname |
4052 | + ... print(archive.displayname) |
4053 | Primary Archive for Ubuntu Linux |
4054 | PPA named pppa for Celso Providelo |
4055 | |
4056 | === modified file 'lib/lp/soyuz/doc/soyuz-set-of-uploads.txt' |
4057 | --- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2018-05-06 08:52:34 +0000 |
4058 | +++ lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2018-05-27 20:15:09 +0000 |
4059 | @@ -249,7 +249,7 @@ |
4060 | ... if len(rejected_contents) > 0: |
4061 | ... # Clean up rejected entry |
4062 | ... shutil.rmtree(os.path.join(rejected_dir, leafname)) |
4063 | - ... print "Rejected uploads: %s" % rejected_contents |
4064 | + ... print("Rejected uploads: %s" % ", ".join(rejected_contents)) |
4065 | ... return |
4066 | ... |
4067 | ... assert len(os.listdir(failed_dir)) == 0, ( |
4068 | @@ -260,7 +260,7 @@ |
4069 | ... assert simple_publish(distro=distro), ( |
4070 | ... "Should publish at least one item") |
4071 | ... if loglevel is None or loglevel <= logging.INFO: |
4072 | - ... print "Upload complete." |
4073 | + ... print("Upload complete.") |
4074 | |
4075 | >>> from lp.testing.mail_helpers import ( |
4076 | ... pop_notifications, |
4077 | @@ -274,10 +274,10 @@ |
4078 | ... line. |
4079 | ... """ |
4080 | ... for message in pop_notifications(commit=False): |
4081 | - ... print "To:", sort_addresses(message['to']) |
4082 | - ... print "Subject:", message['subject'] |
4083 | - ... print message.get_payload()[0].as_string() |
4084 | - ... print '' |
4085 | + ... print("To:", sort_addresses(message['to'])) |
4086 | + ... print("Subject:", message['subject']) |
4087 | + ... print(message.get_payload()[0].as_string()) |
4088 | + ... print() |
4089 | |
4090 | The 'bar' package' is an arch-all package. We have four stages to the |
4091 | bar test. Each stage should be simple enough. First we have a new |
4092 | @@ -301,7 +301,7 @@ |
4093 | |
4094 | >>> stub.test_emails = [] |
4095 | >>> simulate_upload('bar_1.0-3', loglevel=logging.ERROR) |
4096 | - Rejected uploads: ['bar_1.0-3'] |
4097 | + Rejected uploads: bar_1.0-3 |
4098 | |
4099 | >>> read_email() |
4100 | To: Daniel Silverstone <daniel.silverstone@canonical.com> |
4101 | @@ -579,7 +579,7 @@ |
4102 | INFO Upload was rejected: |
4103 | INFO foo_1.0-3.dsc: Version older than that in the archive. 1.0-3 <= 2.9-2 |
4104 | ... |
4105 | - Rejected uploads: ['foo_1.0-3'] |
4106 | + Rejected uploads: foo_1.0-3 |
4107 | |
4108 | Note that the ancestry pointed in the rejection message (2.9-2) is what |
4109 | we expect. |
4110 | @@ -625,9 +625,9 @@ |
4111 | ... args.append("-P") |
4112 | ... script = os.path.join(config.root, "scripts", "publish-distro.py") |
4113 | ... result, stdout, stderr = run_script(script, args) |
4114 | - ... print stderr |
4115 | + ... print(stderr) |
4116 | ... if result != 0: |
4117 | - ... print "Script returned", result |
4118 | + ... print("Script returned", result) |
4119 | |
4120 | >>> def release_file_has_uncompressed_packages(path): |
4121 | ... """Does the release file include uncompressed Packages?""" |
4122 | |
4123 | === modified file 'lib/lp/soyuz/doc/soyuz-upload.txt' |
4124 | --- lib/lp/soyuz/doc/soyuz-upload.txt 2018-05-06 08:52:34 +0000 |
4125 | +++ lib/lp/soyuz/doc/soyuz-upload.txt 2018-05-27 20:15:09 +0000 |
4126 | @@ -170,7 +170,7 @@ |
4127 | >>> key_data = open(key_path).read() |
4128 | >>> key = gpg_handler.importPublicKey(key_data) |
4129 | >>> assert key is not None |
4130 | - >>> print key.fingerprint |
4131 | + >>> print(key.fingerprint) |
4132 | 33C0A61893A5DC5EB325B29E415A12CAC2F30234 |
4133 | |
4134 | |
4135 | @@ -254,11 +254,11 @@ |
4136 | the other three still in incoming. |
4137 | |
4138 | >>> for i in range(4): |
4139 | - ... find_upload_dir_result(i + 1) |
4140 | - 'rejected' |
4141 | - 'incoming' |
4142 | - 'incoming' |
4143 | - 'incoming' |
4144 | + ... print(find_upload_dir_result(i + 1)) |
4145 | + rejected |
4146 | + incoming |
4147 | + incoming |
4148 | + incoming |
4149 | |
4150 | |
4151 | Now continue with the real upload. |
4152 | @@ -272,8 +272,8 @@ |
4153 | |
4154 | >>> stdout, stderr = process.communicate() |
4155 | >>> if process.returncode != 0: |
4156 | - ... print stdout |
4157 | - ... print stderr |
4158 | + ... print(stdout) |
4159 | + ... print(stderr) |
4160 | |
4161 | |
4162 | Let's check if packages were uploaded correctly. |
4163 | @@ -342,9 +342,11 @@ |
4164 | Check the four uploads all ended up where we expected. |
4165 | |
4166 | >>> for i in range(0, 4): |
4167 | - ... find_upload_dir_result(i + 1) |
4168 | - 'rejected' |
4169 | - 'failed' |
4170 | + ... print(find_upload_dir_result(i + 1)) |
4171 | + rejected |
4172 | + None |
4173 | + None |
4174 | + failed |
4175 | |
4176 | Also check the upload folders contain all the files we uploaded. |
4177 | |
4178 | @@ -369,7 +371,7 @@ |
4179 | |
4180 | >>> from lp.soyuz.model.queue import PackageUploadSource |
4181 | >>> for name in package_names: |
4182 | - ... print name |
4183 | + ... print(name) |
4184 | ... spn = SourcePackageName.selectOneBy(name=name) |
4185 | ... spr = SourcePackageRelease.selectOneBy(sourcepackagenameID=spn.id) |
4186 | ... us = PackageUploadSource.selectOneBy( |
4187 | @@ -415,7 +417,7 @@ |
4188 | ... L.append("%s %s" % (queue_item.sourcepackagerelease.name, |
4189 | ... 'ACCEPTED')) |
4190 | >>> L.sort() |
4191 | - >>> print "\n".join(L) |
4192 | + >>> print("\n".join(L)) |
4193 | drdsl ACCEPTED |
4194 | etherwake ACCEPTED |
4195 | |
4196 | @@ -438,9 +440,9 @@ |
4197 | ... spr = SourcePackageRelease.selectOneBy(sourcepackagenameID=spn.id) |
4198 | ... sspph = SSPPH.selectOneBy(sourcepackagereleaseID=spr.id) |
4199 | ... if sspph: |
4200 | - ... print name, sspph.status.title |
4201 | + ... print(name, sspph.status.title) |
4202 | ... else: |
4203 | - ... print name, 'not Published' |
4204 | + ... print(name, 'not Published') |
4205 | drdsl Pending |
4206 | etherwake Pending |
4207 | |
4208 | @@ -453,7 +455,7 @@ |
4209 | ... stdout=subprocess.PIPE, |
4210 | ... stderr=subprocess.PIPE) |
4211 | >>> stdout, stderr = process.communicate() |
4212 | - >>> print stdout |
4213 | + >>> print(stdout) |
4214 | <BLANKLINE> |
4215 | |
4216 | >>> transaction.commit() |
4217 | @@ -473,10 +475,10 @@ |
4218 | ... for key in sorted(deb822): |
4219 | ... value = deb822.get_as_string(key) |
4220 | ... if not value or value[0] == '\n': |
4221 | - ... print '%s:%s' % (key, value) |
4222 | + ... print('%s:%s' % (key, value)) |
4223 | ... else: |
4224 | - ... print '%s: %s' % (key, value)
4225 | - ... print
4226 | + ... print('%s: %s' % (key, value))
4227 | + ... print()
4228 | |
4229 | Check the generation of a correct Sources tag file for the main |
4230 | component of ubuntutest/breezy-autotest, containing the only the |
4231 | @@ -490,7 +492,7 @@ |
4232 | ... "source/Sources.gz") as sources_file: |
4233 | ... for source in Sources.iter_paragraphs(sources_file): |
4234 | ... pprint_deb822(source) |
4235 | - ... print 'END' |
4236 | + ... print('END') |
4237 | Architecture: any |
4238 | Binary: etherwake |
4239 | Build-Depends: debhelper (>> 2.0) |
4240 | @@ -535,7 +537,7 @@ |
4241 | >>> for pub in SSPPH.selectBy( |
4242 | ... sourcepackagereleaseID=etherwake_drspr.sourcepackagerelease.id, |
4243 | ... orderBy=['id']): |
4244 | - ... print pub.status.name, pub.component.name, pub.pocket.name |
4245 | + ... print(pub.status.name, pub.component.name, pub.pocket.name) |
4246 | PUBLISHED universe RELEASE |
4247 | PENDING multiverse RELEASE |
4248 | |
4249 | @@ -564,7 +566,7 @@ |
4250 | >>> for pub in SSPPH.selectBy( |
4251 | ... sourcepackagereleaseID=etherwake_drspr.sourcepackagerelease.id, |
4252 | ... orderBy=['id']): |
4253 | - ... print pub.status.name, pub.component.name, pub.pocket.name |
4254 | + ... print(pub.status.name, pub.component.name, pub.pocket.name) |
4255 | SUPERSEDED universe RELEASE |
4256 | PUBLISHED multiverse RELEASE |
4257 | |
4258 | @@ -573,14 +575,14 @@ |
4259 | >>> main_sources = gzip.open( |
4260 | ... "/var/tmp/archive/ubuntutest/dists/breezy-autotest" |
4261 | ... "/main/source/Sources.gz").read() |
4262 | - >>> print main_sources + '\nEND' |
4263 | + >>> print(main_sources + '\nEND') |
4264 | <BLANKLINE> |
4265 | END |
4266 | |
4267 | >>> multiverse_sources = gzip.open( |
4268 | ... "/var/tmp/archive/ubuntutest/dists/breezy-autotest" |
4269 | ... "/multiverse/source/Sources.gz").read() |
4270 | - >>> print multiverse_sources + '\nEND' |
4271 | + >>> print(multiverse_sources + '\nEND') |
4272 | Package: drdsl |
4273 | ... |
4274 | Package: etherwake |
4275 | @@ -600,7 +602,7 @@ |
4276 | |
4277 | >>> releasefile_contents = open("/var/tmp/archive/ubuntutest/dists/" |
4278 | ... "breezy-autotest/Release").read() |
4279 | - >>> print releasefile_contents + '\nEND' #doctest: -NORMALIZE_WHITESPACE |
4280 | + >>> print(releasefile_contents + '\nEND') #doctest: -NORMALIZE_WHITESPACE |
4281 | Origin: ubuntutest |
4282 | Label: ubuntutest |
4283 | Suite: breezy-autotest |
4284 | |
4285 | === modified file 'lib/lp/soyuz/doc/vocabularies.txt' |
4286 | --- lib/lp/soyuz/doc/vocabularies.txt 2018-03-01 23:00:35 +0000 |
4287 | +++ lib/lp/soyuz/doc/vocabularies.txt 2018-05-27 20:15:09 +0000 |
4288 | @@ -159,7 +159,7 @@ |
4289 | >>> verifyObject(IHugeVocabulary, vocabulary) |
4290 | True |
4291 | |
4292 | - >>> print vocabulary.displayname |
4293 | + >>> print(vocabulary.displayname) |
4294 | Select a PPA |
4295 | |
4296 | Iterations over the PPA vocabulary will return on PPA archives. |
4297 | @@ -175,13 +175,13 @@ |
4298 | |
4299 | >>> cprov_term = vocabulary.getTermByToken('~cprov/ubuntu/ppa') |
4300 | |
4301 | - >>> print cprov_term.token |
4302 | + >>> print(cprov_term.token) |
4303 | ~cprov/ubuntu/ppa |
4304 | |
4305 | - >>> print cprov_term.value |
4306 | + >>> print(cprov_term.value) |
4307 | <... lp.soyuz.model.archive.Archive instance ...> |
4308 | |
4309 | - >>> print cprov_term.title |
4310 | + >>> print(cprov_term.title) |
4311 | packages to help my friends. |
4312 | |
4313 | Not found terms result in LookupError. |
4314 | @@ -196,7 +196,7 @@ |
4315 | >>> def print_search_results(results): |
4316 | ... for archive in results: |
4317 | ... term = vocabulary.toTerm(archive) |
4318 | - ... print '%s: %s' % (term.token, term.title) |
4319 | + ... print('%s: %s' % (term.token, term.title)) |
4320 | |
4321 | >>> cprov_search = vocabulary.search(u'cprov') |
4322 | >>> print_search_results(cprov_search) |
4323 | @@ -256,14 +256,14 @@ |
4324 | >>> flush_database_updates() |
4325 | |
4326 | >>> cprov_term = vocabulary.getTermByToken('~cprov/ubuntu/ppa') |
4327 | - >>> print cprov_term.title |
4328 | + >>> print(cprov_term.title) |
4329 | Single line. |
4330 | |
4331 | >>> cprov.archive.description = "First line\nSecond line." |
4332 | >>> flush_database_updates() |
4333 | |
4334 | >>> cprov_term = vocabulary.getTermByToken('~cprov/ubuntu/ppa') |
4335 | - >>> print cprov_term.title |
4336 | + >>> print(cprov_term.title) |
4337 | First line |
4338 | |
4339 | PPAs with empty description are identified and have a title saying so. |
4340 | @@ -272,7 +272,7 @@ |
4341 | >>> flush_database_updates() |
4342 | |
4343 | >>> cprov_term = vocabulary.getTermByToken('~cprov/ubuntu/ppa') |
4344 | - >>> print cprov_term.title |
4345 | + >>> print(cprov_term.title) |
4346 | No description available |
4347 | |
4348 | Queries on empty strings also results in a valid SelectResults. |
4349 | |
4350 | === modified file 'lib/lp/soyuz/tests/test_doc.py' |
4351 | --- lib/lp/soyuz/tests/test_doc.py 2018-05-06 08:52:34 +0000 |
4352 | +++ lib/lp/soyuz/tests/test_doc.py 2018-05-27 20:15:09 +0000 |
4353 | @@ -56,7 +56,7 @@ |
4354 | |
4355 | def uploaderSetUp(test): |
4356 | """setup the package uploader script tests.""" |
4357 | - setUp(test) |
4358 | + setUp(test, future=True) |
4359 | switch_dbuser('uploader') |
4360 | |
4361 | |
4362 | @@ -64,7 +64,7 @@ |
4363 | test_dbuser = config.statistician.dbuser |
4364 | test.globs['test_dbuser'] = test_dbuser |
4365 | switch_dbuser(test_dbuser) |
4366 | - setUp(test) |
4367 | + setUp(test, future=True) |
4368 | |
4369 | |
4370 | def statisticianTearDown(test): |
4371 | @@ -75,7 +75,7 @@ |
4372 | lobotomize_stevea() |
4373 | test_dbuser = config.uploadqueue.dbuser |
4374 | switch_dbuser(test_dbuser) |
4375 | - setUp(test) |
4376 | + setUp(test, future=True) |
4377 | test.globs['test_dbuser'] = test_dbuser |
4378 | |
4379 | |
4380 | @@ -89,7 +89,7 @@ |
4381 | lobotomize_stevea() |
4382 | test_dbuser = config.uploader.dbuser |
4383 | switch_dbuser(test_dbuser) |
4384 | - setUp(test) |
4385 | + setUp(test, future=True) |
4386 | test.globs['test_dbuser'] = test_dbuser |
4387 | |
4388 | |
4389 | @@ -109,7 +109,7 @@ |
4390 | ), |
4391 | 'distroarchseriesbinarypackage.txt': LayeredDocFileSuite( |
4392 | '../doc/distroarchseriesbinarypackage.txt', |
4393 | - setUp=setUp, tearDown=tearDown, |
4394 | + setUp=lambda test: setUp(test, future=True), tearDown=tearDown, |
4395 | layer=LaunchpadZopelessLayer |
4396 | ), |
4397 | 'closing-bugs-from-changelogs.txt': LayeredDocFileSuite( |
4398 | @@ -127,6 +127,7 @@ |
4399 | ), |
4400 | 'soyuz-set-of-uploads.txt': LayeredDocFileSuite( |
4401 | '../doc/soyuz-set-of-uploads.txt', |
4402 | + setUp=lambda test: setUp(test, future=True), |
4403 | layer=LaunchpadZopelessLayer, |
4404 | ), |
4405 | 'package-relationship.txt': LayeredDocFileSuite( |
4406 | @@ -134,27 +135,27 @@ |
4407 | stdout_logging=False, layer=None), |
4408 | 'publishing.txt': LayeredDocFileSuite( |
4409 | '../doc/publishing.txt', |
4410 | - setUp=setUp, |
4411 | + setUp=lambda test: setUp(test, future=True), |
4412 | layer=LaunchpadZopelessLayer, |
4413 | ), |
4414 | 'build-failedtoupload-workflow.txt': LayeredDocFileSuite( |
4415 | '../doc/build-failedtoupload-workflow.txt', |
4416 | - setUp=setUp, tearDown=tearDown, |
4417 | + setUp=lambda test: setUp(test, future=True), tearDown=tearDown, |
4418 | layer=LaunchpadZopelessLayer, |
4419 | ), |
4420 | 'distroseriesqueue.txt': LayeredDocFileSuite( |
4421 | '../doc/distroseriesqueue.txt', |
4422 | - setUp=setUp, tearDown=tearDown, |
4423 | + setUp=lambda test: setUp(test, future=True), tearDown=tearDown, |
4424 | layer=LaunchpadZopelessLayer, |
4425 | ), |
4426 | 'distroseriesqueue-notify.txt': LayeredDocFileSuite( |
4427 | '../doc/distroseriesqueue-notify.txt', |
4428 | - setUp=setUp, tearDown=tearDown, |
4429 | + setUp=lambda test: setUp(test, future=True), tearDown=tearDown, |
4430 | layer=LaunchpadZopelessLayer, |
4431 | ), |
4432 | 'distroseriesqueue-translations.txt': LayeredDocFileSuite( |
4433 | '../doc/distroseriesqueue-translations.txt', |
4434 | - setUp=setUp, tearDown=tearDown, |
4435 | + setUp=lambda test: setUp(test, future=True), tearDown=tearDown, |
4436 | layer=LaunchpadZopelessLayer, |
4437 | ), |
4438 | } |
4439 | @@ -190,7 +191,8 @@ |
4440 | for filename in filenames: |
4441 | path = os.path.join('../doc', filename) |
4442 | one_test = LayeredDocFileSuite( |
4443 | - path, setUp=setUp, tearDown=tearDown, |
4444 | + path, |
4445 | + setUp=lambda test: setUp(test, future=True), tearDown=tearDown, |
4446 | layer=LaunchpadFunctionalLayer, |
4447 | stdout_logging_level=logging.WARNING) |
4448 | suite.addTest(one_test) |
Self-approving huge but mostly-mechanical change.