Merge ~cjwatson/launchpad:bs4-feeds into launchpad:master

Proposed by Colin Watson
Status: Merged
Approved by: Colin Watson
Approved revision: 3535948a45d3a6eb486436b61d7d4ff6459175e5
Merge reported by: Otto Co-Pilot
Merged at revision: not available
Proposed branch: ~cjwatson/launchpad:bs4-feeds
Merge into: launchpad:master
Diff against target: 1148 lines (+247/-223)
10 files modified
lib/lp/bugs/stories/feeds/xx-bug-atom.txt (+51/-42)
lib/lp/bugs/stories/feeds/xx-bug-html.txt (+5/-4)
lib/lp/code/stories/feeds/xx-branch-atom.txt (+29/-22)
lib/lp/code/stories/feeds/xx-revision-atom.txt (+9/-9)
lib/lp/registry/stories/announcements/xx-announcements.txt (+13/-15)
lib/lp/services/feeds/doc/feeds.txt (+1/-1)
lib/lp/services/feeds/feed.py (+2/-4)
lib/lp/services/feeds/stories/xx-links.txt (+109/-98)
lib/lp/services/feeds/stories/xx-security.txt (+19/-19)
lib/lp/services/feeds/tests/helper.py (+9/-9)
Reviewer Review Type Date Requested Status
Colin Watson (community) Approve
Review via email: mp+377977@code.launchpad.net

Commit message

Port feed tests to Beautiful Soup 4

~cjwatson/launchpad:bs4-feeds updated
3535948... by Colin Watson

Port xx-announcements.txt too

It uses lp.services.feeds.tests.helper, so it needs to be ported along
with the other feed tests.

By default, Beautiful Soup 4 produces output with HTML entities
converted to Unicode characters. I could have used formatter="html" or
similar to restore something closer to the old behaviour in this case,
but the new behaviour is easier to read.
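
For illustration only (not part of this branch): a minimal doctest-style
sketch of the difference, assuming bs4 is importable directly and using its
bundled html.parser. The default formatter decodes entities to Unicode and
only re-escapes markup-significant characters such as "&", while
formatter="html" re-encodes named HTML entities on output, closer to the
old Beautiful Soup 3 convertEntities behaviour.

    >>> from bs4 import BeautifulSoup
    >>> soup = BeautifulSoup(
    ...     "<p>caf&eacute; &amp; cr&egrave;me</p>", "html.parser")
    >>> print(soup.p.decode())
    <p>café &amp; crème</p>
    >>> print(soup.p.decode(formatter="html"))
    <p>caf&eacute; &amp; cr&egrave;me</p>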

Revision history for this message
Colin Watson (cjwatson) wrote:

Self-approving: this is almost entirely tests, and is mechanical enough that it's not very interesting to review.

review: Approve

Preview Diff

diff --git a/lib/lp/bugs/stories/feeds/xx-bug-atom.txt b/lib/lp/bugs/stories/feeds/xx-bug-atom.txt
index 4b0faa4..840b943 100644
--- a/lib/lp/bugs/stories/feeds/xx-bug-atom.txt
+++ b/lib/lp/bugs/stories/feeds/xx-bug-atom.txt
@@ -1,10 +1,12 @@
1= Atom Feeds =1= Atom Feeds =
22
3Atom feeds produce XML not HTML. Therefore we must parse the output as XML3Atom feeds produce XML not HTML. Therefore we must parse the output as XML
4using BeautifulStoneSoup instead of BSS or the helper functions.4by asking BeautifulSoup to use lxml.
55
6 >>> from BeautifulSoup import BeautifulStoneSoup as BSS6 >>> from lp.services.beautifulsoup import (
7 >>> from BeautifulSoup import SoupStrainer7 ... BeautifulSoup4 as BeautifulSoup,
8 ... SoupStrainer4 as SoupStrainer,
9 ... )
8 >>> from lp.services.feeds.tests.helper import (10 >>> from lp.services.feeds.tests.helper import (
9 ... parse_entries, parse_links, validate_feed)11 ... parse_entries, parse_links, validate_feed)
1012
@@ -26,25 +28,26 @@ point to the bugs themselves.
26 >>> validate_feed(browser.contents,28 >>> validate_feed(browser.contents,
27 ... browser.headers['content-type'], browser.url)29 ... browser.headers['content-type'], browser.url)
28 No Errors30 No Errors
29 >>> BSS(browser.contents).title.contents31 >>> BeautifulSoup(browser.contents, 'xml').title.contents
30 [u'Bugs in Jokosher']32 [u'Bugs in Jokosher']
31 >>> browser.url33 >>> browser.url
32 'http://feeds.launchpad.test/jokosher/latest-bugs.atom'34 'http://feeds.launchpad.test/jokosher/latest-bugs.atom'
3335
34 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))36 >>> soup = BeautifulSoup(
37 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
35 >>> print(extract_text(soup.find('id')))38 >>> print(extract_text(soup.find('id')))
36 tag:launchpad.net,2007-03-15:/bugs/jokosher39 tag:launchpad.net,2007-03-15:/bugs/jokosher
37 >>> alternate_links = parse_links(browser.contents, 'alternate')40 >>> alternate_links = parse_links(browser.contents, 'alternate')
38 >>> for link in alternate_links:41 >>> for link in alternate_links:
39 ... print(link)42 ... print(link)
40 <link rel="alternate" href="http://bugs.launchpad.test/jokosher" />43 <link href="http://bugs.launchpad.test/jokosher" rel="alternate"/>
41 <link rel="alternate" href="http://bugs.launchpad.test/bugs/12" />44 <link href="http://bugs.launchpad.test/bugs/12" rel="alternate"/>
42 <link rel="alternate" href="http://bugs.launchpad.test/bugs/11" />45 <link href="http://bugs.launchpad.test/bugs/11" rel="alternate"/>
4346
44 >>> self_links = parse_links(browser.contents, 'self')47 >>> self_links = parse_links(browser.contents, 'self')
45 >>> for link in self_links:48 >>> for link in self_links:
46 ... print(link)49 ... print(link)
47 <link rel="self" href="http://feeds.launchpad.test/jokosher/latest-bugs.atom" />50 <link href="http://feeds.launchpad.test/jokosher/latest-bugs.atom" rel="self"/>
4851
49 >>> entries = parse_entries(browser.contents)52 >>> entries = parse_entries(browser.contents)
50 >>> print(len(entries))53 >>> print(len(entries))
@@ -83,19 +86,20 @@ as the latest bugs feed for a product.
83 >>> validate_feed(browser.contents,86 >>> validate_feed(browser.contents,
84 ... browser.headers['content-type'], browser.url)87 ... browser.headers['content-type'], browser.url)
85 No Errors88 No Errors
86 >>> BSS(browser.contents).title.contents89 >>> BeautifulSoup(browser.contents, 'xml').title.contents
87 [u'Bugs in The Mozilla Project']90 [u'Bugs in The Mozilla Project']
88 >>> browser.url91 >>> browser.url
89 'http://feeds.launchpad.test/mozilla/latest-bugs.atom'92 'http://feeds.launchpad.test/mozilla/latest-bugs.atom'
9093
91 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))94 >>> soup = BeautifulSoup(
95 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
92 >>> print(extract_text(soup.find('id')))96 >>> print(extract_text(soup.find('id')))
93 tag:launchpad.net,2004-09-24:/bugs/mozilla97 tag:launchpad.net,2004-09-24:/bugs/mozilla
9498
95 >>> self_links = parse_links(browser.contents, 'self')99 >>> self_links = parse_links(browser.contents, 'self')
96 >>> for link in self_links:100 >>> for link in self_links:
97 ... print(link)101 ... print(link)
98 <link rel="self" href="http://feeds.launchpad.test/mozilla/latest-bugs.atom" />102 <link href="http://feeds.launchpad.test/mozilla/latest-bugs.atom" rel="self"/>
99103
100 >>> entries = parse_entries(browser.contents)104 >>> entries = parse_entries(browser.contents)
101 >>> print(len(entries))105 >>> print(len(entries))
@@ -144,19 +148,20 @@ of content as the latest bugs feed for a product.
144 >>> validate_feed(browser.contents,148 >>> validate_feed(browser.contents,
145 ... browser.headers['content-type'], browser.url)149 ... browser.headers['content-type'], browser.url)
146 No Errors150 No Errors
147 >>> BSS(browser.contents).title.contents151 >>> BeautifulSoup(browser.contents, 'xml').title.contents
148 [u'Bugs in Ubuntu']152 [u'Bugs in Ubuntu']
149 >>> browser.url153 >>> browser.url
150 'http://feeds.launchpad.test/ubuntu/latest-bugs.atom'154 'http://feeds.launchpad.test/ubuntu/latest-bugs.atom'
151155
152 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))156 >>> soup = BeautifulSoup(
157 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
153 >>> print(extract_text(soup.find('id')))158 >>> print(extract_text(soup.find('id')))
154 tag:launchpad.net,2006-10-16:/bugs/ubuntu159 tag:launchpad.net,2006-10-16:/bugs/ubuntu
155160
156 >>> self_links = parse_links(browser.contents, 'self')161 >>> self_links = parse_links(browser.contents, 'self')
157 >>> for link in self_links:162 >>> for link in self_links:
158 ... print(link)163 ... print(link)
159 <link rel="self" href="http://feeds.launchpad.test/ubuntu/latest-bugs.atom" />164 <link href="http://feeds.launchpad.test/ubuntu/latest-bugs.atom" rel="self"/>
160165
161 >>> entries = parse_entries(browser.contents)166 >>> entries = parse_entries(browser.contents)
162 >>> print(len(entries))167 >>> print(len(entries))
@@ -214,11 +219,8 @@ The bug should be included in the feed.
214219
215Private teams should show as '-'.220Private teams should show as '-'.
216221
217 >>> entry_content = BSS(222 >>> soup = BeautifulSoup(entry.find('content').text, 'xml')
218 ... entry.find('content').text,223 >>> print([tr.find_all('td')[4].text for tr in soup.find_all('tr')[1:4]])
219 ... convertEntities=BSS.HTML_ENTITIES)
220 >>> soup = BSS(entry_content.text)
221 >>> print([tr.findAll('td')[4].text for tr in soup.findAll('tr')[1:4]])
222 [u'Mark Shuttleworth', u'-', u'-']224 [u'Mark Shuttleworth', u'-', u'-']
223225
224== Latest bugs for a source package ==226== Latest bugs for a source package ==
@@ -232,11 +234,12 @@ type of content as the latest bugs feed for a product.
232 >>> validate_feed(browser.contents,234 >>> validate_feed(browser.contents,
233 ... browser.headers['content-type'], browser.url)235 ... browser.headers['content-type'], browser.url)
234 No Errors236 No Errors
235 >>> BSS(browser.contents).title.contents237 >>> BeautifulSoup(browser.contents, 'xml').title.contents
236 [u'Bugs in thunderbird in Ubuntu']238 [u'Bugs in thunderbird in Ubuntu']
237 >>> browser.url239 >>> browser.url
238 'http://feeds.launchpad.test/ubuntu/+source/thunderbird/latest-bugs.atom'240 'http://feeds.launchpad.test/ubuntu/+source/thunderbird/latest-bugs.atom'
239 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))241 >>> soup = BeautifulSoup(
242 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
240 >>> print(extract_text(soup.find('id')))243 >>> print(extract_text(soup.find('id')))
241 tag:launchpad.net,2008:/bugs/ubuntu/+source/thunderbird244 tag:launchpad.net,2008:/bugs/ubuntu/+source/thunderbird
242 >>> entries = parse_entries(browser.contents)245 >>> entries = parse_entries(browser.contents)
@@ -264,19 +267,20 @@ type of content as the latest bugs feed for a product.
264 >>> validate_feed(browser.contents,267 >>> validate_feed(browser.contents,
265 ... browser.headers['content-type'], browser.url)268 ... browser.headers['content-type'], browser.url)
266 No Errors269 No Errors
267 >>> BSS(browser.contents).title.contents270 >>> BeautifulSoup(browser.contents, 'xml').title.contents
268 [u'Bugs in Hoary']271 [u'Bugs in Hoary']
269 >>> browser.url272 >>> browser.url
270 'http://feeds.launchpad.test/ubuntu/hoary/latest-bugs.atom'273 'http://feeds.launchpad.test/ubuntu/hoary/latest-bugs.atom'
271274
272 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))275 >>> soup = BeautifulSoup(
276 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
273 >>> print(extract_text(soup.find('id')))277 >>> print(extract_text(soup.find('id')))
274 tag:launchpad.net,2006-10-16:/bugs/ubuntu/hoary278 tag:launchpad.net,2006-10-16:/bugs/ubuntu/hoary
275279
276 >>> self_links = parse_links(browser.contents, 'self')280 >>> self_links = parse_links(browser.contents, 'self')
277 >>> for link in self_links:281 >>> for link in self_links:
278 ... print(link)282 ... print(link)
279 <link rel="self" href="http://feeds.launchpad.test/ubuntu/hoary/latest-bugs.atom" />283 <link href="http://feeds.launchpad.test/ubuntu/hoary/latest-bugs.atom" rel="self"/>
280284
281 >>> entries = parse_entries(browser.contents)285 >>> entries = parse_entries(browser.contents)
282 >>> print(len(entries))286 >>> print(len(entries))
@@ -304,19 +308,20 @@ type of content as the latest bugs feed for a product.
304 >>> validate_feed(browser.contents,308 >>> validate_feed(browser.contents,
305 ... browser.headers['content-type'], browser.url)309 ... browser.headers['content-type'], browser.url)
306 No Errors310 No Errors
307 >>> BSS(browser.contents).title.contents311 >>> BeautifulSoup(browser.contents, 'xml').title.contents
308 [u'Bugs in 1.0']312 [u'Bugs in 1.0']
309 >>> browser.url313 >>> browser.url
310 'http://feeds.launchpad.test/firefox/1.0/latest-bugs.atom'314 'http://feeds.launchpad.test/firefox/1.0/latest-bugs.atom'
311315
312 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))316 >>> soup = BeautifulSoup(
317 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
313 >>> print(extract_text(soup.find('id')))318 >>> print(extract_text(soup.find('id')))
314 tag:launchpad.net,2005-06-06:/bugs/firefox/1.0319 tag:launchpad.net,2005-06-06:/bugs/firefox/1.0
315320
316 >>> self_links = parse_links(browser.contents, 'self')321 >>> self_links = parse_links(browser.contents, 'self')
317 >>> for link in self_links:322 >>> for link in self_links:
318 ... print(link)323 ... print(link)
319 <link rel="self" href="http://feeds.launchpad.test/firefox/1.0/latest-bugs.atom" />324 <link href="http://feeds.launchpad.test/firefox/1.0/latest-bugs.atom" rel="self"/>
320325
321 >>> entries = parse_entries(browser.contents)326 >>> entries = parse_entries(browser.contents)
322 >>> print(len(entries))327 >>> print(len(entries))
@@ -342,19 +347,20 @@ This feed gets the latest bugs for a person.
342 >>> validate_feed(browser.contents,347 >>> validate_feed(browser.contents,
343 ... browser.headers['content-type'], browser.url)348 ... browser.headers['content-type'], browser.url)
344 No Errors349 No Errors
345 >>> BSS(browser.contents).title.contents350 >>> BeautifulSoup(browser.contents, 'xml').title.contents
346 [u'Bugs for Foo Bar']351 [u'Bugs for Foo Bar']
347 >>> browser.url352 >>> browser.url
348 'http://feeds.launchpad.test/~name16/latest-bugs.atom'353 'http://feeds.launchpad.test/~name16/latest-bugs.atom'
349354
350 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))355 >>> soup = BeautifulSoup(
356 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
351 >>> print(extract_text(soup.find('id')))357 >>> print(extract_text(soup.find('id')))
352 tag:launchpad.net,2005-06-06:/bugs/~name16358 tag:launchpad.net,2005-06-06:/bugs/~name16
353359
354 >>> self_links = parse_links(browser.contents, 'self')360 >>> self_links = parse_links(browser.contents, 'self')
355 >>> for link in self_links:361 >>> for link in self_links:
356 ... print(link)362 ... print(link)
357 <link rel="self" href="http://feeds.launchpad.test/~name16/latest-bugs.atom" />363 <link href="http://feeds.launchpad.test/~name16/latest-bugs.atom" rel="self"/>
358364
359 >>> entries = parse_entries(browser.contents)365 >>> entries = parse_entries(browser.contents)
360 >>> print(len(entries))366 >>> print(len(entries))
@@ -417,17 +423,18 @@ some results.
417 >>> validate_feed(browser.contents,423 >>> validate_feed(browser.contents,
418 ... browser.headers['content-type'], browser.url)424 ... browser.headers['content-type'], browser.url)
419 No Errors425 No Errors
420 >>> BSS(browser.contents).title.contents426 >>> BeautifulSoup(browser.contents, 'xml').title.contents
421 [u'Bugs for Simple Team']427 [u'Bugs for Simple Team']
422428
423 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))429 >>> soup = BeautifulSoup(
430 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
424 >>> print(extract_text(soup.find('id')))431 >>> print(extract_text(soup.find('id')))
425 tag:launchpad.net,2007-02-21:/bugs/~simple-team432 tag:launchpad.net,2007-02-21:/bugs/~simple-team
426433
427 >>> self_links = parse_links(browser.contents, 'self')434 >>> self_links = parse_links(browser.contents, 'self')
428 >>> for link in self_links:435 >>> for link in self_links:
429 ... print(link)436 ... print(link)
430 <link rel="self" href="http://feeds.launchpad.test/~simple-team/latest-bugs.atom" />437 <link href="http://feeds.launchpad.test/~simple-team/latest-bugs.atom" rel="self"/>
431438
432 >>> entries = parse_entries(browser.contents)439 >>> entries = parse_entries(browser.contents)
433 >>> print(len(entries))440 >>> print(len(entries))
@@ -445,19 +452,20 @@ This feed gets the latest bugs reported against any target.
445 >>> validate_feed(browser.contents,452 >>> validate_feed(browser.contents,
446 ... browser.headers['content-type'], browser.url)453 ... browser.headers['content-type'], browser.url)
447 No Errors454 No Errors
448 >>> BSS(browser.contents).title.contents455 >>> BeautifulSoup(browser.contents, 'xml').title.contents
449 [u'Launchpad bugs']456 [u'Launchpad bugs']
450 >>> browser.url457 >>> browser.url
451 'http://feeds.launchpad.test/bugs/latest-bugs.atom'458 'http://feeds.launchpad.test/bugs/latest-bugs.atom'
452459
453 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))460 >>> soup = BeautifulSoup(
461 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
454 >>> print(extract_text(soup.find('id')))462 >>> print(extract_text(soup.find('id')))
455 tag:launchpad.net,2008:/bugs463 tag:launchpad.net,2008:/bugs
456464
457 >>> self_links = parse_links(browser.contents, 'self')465 >>> self_links = parse_links(browser.contents, 'self')
458 >>> for link in self_links:466 >>> for link in self_links:
459 ... print(link)467 ... print(link)
460 <link rel="self" href="http://feeds.launchpad.test/bugs/latest-bugs.atom" />468 <link href="http://feeds.launchpad.test/bugs/latest-bugs.atom" rel="self"/>
461469
462 >>> entries = parse_entries(browser.contents)470 >>> entries = parse_entries(browser.contents)
463 >>> print(len(entries))471 >>> print(len(entries))
@@ -508,10 +516,11 @@ The bug search feed can be tested after setting is_bug_search_feed_active
508to True.516to True.
509517
510 >>> browser.open(url)518 >>> browser.open(url)
511 >>> BSS(browser.contents).title.contents519 >>> BeautifulSoup(browser.contents, 'xml').title.contents
512 [u'Bugs from custom search']520 [u'Bugs from custom search']
513521
514 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))522 >>> soup = BeautifulSoup(
523 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
515 >>> feed_id = extract_text(soup.find('id'))524 >>> feed_id = extract_text(soup.find('id'))
516 >>> print(feed_id)525 >>> print(feed_id)
517 tag:launchpad.net,2008:/+bugs.atom?field.scope.target=&amp;field.scope=all&amp;field.searchtext=&amp;search=Search+Bug+Reports526 tag:launchpad.net,2008:/+bugs.atom?field.scope.target=&amp;field.scope=all&amp;field.searchtext=&amp;search=Search+Bug+Reports
@@ -523,7 +532,7 @@ to True.
523 >>> self_links = parse_links(browser.contents, 'self')532 >>> self_links = parse_links(browser.contents, 'self')
524 >>> for link in self_links:533 >>> for link in self_links:
525 ... print(link)534 ... print(link)
526 <link rel="self" href="http://feeds.launchpad.test/bugs/+bugs.atom?field.scope.target=&amp;field.scope=all&amp;field.searchtext=&amp;search=Search+Bug+Reports" />535 <link href="http://feeds.launchpad.test/bugs/+bugs.atom?field.scope.target=&amp;field.scope=all&amp;field.searchtext=&amp;search=Search+Bug+Reports" rel="self"/>
527536
528 >>> entries = parse_entries(browser.contents)537 >>> entries = parse_entries(browser.contents)
529 >>> print(len(entries))538 >>> print(len(entries))
@@ -554,7 +563,7 @@ This feed shows the status of a single bug.
554 >>> validate_feed(browser.contents,563 >>> validate_feed(browser.contents,
555 ... browser.headers['content-type'], browser.url)564 ... browser.headers['content-type'], browser.url)
556 No Errors565 No Errors
557 >>> BSS(browser.contents).title.contents566 >>> BeautifulSoup(browser.contents, 'xml').title.contents
558 [u'Bug 1']567 [u'Bug 1']
559 >>> entries = parse_entries(browser.contents)568 >>> entries = parse_entries(browser.contents)
560 >>> print(len(entries))569 >>> print(len(entries))
@@ -565,7 +574,7 @@ This feed shows the status of a single bug.
565 >>> self_links = parse_links(browser.contents, 'self')574 >>> self_links = parse_links(browser.contents, 'self')
566 >>> for link in self_links:575 >>> for link in self_links:
567 ... print(link)576 ... print(link)
568 <link rel="self" href="http://feeds.launchpad.test/bugs/1/bug.atom" />577 <link href="http://feeds.launchpad.test/bugs/1/bug.atom" rel="self"/>
569578
570== Feeds Configuration Options ==579== Feeds Configuration Options ==
571580
diff --git a/lib/lp/bugs/stories/feeds/xx-bug-html.txt b/lib/lp/bugs/stories/feeds/xx-bug-html.txt
index fff1f4f..fa02653 100644
--- a/lib/lp/bugs/stories/feeds/xx-bug-html.txt
+++ b/lib/lp/bugs/stories/feeds/xx-bug-html.txt
@@ -5,15 +5,16 @@ The content of an HTML feed is very similar to an Atom feed, but is formatted
5as HTML instead of Atom.5as HTML instead of Atom.
66
7 >>> from lp.services.beautifulsoup import (7 >>> from lp.services.beautifulsoup import (
8 ... BeautifulSoup,8 ... BeautifulSoup4 as BeautifulSoup,
9 ... SoupStrainer,9 ... SoupStrainer4 as SoupStrainer,
10 ... )10 ... )
1111
12Define a helper function for parsing the entries:12Define a helper function for parsing the entries:
1313
14 >>> def parse_entries(contents):14 >>> def parse_entries(contents):
15 ... entries = [tag for tag in BeautifulSoup(browser.contents,15 ... entries = [
16 ... parseOnlyThese=SoupStrainer('tr'))]16 ... tag for tag in BeautifulSoup(
17 ... browser.contents, parse_only=SoupStrainer('tr'))]
17 ... return entries18 ... return entries
1819
19And two for printing the results:20And two for printing the results:
diff --git a/lib/lp/code/stories/feeds/xx-branch-atom.txt b/lib/lp/code/stories/feeds/xx-branch-atom.txt
index db90f8f..74db8e1 100644
--- a/lib/lp/code/stories/feeds/xx-branch-atom.txt
+++ b/lib/lp/code/stories/feeds/xx-branch-atom.txt
@@ -1,10 +1,12 @@
1= Atom Feeds For Branches =1= Atom Feeds For Branches =
22
3Atom feeds produce XML not HTML. Therefore we must parse the output as XML3Atom feeds produce XML not HTML. Therefore we must parse the output as XML
4using BeautifulStoneSoup instead of BeautifulSoup or the helper functions.4by asking BeautifulSoup to use lxml.
55
6 >>> from BeautifulSoup import BeautifulStoneSoup as BSS6 >>> from lp.services.beautifulsoup import (
7 >>> from BeautifulSoup import SoupStrainer7 ... BeautifulSoup4 as BeautifulSoup,
8 ... SoupStrainer4 as SoupStrainer,
9 ... )
8 >>> from lp.services.feeds.tests.helper import (10 >>> from lp.services.feeds.tests.helper import (
9 ... parse_ids, parse_links, validate_feed)11 ... parse_ids, parse_links, validate_feed)
1012
@@ -49,7 +51,7 @@ which will include an entry for each branch.
49 ... browser.contents, browser.headers['content-type'], browser.url)51 ... browser.contents, browser.headers['content-type'], browser.url)
50 >>> validate_browser_feed(anon_browser)52 >>> validate_browser_feed(anon_browser)
51 No Errors53 No Errors
52 >>> BSS(anon_browser.contents).title.contents54 >>> BeautifulSoup(anon_browser.contents, 'xml').title.contents
53 [u'Branches for Mike Murphy']55 [u'Branches for Mike Murphy']
54 >>> def print_parse_ids(browser):56 >>> def print_parse_ids(browser):
55 ... for id in parse_ids(browser.contents):57 ... for id in parse_ids(browser.contents):
@@ -71,14 +73,15 @@ Ensure the self link is correct and there is only one.
71 ... for link in parse_links(browser.contents, rel="self"):73 ... for link in parse_links(browser.contents, rel="self"):
72 ... print(link)74 ... print(link)
73 >>> print_parse_links(anon_browser)75 >>> print_parse_links(anon_browser)
74 <link rel="self" href="http://feeds.launchpad.test/~mike/branches.atom" />76 <link href="http://feeds.launchpad.test/~mike/branches.atom" rel="self"/>
7577
76The <update> field for the feed will be the most recent value for the78The <update> field for the feed will be the most recent value for the
77updated field in all of the entries.79updated field in all of the entries.
7880
79 >>> strainer = SoupStrainer('updated')81 >>> strainer = SoupStrainer('updated')
80 >>> updated_dates = [extract_text(tag) for tag in BSS(anon_browser.contents,82 >>> updated_dates = [
81 ... parseOnlyThese=strainer)]83 ... extract_text(tag) for tag in BeautifulSoup(
84 ... anon_browser.contents, 'xml', parse_only=strainer)]
82 >>> feed_updated = updated_dates[0]85 >>> feed_updated = updated_dates[0]
83 >>> entry_dates = sorted(updated_dates[1:], reverse=True)86 >>> entry_dates = sorted(updated_dates[1:], reverse=True)
84 >>> assert feed_updated == entry_dates[0], (87 >>> assert feed_updated == entry_dates[0], (
@@ -90,7 +93,7 @@ still be hidden:
90 >>> anon_browser.open('http://feeds.launchpad.test/~name12/branches.atom')93 >>> anon_browser.open('http://feeds.launchpad.test/~name12/branches.atom')
91 >>> validate_browser_feed(anon_browser)94 >>> validate_browser_feed(anon_browser)
92 No Errors95 No Errors
93 >>> BSS(anon_browser.contents).title.contents96 >>> BeautifulSoup(anon_browser.contents, 'xml').title.contents
94 [u'Branches for Sample Person']97 [u'Branches for Sample Person']
95 >>> 'foo@localhost' in anon_browser.contents98 >>> 'foo@localhost' in anon_browser.contents
96 False99 False
@@ -125,7 +128,7 @@ branches listed, just an id for the feed.
125 >>> browser.open('http://feeds.launchpad.test/~landscape-developers/branches.atom')128 >>> browser.open('http://feeds.launchpad.test/~landscape-developers/branches.atom')
126 >>> validate_browser_feed(browser)129 >>> validate_browser_feed(browser)
127 No Errors130 No Errors
128 >>> BSS(browser.contents).title.contents131 >>> BeautifulSoup(browser.contents, 'xml').title.contents
129 [u'Branches for Landscape Developers']132 [u'Branches for Landscape Developers']
130 >>> print_parse_ids(browser)133 >>> print_parse_ids(browser)
131 <id>tag:launchpad.net,2006-07-11:/code/~landscape-developers</id>134 <id>tag:launchpad.net,2006-07-11:/code/~landscape-developers</id>
@@ -139,7 +142,7 @@ which will include an entry for each branch.
139 >>> anon_browser.open('http://feeds.launchpad.test/fooix/branches.atom')142 >>> anon_browser.open('http://feeds.launchpad.test/fooix/branches.atom')
140 >>> validate_browser_feed(anon_browser)143 >>> validate_browser_feed(anon_browser)
141 No Errors144 No Errors
142 >>> BSS(anon_browser.contents).title.contents145 >>> BeautifulSoup(anon_browser.contents, 'xml').title.contents
143 [u'Branches for Fooix']146 [u'Branches for Fooix']
144 >>> print_parse_ids(anon_browser)147 >>> print_parse_ids(anon_browser)
145 <id>tag:launchpad.net,...:/code/fooix</id>148 <id>tag:launchpad.net,...:/code/fooix</id>
@@ -148,14 +151,15 @@ which will include an entry for each branch.
148 <id>tag:launchpad.net,2007-12-01:/code/~mike/fooix/first</id>151 <id>tag:launchpad.net,2007-12-01:/code/~mike/fooix/first</id>
149152
150 >>> print_parse_links(anon_browser)153 >>> print_parse_links(anon_browser)
151 <link rel="self" href="http://feeds.launchpad.test/fooix/branches.atom" />154 <link href="http://feeds.launchpad.test/fooix/branches.atom" rel="self"/>
152155
153The <update> field for the feed will be the most recent value for the156The <update> field for the feed will be the most recent value for the
154updated field in all of the entries.157updated field in all of the entries.
155158
156 >>> strainer = SoupStrainer('updated')159 >>> strainer = SoupStrainer('updated')
157 >>> updated_dates = [extract_text(tag) for tag in BSS(anon_browser.contents,160 >>> updated_dates = [
158 ... parseOnlyThese=strainer)]161 ... extract_text(tag) for tag in BeautifulSoup(
162 ... anon_browser.contents, 'xml', parse_only=strainer)]
159 >>> feed_updated = updated_dates[0]163 >>> feed_updated = updated_dates[0]
160 >>> entry_dates = sorted(updated_dates[1:], reverse=True)164 >>> entry_dates = sorted(updated_dates[1:], reverse=True)
161 >>> assert feed_updated == entry_dates[0], (165 >>> assert feed_updated == entry_dates[0], (
@@ -170,7 +174,7 @@ branches which will include an entry for each branch.
170 >>> anon_browser.open('http://feeds.launchpad.test/oh-man/branches.atom')174 >>> anon_browser.open('http://feeds.launchpad.test/oh-man/branches.atom')
171 >>> validate_browser_feed(anon_browser)175 >>> validate_browser_feed(anon_browser)
172 No Errors176 No Errors
173 >>> BSS(anon_browser.contents).title.contents177 >>> BeautifulSoup(anon_browser.contents, 'xml').title.contents
174 [u'Branches for Oh Man']178 [u'Branches for Oh Man']
175 >>> print_parse_ids(anon_browser)179 >>> print_parse_ids(anon_browser)
176 <id>tag:launchpad.net,...:/code/oh-man</id>180 <id>tag:launchpad.net,...:/code/oh-man</id>
@@ -182,14 +186,15 @@ branches which will include an entry for each branch.
182 <id>tag:launchpad.net,2007-12-01:/code/~mike/fooix/first</id>186 <id>tag:launchpad.net,2007-12-01:/code/~mike/fooix/first</id>
183187
184 >>> print_parse_links(anon_browser)188 >>> print_parse_links(anon_browser)
185 <link rel="self" href="http://feeds.launchpad.test/oh-man/branches.atom" />189 <link href="http://feeds.launchpad.test/oh-man/branches.atom" rel="self"/>
186190
187The <update> field for the feed will be the most recent value for the191The <update> field for the feed will be the most recent value for the
188updated field in all of the entries.192updated field in all of the entries.
189193
190 >>> strainer = SoupStrainer('updated')194 >>> strainer = SoupStrainer('updated')
191 >>> updated_dates = [extract_text(tag) for tag in BSS(anon_browser.contents,195 >>> updated_dates = [
192 ... parseOnlyThese=strainer)]196 ... extract_text(tag) for tag in BeautifulSoup(
197 ... anon_browser.contents, 'xml', parse_only=strainer)]
193 >>> feed_updated = updated_dates[0]198 >>> feed_updated = updated_dates[0]
194 >>> entry_dates = sorted(updated_dates[1:], reverse=True)199 >>> entry_dates = sorted(updated_dates[1:], reverse=True)
195 >>> assert feed_updated == entry_dates[0], (200 >>> assert feed_updated == entry_dates[0], (
@@ -206,7 +211,7 @@ different entry.
206 >>> validate_feed(browser.contents,211 >>> validate_feed(browser.contents,
207 ... browser.headers['content-type'], browser.url)212 ... browser.headers['content-type'], browser.url)
208 No Errors213 No Errors
209 >>> BSS(browser.contents).title.contents214 >>> BeautifulSoup(browser.contents, 'xml').title.contents
210 [u'Latest Revisions for Branch lp://dev/~mark/firefox/release--0.9.1']215 [u'Latest Revisions for Branch lp://dev/~mark/firefox/release--0.9.1']
211 >>> print(browser.url)216 >>> print(browser.url)
212 http://feeds.launchpad.test/~mark/firefox/release--0.9.1/branch.atom217 http://feeds.launchpad.test/~mark/firefox/release--0.9.1/branch.atom
@@ -214,17 +219,19 @@ different entry.
214The first <id> in a feed identifies the feed. Each entry then has its219The first <id> in a feed identifies the feed. Each entry then has its
215own <id>, which in the case of a single branch feed will be identical.220own <id>, which in the case of a single branch feed will be identical.
216221
217 >>> soup = BSS(browser.contents, parseOnlyThese=SoupStrainer('id'))222 >>> soup = BeautifulSoup(
223 ... browser.contents, 'xml', parse_only=SoupStrainer('id'))
218 >>> ids = parse_ids(browser.contents)224 >>> ids = parse_ids(browser.contents)
219 >>> for id_ in ids:225 >>> for id_ in ids:
220 ... print(id_)226 ... print(id_)
221 <id>tag:launchpad.net,2006-10-16:/code/~mark/firefox/release--0.9.1</id>227 <id>tag:launchpad.net,2006-10-16:/code/~mark/firefox/release--0.9.1</id>
222 <id>tag:launchpad.net,2005-03-09:/code/~mark/firefox/release--0.9.1/revision/1</id>228 <id>tag:launchpad.net,2005-03-09:/code/~mark/firefox/release--0.9.1/revision/1</id>
223 >>> print_parse_links(browser)229 >>> print_parse_links(browser)
224 <link rel="self" href="http://feeds.launchpad.test/~mark/firefox/release--0.9.1/branch.atom" />230 <link href="http://feeds.launchpad.test/~mark/firefox/release--0.9.1/branch.atom" rel="self"/>
225 >>> strainer = SoupStrainer('updated')231 >>> strainer = SoupStrainer('updated')
226 >>> updated_dates = [extract_text(tag) for tag in BSS(browser.contents,232 >>> updated_dates = [
227 ... parseOnlyThese=strainer)]233 ... extract_text(tag) for tag in BeautifulSoup(
234 ... browser.contents, 'xml', parse_only=strainer)]
228235
229The update date for the entire feed (updated_dates[0]) must be equal236The update date for the entire feed (updated_dates[0]) must be equal
230to the update_date of the first entry in the feed (updated_dates[1]).237to the update_date of the first entry in the feed (updated_dates[1]).
diff --git a/lib/lp/code/stories/feeds/xx-revision-atom.txt b/lib/lp/code/stories/feeds/xx-revision-atom.txt
index 0ef5eae..3638b09 100644
--- a/lib/lp/code/stories/feeds/xx-revision-atom.txt
+++ b/lib/lp/code/stories/feeds/xx-revision-atom.txt
@@ -1,9 +1,9 @@
1= Atom Feeds For Revisions =1= Atom Feeds For Revisions =
22
3Atom feeds produce XML not HTML. Therefore we must parse the output as XML3Atom feeds produce XML not HTML. Therefore we must parse the output as XML
4using BeautifulStoneSoup instead of BeautifulSoup or the helper functions.4by asking BeautifulSoup to use lxml.
55
6 >>> from BeautifulSoup import BeautifulStoneSoup as BSS6 >>> from lp.services.beautifulsoup import BeautifulSoup4 as BeautifulSoup
7 >>> from lp.services.feeds.tests.helper import (7 >>> from lp.services.feeds.tests.helper import (
8 ... parse_ids, parse_links, validate_feed)8 ... parse_ids, parse_links, validate_feed)
99
@@ -75,7 +75,7 @@ that have been committed by that person (or attributed to that person).
75 ... browser.contents, browser.headers['content-type'], browser.url)75 ... browser.contents, browser.headers['content-type'], browser.url)
76 >>> validate_browser_feed(anon_browser)76 >>> validate_browser_feed(anon_browser)
77 No Errors77 No Errors
78 >>> BSS(anon_browser.contents).title.contents78 >>> BeautifulSoup(anon_browser.contents, 'xml').title.contents
79 [u'Latest Revisions by Mike Murphy']79 [u'Latest Revisions by Mike Murphy']
80 >>> def print_parse_ids(browser):80 >>> def print_parse_ids(browser):
81 ... for id in parse_ids(browser.contents):81 ... for id in parse_ids(browser.contents):
@@ -96,7 +96,7 @@ Ensure the self link is correct and there is only one.
96 ... for link in parse_links(browser.contents, rel="self"):96 ... for link in parse_links(browser.contents, rel="self"):
97 ... print(link)97 ... print(link)
98 >>> print_parse_links(anon_browser)98 >>> print_parse_links(anon_browser)
99 <link rel="self" href="http://feeds.launchpad.test/~mike/revisions.atom" />99 <link href="http://feeds.launchpad.test/~mike/revisions.atom" rel="self"/>
100100
101If we look at the feed for a team, we get revisions created by any member101If we look at the feed for a team, we get revisions created by any member
102of that team.102of that team.
@@ -104,7 +104,7 @@ of that team.
104 >>> browser.open('http://feeds.launchpad.test/~m-team/revisions.atom')104 >>> browser.open('http://feeds.launchpad.test/~m-team/revisions.atom')
105 >>> validate_browser_feed(browser)105 >>> validate_browser_feed(browser)
106 No Errors106 No Errors
107 >>> BSS(browser.contents).title.contents107 >>> BeautifulSoup(browser.contents, 'xml').title.contents
108 [u'Latest Revisions by members of The M Team']108 [u'Latest Revisions by members of The M Team']
109 >>> print_parse_ids(browser)109 >>> print_parse_ids(browser)
110 <id>tag:launchpad.net,...:/code/~m-team</id>110 <id>tag:launchpad.net,...:/code/~m-team</id>
@@ -122,7 +122,7 @@ that have been committed on branches for the product.
122 >>> anon_browser.open('http://feeds.launchpad.test/fooix/revisions.atom')122 >>> anon_browser.open('http://feeds.launchpad.test/fooix/revisions.atom')
123 >>> validate_browser_feed(anon_browser)123 >>> validate_browser_feed(anon_browser)
124 No Errors124 No Errors
125 >>> BSS(anon_browser.contents).title.contents125 >>> BeautifulSoup(anon_browser.contents, 'xml').title.contents
126 [u'Latest Revisions for Fooix']126 [u'Latest Revisions for Fooix']
127127
128Ignore the date associated with the id of 'fooix' as this is the date created128Ignore the date associated with the id of 'fooix' as this is the date created
@@ -136,7 +136,7 @@ for the product, which will be different each time the test is run.
136Ensure the self link points to the feed location and there is only one.136Ensure the self link points to the feed location and there is only one.
137137
138 >>> print_parse_links(anon_browser)138 >>> print_parse_links(anon_browser)
139 <link rel="self" href="http://feeds.launchpad.test/fooix/revisions.atom" />139 <link href="http://feeds.launchpad.test/fooix/revisions.atom" rel="self"/>
140140
141141
142== Feed for a project group's revisions ==142== Feed for a project group's revisions ==
@@ -147,7 +147,7 @@ branch for any product that is associated with the project group.
147 >>> anon_browser.open('http://feeds.launchpad.test/fubar/revisions.atom')147 >>> anon_browser.open('http://feeds.launchpad.test/fubar/revisions.atom')
148 >>> validate_browser_feed(anon_browser)148 >>> validate_browser_feed(anon_browser)
149 No Errors149 No Errors
150 >>> BSS(anon_browser.contents).title.contents150 >>> BeautifulSoup(anon_browser.contents, 'xml').title.contents
151 [u'Latest Revisions for Fubar']151 [u'Latest Revisions for Fubar']
152152
153Ignore the date associated with the id of 'fubar' as this is the date created153Ignore the date associated with the id of 'fubar' as this is the date created
@@ -163,4 +163,4 @@ of the project group, which will be different each time the test is run.
163Ensure the self link points to the feed location and there is only one.163Ensure the self link points to the feed location and there is only one.
164164
165 >>> print_parse_links(anon_browser)165 >>> print_parse_links(anon_browser)
166 <link rel="self" href="http://feeds.launchpad.test/fubar/revisions.atom" />166 <link href="http://feeds.launchpad.test/fubar/revisions.atom" rel="self"/>
diff --git a/lib/lp/registry/stories/announcements/xx-announcements.txt b/lib/lp/registry/stories/announcements/xx-announcements.txt
index f8e1045..addfc4a 100644
--- a/lib/lp/registry/stories/announcements/xx-announcements.txt
+++ b/lib/lp/registry/stories/announcements/xx-announcements.txt
@@ -7,8 +7,8 @@ dedicated batched page showing all announcements, and as an RSS/Atom
7news feed.7news feed.
88
9 >>> from lp.services.beautifulsoup import (9 >>> from lp.services.beautifulsoup import (
10 ... BeautifulSoup,10 ... BeautifulSoup4 as BeautifulSoup,
11 ... SoupStrainer,11 ... SoupStrainer4 as SoupStrainer,
12 ... )12 ... )
13 >>> from lp.services.feeds.tests.helper import (13 >>> from lp.services.feeds.tests.helper import (
14 ... parse_ids, parse_links, validate_feed)14 ... parse_ids, parse_links, validate_feed)
@@ -643,7 +643,7 @@ domain.
643 >>> links = parse_links(nopriv_browser.contents, rel='self')643 >>> links = parse_links(nopriv_browser.contents, rel='self')
644 >>> for link in links:644 >>> for link in links:
645 ... print link645 ... print link
646 <link rel="self" href="http://feeds.launchpad.test/netapplet/announcements.atom" />646 <link href="http://feeds.launchpad.test/netapplet/announcements.atom" rel="self"/>
647647
648 >>> for id_ in parse_ids(nopriv_browser.contents):648 >>> for id_ in parse_ids(nopriv_browser.contents):
649 ... print extract_text(id_)649 ... print extract_text(id_)
@@ -716,7 +716,7 @@ products.
716 >>> links = parse_links(nopriv_browser.contents, rel='self')716 >>> links = parse_links(nopriv_browser.contents, rel='self')
717 >>> for link in links:717 >>> for link in links:
718 ... print link718 ... print link
719 <link rel="self" href="http://feeds.launchpad.test/apache/announcements.atom" />719 <link href="http://feeds.launchpad.test/apache/announcements.atom" rel="self"/>
720720
721Finally, there is a feed for all announcements across all projects721Finally, there is a feed for all announcements across all projects
722hosted in Launchpad:722hosted in Launchpad:
@@ -755,18 +755,16 @@ let us use a DTD to define the html entities that standard xml is missing.
755 No Errors755 No Errors
756 >>> soup = BeautifulSoup(nopriv_browser.contents)756 >>> soup = BeautifulSoup(nopriv_browser.contents)
757 >>> soup.find('feed').entry.title757 >>> soup.find('feed').entry.title
758 <...>Ampersand=&quot;&amp;&quot; LessThan=&quot;&lt;&quot;758 <...>Ampersand="&amp;" LessThan="&lt;" GreaterThan="&gt;"</title>
759 GreaterThan=&quot;&gt;&quot;</title>759 >>> print(soup.find('feed').entry.content)
760 >>> soup.find('feed').entry.content
761 <...760 <...
762 Ampersand=&amp;quot;&amp;amp;&amp;quot;&lt;br /&gt;761 Ampersand="&amp;amp;"&lt;br/&gt;
763 LessThan=&amp;quot;&amp;lt;&amp;quot;&lt;br /&gt;762 LessThan="&amp;lt;"&lt;br/&gt;
764 GreaterThan=&amp;quot;&amp;gt;&amp;quot;&lt;br /&gt;763 GreaterThan="&amp;gt;"&lt;br/&gt;
765 Newline=&amp;quot;&lt;br /&gt;764 Newline="&lt;br/&gt;
766 &amp;quot;&lt;br /&gt;765 "&lt;br/&gt;
767 url=&amp;quot;&lt;a rel=&quot;nofollow&quot;766 url="&lt;a href="http://www.ubuntu.com"
768 href=&quot;http://www.ubuntu.com&quot;&gt;http://&lt;wbr767 rel="nofollow"&gt;http://&lt;wbr/&gt;www.ubuntu.&lt;wbr/&gt;com&lt;/a&gt;"...
769 /&gt;www.ubuntu.&lt;wbr /&gt;com&lt;/a&gt;&amp;quot;...
770768
771769
772Deletion770Deletion
diff --git a/lib/lp/services/feeds/doc/feeds.txt b/lib/lp/services/feeds/doc/feeds.txt
index 35086d2..02315cb 100644
--- a/lib/lp/services/feeds/doc/feeds.txt
+++ b/lib/lp/services/feeds/doc/feeds.txt
@@ -157,7 +157,7 @@ we are testing xhtml encoding here in case we need it in the future.
157 >>> xhtml = FeedTypedData("<b> and &nbsp; and &amp;</b><hr/>",157 >>> xhtml = FeedTypedData("<b> and &nbsp; and &amp;</b><hr/>",
158 ... content_type="xhtml")158 ... content_type="xhtml")
159 >>> xhtml.content159 >>> xhtml.content
160 u'<b> and \xa0 and &amp;</b><hr />'160 u'<b> and \xa0 and &amp;</b><hr/>'
161161
162162
163== validate_feed() helper function ==163== validate_feed() helper function ==
diff --git a/lib/lp/services/feeds/feed.py b/lib/lp/services/feeds/feed.py
index 9462062..143121c 100644
--- a/lib/lp/services/feeds/feed.py
+++ b/lib/lp/services/feeds/feed.py
@@ -27,7 +27,7 @@ from zope.component import getUtility
27from zope.datetime import rfc1123_date27from zope.datetime import rfc1123_date
28from zope.interface import implementer28from zope.interface import implementer
2929
30from lp.services.beautifulsoup import BeautifulSoup30from lp.services.beautifulsoup import BeautifulSoup4 as BeautifulSoup
31from lp.services.config import config31from lp.services.config import config
32from lp.services.feeds.interfaces.feed import (32from lp.services.feeds.interfaces.feed import (
33 IFeed,33 IFeed,
@@ -302,9 +302,7 @@ class FeedTypedData:
302 if self.content_type in ('text', 'html'):302 if self.content_type in ('text', 'html'):
303 altered_content = html_escape(altered_content)303 altered_content = html_escape(altered_content)
304 elif self.content_type == 'xhtml':304 elif self.content_type == 'xhtml':
305 soup = BeautifulSoup(305 soup = BeautifulSoup(altered_content)
306 altered_content,
307 convertEntities=BeautifulSoup.HTML_ENTITIES)
308 altered_content = unicode(soup)306 altered_content = unicode(soup)
309 return altered_content307 return altered_content
310308
diff --git a/lib/lp/services/feeds/stories/xx-links.txt b/lib/lp/services/feeds/stories/xx-links.txt
index 83110fd..23e467b 100644
--- a/lib/lp/services/feeds/stories/xx-links.txt
+++ b/lib/lp/services/feeds/stories/xx-links.txt
@@ -11,13 +11,13 @@ launchpad.test to provide links to corresponding Atom feeds.
11The root launchpad.test url will have a link to the Atom feed which11The root launchpad.test url will have a link to the Atom feed which
12displays the most recent announcements for all the projects.12displays the most recent announcements for all the projects.
1313
14 >>> from lp.services.beautifulsoup import BeautifulSoup14 >>> from lp.services.beautifulsoup import BeautifulSoup4 as BeautifulSoup
15 >>> browser.open('http://launchpad.test/')15 >>> browser.open('http://launchpad.test/')
16 >>> soup = BeautifulSoup(browser.contents)16 >>> soup = BeautifulSoup(browser.contents)
17 >>> soup.head.findAll('link', type='application/atom+xml')17 >>> soup.head.findAll('link', type='application/atom+xml')
18 [<link rel="alternate" type="application/atom+xml"18 [<link href="http://feeds.launchpad.test/announcements.atom"
19 href="http://feeds.launchpad.test/announcements.atom"19 rel="alternate" title="All Announcements"
20 title="All Announcements" />]20 type="application/atom+xml"/>]
2121
22The http://launchpad.test/+announcements page also displays recent22The http://launchpad.test/+announcements page also displays recent
23announcements for all the projects so it should have a link to the same23announcements for all the projects so it should have a link to the same
@@ -26,9 +26,9 @@ feed.
26 >>> browser.open('http://launchpad.test/+announcements')26 >>> browser.open('http://launchpad.test/+announcements')
27 >>> soup = BeautifulSoup(browser.contents)27 >>> soup = BeautifulSoup(browser.contents)
28 >>> soup.head.findAll('link', type='application/atom+xml')28 >>> soup.head.findAll('link', type='application/atom+xml')
29 [<link rel="alternate" type="application/atom+xml"29 [<link href="http://feeds.launchpad.test/announcements.atom"
30 href="http://feeds.launchpad.test/announcements.atom"30 rel="alternate" title="All Announcements"
31 title="All Announcements" />]31 type="application/atom+xml"/>]
3232
33== Single Bug Feed ==33== Single Bug Feed ==
3434
@@ -38,9 +38,9 @@ atom feed for that one bug.
38 >>> browser.open('http://bugs.launchpad.test/firefox/+bug/1')38 >>> browser.open('http://bugs.launchpad.test/firefox/+bug/1')
39 >>> soup = BeautifulSoup(browser.contents)39 >>> soup = BeautifulSoup(browser.contents)
40 >>> soup.head.findAll('link', type='application/atom+xml')40 >>> soup.head.findAll('link', type='application/atom+xml')
41 [<link rel="alternate" type="application/atom+xml"41 [<link href="http://feeds.launchpad.test/bugs/1/bug.atom"
42 href="http://feeds.launchpad.test/bugs/1/bug.atom"42 rel="alternate" title="Bug 1 Feed"
43 title="Bug 1 Feed" />]43 type="application/atom+xml"/>]
4444
45But if the bug is private, there should be no link.45But if the bug is private, there should be no link.
4646
@@ -80,15 +80,15 @@ branches.
80 >>> browser.open('http://launchpad.test/~stevea')80 >>> browser.open('http://launchpad.test/~stevea')
81 >>> soup = BeautifulSoup(browser.contents)81 >>> soup = BeautifulSoup(browser.contents)
82 >>> soup.head.findAll('link', type='application/atom+xml')82 >>> soup.head.findAll('link', type='application/atom+xml')
83 [<link rel="alternate" type="application/atom+xml"83 [<link href="http://feeds.launchpad.test/~stevea/latest-bugs.atom"
84 href="http://feeds.launchpad.test/~stevea/latest-bugs.atom"84 rel="alternate" title="Latest Bugs for Steve Alexander"
85 title="Latest Bugs for Steve Alexander" />,85 type="application/atom+xml"/>,
86 <link rel="alternate" type="application/atom+xml"86 <link href="http://feeds.launchpad.test/~stevea/branches.atom"
87 href="http://feeds.launchpad.test/~stevea/branches.atom"87 rel="alternate" title="Latest Branches for Steve Alexander"
88 title="Latest Branches for Steve Alexander" />,88 type="application/atom+xml"/>,
89 <link rel="alternate" type="application/atom+xml"89 <link href="http://feeds.launchpad.test/~stevea/revisions.atom"
90 href="http://feeds.launchpad.test/~stevea/revisions.atom"90 rel="alternate" title="Latest Revisions by Steve Alexander"
91 title="Latest Revisions by Steve Alexander" />]91 type="application/atom+xml"/>]
9292
93On the bugs subdomain, only a link to the bugs feed will be included,93On the bugs subdomain, only a link to the bugs feed will be included,
94not the branches link.94not the branches link.
@@ -96,9 +96,9 @@ not the branches link.
96 >>> browser.open('http://bugs.launchpad.test/~stevea')96 >>> browser.open('http://bugs.launchpad.test/~stevea')
97 >>> soup = BeautifulSoup(browser.contents)97 >>> soup = BeautifulSoup(browser.contents)
98 >>> soup.head.findAll('link', type='application/atom+xml')98 >>> soup.head.findAll('link', type='application/atom+xml')
99 [<link rel="alternate" type="application/atom+xml"99 [<link href="http://feeds.launchpad.test/~stevea/latest-bugs.atom"
100 href="http://feeds.launchpad.test/~stevea/latest-bugs.atom"100 rel="alternate" title="Latest Bugs for Steve Alexander"
101 title="Latest Bugs for Steve Alexander" />]101 type="application/atom+xml"/>]
102102
103103
104== Latest Bugs, Branches, and Announcements for a Product ==104== Latest Bugs, Branches, and Announcements for a Product ==
@@ -112,27 +112,27 @@ main product page.
112 >>> browser.open('http://launchpad.test/jokosher')112 >>> browser.open('http://launchpad.test/jokosher')
113 >>> soup = BeautifulSoup(browser.contents)113 >>> soup = BeautifulSoup(browser.contents)
114 >>> soup.head.findAll('link', type='application/atom+xml')114 >>> soup.head.findAll('link', type='application/atom+xml')
115 [<link rel="alternate" type="application/atom+xml"115 [<link href="http://feeds.launchpad.test/jokosher/announcements.atom"
116 href="http://feeds.launchpad.test/jokosher/announcements.atom"116 rel="alternate" title="Announcements for Jokosher"
117 title="Announcements for Jokosher" />,117 type="application/atom+xml"/>,
118 <link rel="alternate" type="application/atom+xml"118 <link href="http://feeds.launchpad.test/jokosher/latest-bugs.atom"
119 href="http://feeds.launchpad.test/jokosher/latest-bugs.atom"119 rel="alternate" title="Latest Bugs for Jokosher"
120 title="Latest Bugs for Jokosher" />,120 type="application/atom+xml"/>,
121 <link rel="alternate" type="application/atom+xml"121 <link href="http://feeds.launchpad.test/jokosher/branches.atom"
122 href="http://feeds.launchpad.test/jokosher/branches.atom"122 rel="alternate" title="Latest Branches for Jokosher"
123 title="Latest Branches for Jokosher" />,123 type="application/atom+xml"/>,
124 <link rel="alternate" type="application/atom+xml"124 <link href="http://feeds.launchpad.test/jokosher/revisions.atom"
125 href="http://feeds.launchpad.test/jokosher/revisions.atom"125 rel="alternate" title="Latest Revisions for Jokosher"
126 title="Latest Revisions for Jokosher" />]126 type="application/atom+xml"/>]
127127
128Only bug feeds should be linked to on bugs.launchpad.test.128Only bug feeds should be linked to on bugs.launchpad.test.
129129
130 >>> browser.open('http://bugs.launchpad.test/jokosher')130 >>> browser.open('http://bugs.launchpad.test/jokosher')
131 >>> soup = BeautifulSoup(browser.contents)131 >>> soup = BeautifulSoup(browser.contents)
132 >>> soup.head.findAll('link', type='application/atom+xml')132 >>> soup.head.findAll('link', type='application/atom+xml')
133 [<link rel="alternate" type="application/atom+xml"133 [<link href="http://feeds.launchpad.test/jokosher/latest-bugs.atom"
134 href="http://feeds.launchpad.test/jokosher/latest-bugs.atom"134 rel="alternate" title="Latest Bugs for Jokosher"
135 title="Latest Bugs for Jokosher" />]135 type="application/atom+xml"/>]
136136
137137
138== Escaping the title ==138== Escaping the title ==
@@ -160,18 +160,22 @@ it must have quotes and html escaped.
160 >>> browser.open('http://launchpad.test/bad-displayname')160 >>> browser.open('http://launchpad.test/bad-displayname')
161 >>> soup = BeautifulSoup(browser.contents)161 >>> soup = BeautifulSoup(browser.contents)
162 >>> soup.head.findAll('link', type='application/atom+xml')162 >>> soup.head.findAll('link', type='application/atom+xml')
163 [<link rel="alternate" type="application/atom+xml"163 [<link href="http://feeds.launchpad.test/bad-displayname/announcements.atom"
164 href="http://feeds.launchpad.test/bad-displayname/announcements.atom"164 rel="alternate"
165 title='Announcements for Bad displayname"&gt;&lt;script&gt;alert("h4x0r")&lt;/script&gt;' />,165 title='Announcements for Bad displayname"&gt;&lt;script&gt;alert("h4x0r")&lt;/script&gt;'
166 <link rel="alternate" type="application/atom+xml"166 type="application/atom+xml"/>,
167 href="http://feeds.launchpad.test/bad-displayname/latest-bugs.atom"167 <link href="http://feeds.launchpad.test/bad-displayname/latest-bugs.atom"
168 title='Latest Bugs for Bad displayname"&gt;&lt;script&gt;alert("h4x0r")&lt;/script&gt;' />,168 rel="alternate"
169 <link rel="alternate" type="application/atom+xml"169 title='Latest Bugs for Bad displayname"&gt;&lt;script&gt;alert("h4x0r")&lt;/script&gt;'
170 href="http://feeds.launchpad.test/bad-displayname/branches.atom"170 type="application/atom+xml"/>,
171 title='Latest Branches for Bad displayname"&gt;&lt;script&gt;alert("h4x0r")&lt;/script&gt;' />,171 <link href="http://feeds.launchpad.test/bad-displayname/branches.atom"
172 <link rel="alternate" type="application/atom+xml"172 rel="alternate"
173 href="http://feeds.launchpad.test/bad-displayname/revisions.atom"173 title='Latest Branches for Bad displayname"&gt;&lt;script&gt;alert("h4x0r")&lt;/script&gt;'
174 title='Latest Revisions for Bad displayname"&gt;&lt;script&gt;alert("h4x0r")&lt;/script&gt;' />]174 type="application/atom+xml"/>,
175 <link href="http://feeds.launchpad.test/bad-displayname/revisions.atom"
176 rel="alternate"
177 title='Latest Revisions for Bad displayname"&gt;&lt;script&gt;alert("h4x0r")&lt;/script&gt;'
178 type="application/atom+xml"/>]
175179
176== Latest Bugs for a ProjectGroup ==180== Latest Bugs for a ProjectGroup ==
177181
@@ -184,27 +188,27 @@ on the main project group page.
184 >>> browser.open('http://launchpad.test/gnome')188 >>> browser.open('http://launchpad.test/gnome')
185 >>> soup = BeautifulSoup(browser.contents)189 >>> soup = BeautifulSoup(browser.contents)
186 >>> soup.head.findAll('link', type='application/atom+xml')190 >>> soup.head.findAll('link', type='application/atom+xml')
187 [<link rel="alternate" type="application/atom+xml"191 [<link href="http://feeds.launchpad.test/gnome/announcements.atom"
188 href="http://feeds.launchpad.test/gnome/announcements.atom"192 rel="alternate" title="Announcements for GNOME"
189 title="Announcements for GNOME" />,193 type="application/atom+xml"/>,
190 <link rel="alternate" type="application/atom+xml"194 <link href="http://feeds.launchpad.test/gnome/latest-bugs.atom"
191 href="http://feeds.launchpad.test/gnome/latest-bugs.atom"195 rel="alternate" title="Latest Bugs for GNOME"
192 title="Latest Bugs for GNOME" />,196 type="application/atom+xml"/>,
193 <link rel="alternate" type="application/atom+xml"197 <link href="http://feeds.launchpad.test/gnome/branches.atom"
194 href="http://feeds.launchpad.test/gnome/branches.atom"198 rel="alternate" title="Latest Branches for GNOME"
195 title="Latest Branches for GNOME" />,199 type="application/atom+xml"/>,
196 <link rel="alternate" type="application/atom+xml"200 <link href="http://feeds.launchpad.test/gnome/revisions.atom"
197 href="http://feeds.launchpad.test/gnome/revisions.atom"201 rel="alternate" title="Latest Revisions for GNOME"
198 title="Latest Revisions for GNOME" />]202 type="application/atom+xml"/>]
199203
200Only bug feeds should be linked to on bugs.launchpad.test.204Only bug feeds should be linked to on bugs.launchpad.test.
201205
202 >>> browser.open('http://bugs.launchpad.test/gnome')206 >>> browser.open('http://bugs.launchpad.test/gnome')
203 >>> soup = BeautifulSoup(browser.contents)207 >>> soup = BeautifulSoup(browser.contents)
204 >>> soup.head.findAll('link', type='application/atom+xml')208 >>> soup.head.findAll('link', type='application/atom+xml')
205 [<link rel="alternate" type="application/atom+xml"209 [<link href="http://feeds.launchpad.test/gnome/latest-bugs.atom"
206 href="http://feeds.launchpad.test/gnome/latest-bugs.atom"210 rel="alternate" title="Latest Bugs for GNOME"
207 title="Latest Bugs for GNOME" />]211 type="application/atom+xml"/>]
208212
209The default view for a project group on bugs.launchpad.test is +bugs. The213The default view for a project group on bugs.launchpad.test is +bugs. The
210default bug listing matches the latest-bugs atom feed, but any search214default bug listing matches the latest-bugs atom feed, but any search
@@ -231,21 +235,21 @@ An announcements feed link should also be shown on the main distro page.
231 >>> browser.open('http://launchpad.test/ubuntu')235 >>> browser.open('http://launchpad.test/ubuntu')
232 >>> soup = BeautifulSoup(browser.contents)236 >>> soup = BeautifulSoup(browser.contents)
233 >>> soup.head.findAll('link', type='application/atom+xml')237 >>> soup.head.findAll('link', type='application/atom+xml')
234 [<link rel="alternate" type="application/atom+xml"238 [<link href="http://feeds.launchpad.test/ubuntu/announcements.atom"
235 href="http://feeds.launchpad.test/ubuntu/announcements.atom"239 rel="alternate" title="Announcements for Ubuntu"
236 title="Announcements for Ubuntu" />,240 type="application/atom+xml"/>,
237 <link rel="alternate" type="application/atom+xml"241 <link href="http://feeds.launchpad.test/ubuntu/latest-bugs.atom"
238 href="http://feeds.launchpad.test/ubuntu/latest-bugs.atom"242 rel="alternate" title="Latest Bugs for Ubuntu"
239 title="Latest Bugs for Ubuntu" />]243 type="application/atom+xml"/>]
240244
241Only bug feeds should be linked to on bugs.launchpad.test.245Only bug feeds should be linked to on bugs.launchpad.test.
242246
243 >>> browser.open('http://bugs.launchpad.test/ubuntu')247 >>> browser.open('http://bugs.launchpad.test/ubuntu')
244 >>> soup = BeautifulSoup(browser.contents)248 >>> soup = BeautifulSoup(browser.contents)
245 >>> soup.head.findAll('link', type='application/atom+xml')249 >>> soup.head.findAll('link', type='application/atom+xml')
246 [<link rel="alternate" type="application/atom+xml"250 [<link href="http://feeds.launchpad.test/ubuntu/latest-bugs.atom"
247 href="http://feeds.launchpad.test/ubuntu/latest-bugs.atom"251 rel="alternate" title="Latest Bugs for Ubuntu"
248 title="Latest Bugs for Ubuntu" />]252 type="application/atom+xml"/>]
249253
250254
251== Latest Bugs for a Distroseries ==255== Latest Bugs for a Distroseries ==
@@ -256,9 +260,10 @@ show a link to the atom feed for that distroseries' latest bugs.
256 >>> browser.open('http://bugs.launchpad.test/ubuntu/hoary')260 >>> browser.open('http://bugs.launchpad.test/ubuntu/hoary')
257 >>> soup = BeautifulSoup(browser.contents)261 >>> soup = BeautifulSoup(browser.contents)
258 >>> soup.head.findAll('link', type='application/atom+xml')262 >>> soup.head.findAll('link', type='application/atom+xml')
259 [<link rel="alternate" type="application/atom+xml"263 [<link
260 href="http://feeds.launchpad.test/ubuntu/hoary/latest-bugs.atom"264 href="http://feeds.launchpad.test/ubuntu/hoary/latest-bugs.atom"
261 title="Latest Bugs for Hoary" />]265 rel="alternate" title="Latest Bugs for Hoary"
266 type="application/atom+xml"/>]
262267
263268
264== Latest Bugs for a Product Series ==269== Latest Bugs for a Product Series ==
@@ -269,9 +274,9 @@ show a link to the atom feed for that product series' latest bugs.
269 >>> browser.open('http://bugs.launchpad.test/firefox/1.0')274 >>> browser.open('http://bugs.launchpad.test/firefox/1.0')
270 >>> soup = BeautifulSoup(browser.contents)275 >>> soup = BeautifulSoup(browser.contents)
271 >>> soup.head.findAll('link', type='application/atom+xml')276 >>> soup.head.findAll('link', type='application/atom+xml')
272 [<link rel="alternate" type="application/atom+xml"277 [<link href="http://feeds.launchpad.test/firefox/1.0/latest-bugs.atom"
273 href="http://feeds.launchpad.test/firefox/1.0/latest-bugs.atom"278 rel="alternate" title="Latest Bugs for 1.0"
274 title="Latest Bugs for 1.0" />]279 type="application/atom+xml"/>]
275280
276281
277== Latest Bugs for a Source Package ==282== Latest Bugs for a Source Package ==
@@ -282,9 +287,10 @@ show a link to the atom feed for that source package's latest bugs.
282 >>> browser.open('http://bugs.launchpad.test/ubuntu/+source/cnews')287 >>> browser.open('http://bugs.launchpad.test/ubuntu/+source/cnews')
283 >>> soup = BeautifulSoup(browser.contents)288 >>> soup = BeautifulSoup(browser.contents)
284 >>> soup.head.findAll('link', type='application/atom+xml')289 >>> soup.head.findAll('link', type='application/atom+xml')
285 [<link rel="alternate" type="application/atom+xml"290 [<link
286 href="http://feeds.launchpad.test/ubuntu/+source/cnews/latest-bugs.atom"291 href="http://feeds.launchpad.test/ubuntu/+source/cnews/latest-bugs.atom"
287 title="Latest Bugs for cnews in Ubuntu" />]292 rel="alternate" title="Latest Bugs for cnews in Ubuntu"
293 type="application/atom+xml"/>]
288294
289295
290== Latest Branches for a ProjectGroup ==296== Latest Branches for a ProjectGroup ==
@@ -295,12 +301,14 @@ to the atom feed for that project group's latest branches.
295 >>> browser.open('http://code.launchpad.test/mozilla')301 >>> browser.open('http://code.launchpad.test/mozilla')
296 >>> soup = BeautifulSoup(browser.contents)302 >>> soup = BeautifulSoup(browser.contents)
297 >>> soup.head.findAll('link', type='application/atom+xml')303 >>> soup.head.findAll('link', type='application/atom+xml')
298 [<link rel="alternate" type="application/atom+xml"304 [<link
299 href="http://feeds.launchpad.test/mozilla/branches.atom"305 href="http://feeds.launchpad.test/mozilla/branches.atom"
300 title="Latest Branches for The Mozilla Project" />,306 rel="alternate" title="Latest Branches for The Mozilla Project"
301 <link rel="alternate" type="application/atom+xml"307 type="application/atom+xml"/>,
308 <link
302 href="http://feeds.launchpad.test/mozilla/revisions.atom"309 href="http://feeds.launchpad.test/mozilla/revisions.atom"
303 title="Latest Revisions for The Mozilla Project" />]310 rel="alternate" title="Latest Revisions for The Mozilla Project"
311 type="application/atom+xml"/>]
304312
305313
306== Latest Branches for a Product ==314== Latest Branches for a Product ==
@@ -311,12 +319,13 @@ to the atom feed for that product's latest branches.
311 >>> browser.open('http://code.launchpad.test/firefox')319 >>> browser.open('http://code.launchpad.test/firefox')
312 >>> soup = BeautifulSoup(browser.contents)320 >>> soup = BeautifulSoup(browser.contents)
313 >>> soup.head.findAll('link', type='application/atom+xml')321 >>> soup.head.findAll('link', type='application/atom+xml')
314 [<link rel="alternate" type="application/atom+xml"322 [<link href="http://feeds.launchpad.test/firefox/branches.atom"
315 href="http://feeds.launchpad.test/firefox/branches.atom"323 rel="alternate" title="Latest Branches for Mozilla Firefox"
316 title="Latest Branches for Mozilla Firefox" />,324 type="application/atom+xml"/>,
317 <link rel="alternate" type="application/atom+xml"325 <link href="http://feeds.launchpad.test/firefox/revisions.atom"
318 href="http://feeds.launchpad.test/firefox/revisions.atom"326 rel="alternate"
319 title="Latest Revisions for Mozilla Firefox" />]327 title="Latest Revisions for Mozilla Firefox"
328 type="application/atom+xml"/>]
320329
321330
322== Latest Branches for a Person ==331== Latest Branches for a Person ==
@@ -327,12 +336,12 @@ to the atom feed for that person's latest branches.
327 >>> browser.open('http://code.launchpad.test/~mark')336 >>> browser.open('http://code.launchpad.test/~mark')
328 >>> soup = BeautifulSoup(browser.contents)337 >>> soup = BeautifulSoup(browser.contents)
329 >>> soup.head.findAll('link', type='application/atom+xml')338 >>> soup.head.findAll('link', type='application/atom+xml')
330 [<link rel="alternate" type="application/atom+xml"339 [<link href="http://feeds.launchpad.test/~mark/branches.atom"
331 href="http://feeds.launchpad.test/~mark/branches.atom"340 rel="alternate" title="Latest Branches for Mark Shuttleworth"
332 title="Latest Branches for Mark Shuttleworth" />,341 type="application/atom+xml"/>,
333 <link rel="alternate" type="application/atom+xml"342 <link href="http://feeds.launchpad.test/~mark/revisions.atom"
334 href="http://feeds.launchpad.test/~mark/revisions.atom"343 rel="alternate" title="Latest Revisions by Mark Shuttleworth"
335 title="Latest Revisions by Mark Shuttleworth" />]344 type="application/atom+xml"/>]
336345
337346
338== Latest Revisions on a Branch ==347== Latest Revisions on a Branch ==
@@ -344,9 +353,11 @@ atom feed for that branch's revisions.
344 >>> browser.open(url)353 >>> browser.open(url)
345 >>> soup = BeautifulSoup(browser.contents)354 >>> soup = BeautifulSoup(browser.contents)
346 >>> soup.head.findAll('link', type='application/atom+xml')355 >>> soup.head.findAll('link', type='application/atom+xml')
347 [<link rel="alternate" type="application/atom+xml"356 [<link
348 href="http://feeds.launchpad.test/~mark/firefox/release--0.9.1/branch.atom"357 href="http://feeds.launchpad.test/~mark/firefox/release--0.9.1/branch.atom"
349 title="Latest Revisions for Branch lp://dev/~mark/firefox/release--0.9.1" />]358 rel="alternate"
359 title="Latest Revisions for Branch lp://dev/~mark/firefox/release--0.9.1"
360 type="application/atom+xml"/>]
350361
351But if the branch is private, there should be no link.362But if the branch is private, there should be no link.
352363
diff --git a/lib/lp/services/feeds/stories/xx-security.txt b/lib/lp/services/feeds/stories/xx-security.txt
index ea7122d..2e441df 100644
--- a/lib/lp/services/feeds/stories/xx-security.txt
+++ b/lib/lp/services/feeds/stories/xx-security.txt
@@ -4,32 +4,32 @@ Feeds do not display private bugs
4Feeds never contain private bugs, as we are serving feeds over HTTP.4Feeds never contain private bugs, as we are serving feeds over HTTP.
5First, set all the bugs to private.5First, set all the bugs to private.
66
7 >>> from zope.security.interfaces import Unauthorized
8 >>> from BeautifulSoup import BeautifulStoneSoup as BSS
9 >>> from lp.services.database.interfaces import IStore
10 >>> import transaction7 >>> import transaction
11 >>> from lp.bugs.model.bug import Bug8 >>> from zope.security.interfaces import Unauthorized
12 >>> from lp.app.enums import InformationType9 >>> from lp.app.enums import InformationType
10 >>> from lp.bugs.model.bug import Bug
11 >>> from lp.services.beautifulsoup import BeautifulSoup4 as BeautifulSoup
12 >>> from lp.services.database.interfaces import IStore
13 >>> IStore(Bug).find(Bug).set(information_type=InformationType.USERDATA)13 >>> IStore(Bug).find(Bug).set(information_type=InformationType.USERDATA)
14 >>> transaction.commit()14 >>> transaction.commit()
1515
16There should be zero entries in these feeds, since all the bugs are private.16There should be zero entries in these feeds, since all the bugs are private.
1717
18 >>> browser.open('http://feeds.launchpad.test/jokosher/latest-bugs.atom')18 >>> browser.open('http://feeds.launchpad.test/jokosher/latest-bugs.atom')
19 >>> BSS(browser.contents)('entry')19 >>> BeautifulSoup(browser.contents, 'xml')('entry')
20 []20 []
2121
22 >>> browser.open('http://feeds.launchpad.test/mozilla/latest-bugs.atom')22 >>> browser.open('http://feeds.launchpad.test/mozilla/latest-bugs.atom')
23 >>> BSS(browser.contents)('entry')23 >>> BeautifulSoup(browser.contents, 'xml')('entry')
24 []24 []
2525
26 >>> browser.open('http://feeds.launchpad.test/~name16/latest-bugs.atom')26 >>> browser.open('http://feeds.launchpad.test/~name16/latest-bugs.atom')
27 >>> BSS(browser.contents)('entry')27 >>> BeautifulSoup(browser.contents, 'xml')('entry')
28 []28 []
2929
30 >>> browser.open(30 >>> browser.open(
31 ... 'http://feeds.launchpad.test/~simple-team/latest-bugs.atom')31 ... 'http://feeds.launchpad.test/~simple-team/latest-bugs.atom')
32 >>> BSS(browser.contents)('entry')32 >>> BeautifulSoup(browser.contents, 'xml')('entry')
33 []33 []
3434
35 >>> from lp.services.config import config35 >>> from lp.services.config import config
@@ -41,52 +41,52 @@ There should be zero entries in these feeds, since all the bugs are private.
41 >>> browser.open('http://feeds.launchpad.test/bugs/+bugs.atom?'41 >>> browser.open('http://feeds.launchpad.test/bugs/+bugs.atom?'
42 ... 'field.searchtext=&search=Search+Bug+Reports&'42 ... 'field.searchtext=&search=Search+Bug+Reports&'
43 ... 'field.scope=all&field.scope.target=')43 ... 'field.scope=all&field.scope.target=')
44 >>> BSS(browser.contents)('entry')44 >>> BeautifulSoup(browser.contents, 'xml')('entry')
45 []45 []
4646
47There should be just one <tr> elements for the table header in47There should be just one <tr> elements for the table header in
48these HTML feeds, since all the bugs are private.48these HTML feeds, since all the bugs are private.
4949
50 >>> browser.open('http://feeds.launchpad.test/jokosher/latest-bugs.html')50 >>> browser.open('http://feeds.launchpad.test/jokosher/latest-bugs.html')
51 >>> len(BSS(browser.contents)('tr'))51 >>> len(BeautifulSoup(browser.contents, 'xml')('tr'))
52 152 1
5353
54 >>> print extract_text(BSS(browser.contents)('tr')[0])54 >>> print extract_text(BeautifulSoup(browser.contents, 'xml')('tr')[0])
55 Bugs in Jokosher55 Bugs in Jokosher
5656
57 >>> browser.open('http://feeds.launchpad.test/mozilla/latest-bugs.html')57 >>> browser.open('http://feeds.launchpad.test/mozilla/latest-bugs.html')
58 >>> len(BSS(browser.contents)('tr'))58 >>> len(BeautifulSoup(browser.contents, 'xml')('tr'))
59 159 1
6060
61 >>> print extract_text(BSS(browser.contents)('tr')[0])61 >>> print extract_text(BeautifulSoup(browser.contents, 'xml')('tr')[0])
62 Bugs in The Mozilla Project62 Bugs in The Mozilla Project
6363
64 >>> browser.open('http://feeds.launchpad.test/~name16/latest-bugs.html')64 >>> browser.open('http://feeds.launchpad.test/~name16/latest-bugs.html')
65 >>> len(BSS(browser.contents)('tr'))65 >>> len(BeautifulSoup(browser.contents, 'xml')('tr'))
66 166 1
6767
68 >>> print extract_text(BSS(browser.contents)('tr')[0])68 >>> print extract_text(BeautifulSoup(browser.contents, 'xml')('tr')[0])
69 Bugs for Foo Bar69 Bugs for Foo Bar
7070
71 >>> browser.open(71 >>> browser.open(
72 ... 'http://feeds.launchpad.test/~simple-team/latest-bugs.html')72 ... 'http://feeds.launchpad.test/~simple-team/latest-bugs.html')
73 >>> len(BSS(browser.contents)('tr'))73 >>> len(BeautifulSoup(browser.contents, 'xml')('tr'))
74 174 1
7575
76 >>> print extract_text(BSS(browser.contents)('tr')[0])76 >>> print extract_text(BeautifulSoup(browser.contents, 'xml')('tr')[0])
77 Bugs for Simple Team77 Bugs for Simple Team
7878
79 >>> browser.open('http://feeds.launchpad.test/bugs/+bugs.html?'79 >>> browser.open('http://feeds.launchpad.test/bugs/+bugs.html?'
80 ... 'field.searchtext=&search=Search+Bug+Reports&'80 ... 'field.searchtext=&search=Search+Bug+Reports&'
81 ... 'field.scope=all&field.scope.target=')81 ... 'field.scope=all&field.scope.target=')
82 >>> len(BSS(browser.contents)('tr'))82 >>> len(BeautifulSoup(browser.contents, 'xml')('tr'))
83 183 1
8484
85 >>> try:85 >>> try:
86 ... browser.open('http://feeds.launchpad.test/bugs/1/bug.html')86 ... browser.open('http://feeds.launchpad.test/bugs/1/bug.html')
87 ... except Unauthorized:87 ... except Unauthorized:
88 ... print "Shouldn't raise Unauthorized exception"88 ... print "Shouldn't raise Unauthorized exception"
89 >>> BSS(browser.contents)('entry')89 >>> BeautifulSoup(browser.contents, 'xml')('entry')
90 []90 []
9191
92Revert configuration change after tests are finished.92Revert configuration change after tests are finished.
diff --git a/lib/lp/services/feeds/tests/helper.py b/lib/lp/services/feeds/tests/helper.py
index e0a96aa..826df83 100644
--- a/lib/lp/services/feeds/tests/helper.py
+++ b/lib/lp/services/feeds/tests/helper.py
@@ -31,9 +31,11 @@ from zope.interface import (
31 implementer,31 implementer,
32 Interface,32 Interface,
33 )33 )
34from BeautifulSoup import BeautifulStoneSoup as BSS
35from BeautifulSoup import SoupStrainer
3634
35from lp.services.beautifulsoup import (
36 BeautifulSoup4 as BeautifulSoup,
37 SoupStrainer4 as SoupStrainer,
38 )
37from lp.services.webapp.publisher import LaunchpadView39from lp.services.webapp.publisher import LaunchpadView
3840
3941
@@ -62,25 +64,23 @@ class ThingFeedView(LaunchpadView):
62def parse_entries(contents):64def parse_entries(contents):
63 """Define a helper function for parsing feed entries."""65 """Define a helper function for parsing feed entries."""
64 strainer = SoupStrainer('entry')66 strainer = SoupStrainer('entry')
65 entries = [tag for tag in BSS(contents,67 entries = [
66 parseOnlyThese=strainer)]68 tag for tag in BeautifulSoup(contents, 'xml', parse_only=strainer)]
67 return entries69 return entries
6870
6971
70def parse_links(contents, rel):72def parse_links(contents, rel):
71 """Define a helper function for parsing feed links."""73 """Define a helper function for parsing feed links."""
72 strainer = SoupStrainer('link', rel=rel)74 strainer = SoupStrainer('link', rel=rel)
73 entries = [tag for tag in BSS(contents,75 entries = [
74 parseOnlyThese=strainer,76 tag for tag in BeautifulSoup(contents, 'xml', parse_only=strainer)]
75 selfClosingTags=['link'])]
76 return entries77 return entries
7778
7879
79def parse_ids(contents):80def parse_ids(contents):
80 """Define a helper function for parsing ids."""81 """Define a helper function for parsing ids."""
81 strainer = SoupStrainer('id')82 strainer = SoupStrainer('id')
82 ids = [tag for tag in BSS(contents,83 ids = [tag for tag in BeautifulSoup(contents, 'xml', parse_only=strainer)]
83 parseOnlyThese=strainer)]
84 return ids84 return ids
8585
8686

Subscribers

People subscribed via source and target branches

to status/vote changes:
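For reference, the conversion applied throughout this diff reduces to the pattern sketched below. This is a minimal standalone sketch using the bs4 package directly (an assumption: the lp.services.beautifulsoup wrappers imported by the tests are taken to behave like the plain bs4 names), and the sample feed string is invented for illustration.

    # Old (BS3):
    #   BeautifulStoneSoup(contents, parseOnlyThese=SoupStrainer('entry'),
    #                      selfClosingTags=['link'])
    # New (BS4): name the parser explicitly ('xml' selects lxml) and use the
    # parse_only keyword; the XML parser already knows which tags are
    # self-closing, so the old selfClosingTags argument is no longer needed.
    from bs4 import BeautifulSoup, SoupStrainer

    atom = (
        '<feed>'
        '<entry><id>tag:example,2008:1</id></entry>'
        '<entry><id>tag:example,2008:2</id></entry>'
        '</feed>')

    strainer = SoupStrainer('entry')
    soup = BeautifulSoup(atom, 'xml', parse_only=strainer)
    entries = soup.find_all('entry')
    print(len(entries))  # 2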