Merge ~cjwatson/launchpad:six-urllib into launchpad:master

Proposed by Colin Watson
Status: Merged
Approved by: Colin Watson
Approved revision: 3997e4d3841c7f0a2a3ace00c65909ac47e5b036
Merge reported by: Otto Co-Pilot
Merged at revision: not available
Proposed branch: ~cjwatson/launchpad:six-urllib
Merge into: launchpad:master
Diff against target: 2809 lines (+330/-315)
110 files modified
lib/launchpad_loggerhead/app.py (+6/-4)
lib/lp/answers/browser/faqcollection.py (+1/-1)
lib/lp/answers/browser/questiontarget.py (+1/-1)
lib/lp/answers/browser/tests/test_questiontarget.py (+1/-1)
lib/lp/app/browser/launchpad.py (+5/-4)
lib/lp/app/browser/tales.py (+2/-2)
lib/lp/app/browser/tests/test_vocabulary.py (+1/-1)
lib/lp/bugs/browser/buglisting.py (+5/-4)
lib/lp/bugs/browser/bugtarget.py (+6/-3)
lib/lp/bugs/browser/bugtask.py (+3/-3)
lib/lp/bugs/browser/person.py (+4/-4)
lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py (+1/-1)
lib/lp/bugs/browser/tests/test_bugtask.py (+2/-2)
lib/lp/bugs/browser/tests/test_structuralsubscription.py (+1/-2)
lib/lp/bugs/externalbugtracker/github.py (+4/-2)
lib/lp/bugs/externalbugtracker/roundup.py (+1/-1)
lib/lp/bugs/externalbugtracker/sourceforge.py (+3/-2)
lib/lp/bugs/externalbugtracker/xmlrpc.py (+4/-4)
lib/lp/bugs/model/bugwatch.py (+5/-3)
lib/lp/bugs/scripts/sfremoteproductfinder.py (+2/-3)
lib/lp/bugs/stories/bugs/xx-bugs-advanced-search-upstream-status.txt (+1/-1)
lib/lp/bugs/stories/webservice/xx-bug.txt (+1/-1)
lib/lp/bugs/tests/test_bugwatch.py (+1/-1)
lib/lp/buildmaster/interactor.py (+1/-1)
lib/lp/code/browser/branchlisting.py (+2/-2)
lib/lp/code/browser/codeimport.py (+1/-1)
lib/lp/code/browser/tests/test_gitsubscription.py (+1/-2)
lib/lp/code/interfaces/codehosting.py (+2/-2)
lib/lp/code/model/githosting.py (+4/-2)
lib/lp/code/model/gitref.py (+5/-5)
lib/lp/code/model/gitrepository.py (+1/-1)
lib/lp/code/stories/branches/xx-private-branch-listings.txt (+2/-2)
lib/lp/code/stories/branches/xx-subscribing-branches.txt (+1/-1)
lib/lp/codehosting/codeimport/tests/test_workermonitor.py (+2/-2)
lib/lp/codehosting/codeimport/worker.py (+4/-4)
lib/lp/codehosting/puller/tests/test_errors.py (+2/-2)
lib/lp/codehosting/puller/worker.py (+2/-2)
lib/lp/codehosting/scanner/buglinks.py (+2/-3)
lib/lp/codehosting/sshserver/session.py (+2/-2)
lib/lp/codehosting/tests/test_acceptance.py (+2/-2)
lib/lp/hardwaredb/stories/hwdb/xx-hwdb.txt (+1/-1)
lib/lp/hardwaredb/stories/webservice/xx-hwdb.txt (+1/-1)
lib/lp/registry/browser/mailinglists.py (+1/-1)
lib/lp/registry/browser/person.py (+6/-3)
lib/lp/registry/browser/product.py (+1/-1)
lib/lp/registry/browser/sourcepackage.py (+2/-3)
lib/lp/registry/browser/team.py (+1/-1)
lib/lp/registry/browser/tests/test_distroseries.py (+4/-2)
lib/lp/registry/browser/tests/test_product.py (+4/-2)
lib/lp/registry/browser/tests/test_sourcepackage_views.py (+6/-5)
lib/lp/registry/scripts/distributionmirror_prober.py (+13/-9)
lib/lp/registry/scripts/productreleasefinder/finder.py (+2/-2)
lib/lp/registry/scripts/productreleasefinder/walker.py (+5/-5)
lib/lp/registry/stories/product/xx-product-files.txt (+2/-2)
lib/lp/scripts/utilities/js/combo.py (+5/-3)
lib/lp/services/config/__init__.py (+4/-4)
lib/lp/services/feeds/feed.py (+1/-1)
lib/lp/services/gpg/handler.py (+3/-4)
lib/lp/services/librarian/client.py (+20/-16)
lib/lp/services/librarian/doc/librarian.txt (+1/-1)
lib/lp/services/librarian/model.py (+1/-1)
lib/lp/services/librarian/smoketest.py (+2/-2)
lib/lp/services/librarian/tests/test_client.py (+5/-5)
lib/lp/services/librarian/tests/test_smoketest.py (+19/-33)
lib/lp/services/librarianserver/db.py (+8/-6)
lib/lp/services/librarianserver/swift.py (+2/-3)
lib/lp/services/librarianserver/testing/fake.py (+1/-1)
lib/lp/services/librarianserver/testing/tests/test_server_fixture.py (+2/-1)
lib/lp/services/librarianserver/tests/test_db_outage.py (+4/-3)
lib/lp/services/librarianserver/tests/test_web.py (+1/-1)
lib/lp/services/librarianserver/web.py (+1/-1)
lib/lp/services/oauth/stories/access-token.txt (+1/-1)
lib/lp/services/oauth/stories/authorize-token.txt (+1/-1)
lib/lp/services/oauth/stories/request-token.txt (+1/-1)
lib/lp/services/scripts/base.py (+5/-5)
lib/lp/services/sitesearch/__init__.py (+7/-7)
lib/lp/services/verification/browser/logintoken.py (+6/-4)
lib/lp/services/webapp/doc/webapp-publication.txt (+1/-1)
lib/lp/services/webapp/errorlog.py (+3/-4)
lib/lp/services/webapp/login.py (+6/-7)
lib/lp/services/webapp/openid.py (+2/-2)
lib/lp/services/webapp/publication.py (+2/-2)
lib/lp/services/webapp/tests/test_login.py (+12/-9)
lib/lp/services/webapp/url.py (+2/-2)
lib/lp/services/webservice/wadl.py (+3/-2)
lib/lp/snappy/browser/snap.py (+1/-2)
lib/lp/snappy/browser/tests/test_snap.py (+4/-4)
lib/lp/snappy/model/snap.py (+1/-1)
lib/lp/snappy/model/snapstoreclient.py (+1/-1)
lib/lp/snappy/tests/test_snap.py (+1/-1)
lib/lp/snappy/tests/test_snapbuild.py (+1/-1)
lib/lp/soyuz/browser/widgets/archive.py (+2/-2)
lib/lp/soyuz/interfaces/archive.py (+1/-1)
lib/lp/soyuz/scripts/ppa_apache_log_parser.py (+3/-2)
lib/lp/soyuz/tests/test_livefsbuild.py (+1/-1)
lib/lp/soyuz/tests/test_packageupload.py (+1/-1)
lib/lp/testing/keyserver/tests/test_harness.py (+1/-1)
lib/lp/testing/layers.py (+5/-7)
lib/lp/testing/pages.py (+1/-1)
lib/lp/testing/tests/test_layers_functional.py (+3/-5)
lib/lp/testopenid/stories/basics.txt (+1/-1)
lib/lp/translations/browser/person.py (+2/-2)
lib/lp/translations/browser/pofile.py (+2/-2)
lib/lp/translations/browser/tests/test_persontranslationview.py (+2/-3)
lib/lp/translations/browser/translationmessage.py (+5/-3)
lib/lp/translations/doc/poexport-request-productseries.txt (+2/-2)
lib/lp/translations/doc/poexport-request.txt (+3/-3)
lib/lp/translations/stories/importqueue/xx-translation-import-queue.txt (+3/-3)
utilities/paste (+1/-2)
utilities/roundup-sniffer.py (+4/-4)
Reviewer: Tom Wardill (community)
Status: Approve
Review via email: mp+378460@code.launchpad.net

Commit message

Import urllib and friends from six.moves

Description of the change

This part of the standard library (urllib, urllib2, and urlparse) was rearranged into urllib.parse, urllib.request, and urllib.error in Python 3; importing the relevant names from six.moves.urllib works on both Python 2 and 3.
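
As a minimal sketch of the pattern this branch standardises on (the values below are illustrative, not taken from the diff), the old Python 2-only spellings become a single six.moves import that resolves correctly on both Python 2 and 3:

    # Python 2 only:
    #     import urllib; urllib.urlencode(...)
    #     import urlparse; urlparse.urljoin(...)
    # Portable spelling used throughout this branch:
    from six.moves.urllib.parse import urlencode, urljoin

    query = urlencode({'back_to': 'https://launchpad.test/'})
    url = urljoin('https://code.launchpad.test/', '~user/project/+branch')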

lp.testing.layers and its tests needed a few extra adjustments, since they were relying on some details of urllib.urlopen's error handling that differed from those in urllib2.urlopen / urllib.request.urlopen.
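
A hedged sketch of the difference those adjustments work around (url here is a placeholder): Python 2's urllib.urlopen returns a response object even for HTTP error statuses, so callers could read the error page directly, whereas urllib2.urlopen and urllib.request.urlopen raise HTTPError for the same statuses. Code written against the old behaviour ends up looking roughly like this after the switch:

    from six.moves.urllib.error import HTTPError
    from six.moves.urllib.request import urlopen

    try:
        body = urlopen(url).read()
    except HTTPError as e:
        # urllib.urlopen would have handed back the error page silently;
        # urllib2 / urllib.request surface it as an exception whose body
        # can still be read if the caller wants it.
        body = e.read()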

Revision history for this message
Tom Wardill (twom):
review: Approve

Preview Diff

1diff --git a/lib/launchpad_loggerhead/app.py b/lib/launchpad_loggerhead/app.py
2index cd98725..59225d7 100644
3--- a/lib/launchpad_loggerhead/app.py
4+++ b/lib/launchpad_loggerhead/app.py
5@@ -4,8 +4,6 @@
6 import logging
7 import os
8 import threading
9-import urllib
10-import urlparse
11 import xmlrpclib
12
13 from breezy import (
14@@ -42,6 +40,10 @@ from paste.request import (
15 parse_querystring,
16 path_info_pop,
17 )
18+from six.moves.urllib.parse import (
19+ urlencode,
20+ urljoin,
21+ )
22
23 from lp.code.interfaces.codehosting import (
24 BRANCH_TRANSPORT,
25@@ -127,7 +129,7 @@ class RootApp:
26 raise HTTPMovedPermanently(openid_request.redirectURL(
27 config.codehosting.secure_codebrowse_root,
28 config.codehosting.secure_codebrowse_root + '+login/?'
29- + urllib.urlencode({'back_to': back_to})))
30+ + urlencode({'back_to': back_to})))
31
32 def _complete_login(self, environ, start_response):
33 """Complete the OpenID authentication process.
34@@ -261,7 +263,7 @@ class RootApp:
35 environ['PATH_INFO'] = trail
36 environ['SCRIPT_NAME'] += consumed.rstrip('/')
37 branch_url = lp_server.get_url() + branch_name
38- branch_link = urlparse.urljoin(
39+ branch_link = urljoin(
40 config.codebrowse.launchpad_root, branch_name)
41 cachepath = os.path.join(
42 config.codebrowse.cachepath, branch_name[1:])
43diff --git a/lib/lp/answers/browser/faqcollection.py b/lib/lp/answers/browser/faqcollection.py
44index 03b0c28..67c07a7 100644
45--- a/lib/lp/answers/browser/faqcollection.py
46+++ b/lib/lp/answers/browser/faqcollection.py
47@@ -10,7 +10,7 @@ __all__ = [
48 'SearchFAQsView',
49 ]
50
51-from urllib import urlencode
52+from six.moves.urllib.parse import urlencode
53
54 from lp import _
55 from lp.answers.enums import (
56diff --git a/lib/lp/answers/browser/questiontarget.py b/lib/lp/answers/browser/questiontarget.py
57index 9fcf9e0..a80b36f 100644
58--- a/lib/lp/answers/browser/questiontarget.py
59+++ b/lib/lp/answers/browser/questiontarget.py
60@@ -21,13 +21,13 @@ __all__ = [
61 ]
62
63 from operator import attrgetter
64-from urllib import urlencode
65
66 from lazr.restful.interfaces import (
67 IJSONRequestCache,
68 IWebServiceClientRequest,
69 )
70 from simplejson import dumps
71+from six.moves.urllib.parse import urlencode
72 from zope.browserpage import ViewPageTemplateFile
73 from zope.component import (
74 getMultiAdapter,
75diff --git a/lib/lp/answers/browser/tests/test_questiontarget.py b/lib/lp/answers/browser/tests/test_questiontarget.py
76index 2c842c0..cd510b6 100644
77--- a/lib/lp/answers/browser/tests/test_questiontarget.py
78+++ b/lib/lp/answers/browser/tests/test_questiontarget.py
79@@ -8,7 +8,6 @@ from __future__ import absolute_import, print_function, unicode_literals
80 __metaclass__ = type
81
82 import os
83-from urllib import quote
84
85 from lazr.restful.interfaces import (
86 IJSONRequestCache,
87@@ -16,6 +15,7 @@ from lazr.restful.interfaces import (
88 )
89 from simplejson import dumps
90 import six
91+from six.moves.urllib.parse import quote
92 from zope.component import getUtility
93 from zope.security.proxy import removeSecurityProxy
94 from zope.traversing.browser import absoluteURL
95diff --git a/lib/lp/app/browser/launchpad.py b/lib/lp/app/browser/launchpad.py
96index 50ccd6c..d12c849 100644
97--- a/lib/lp/app/browser/launchpad.py
98+++ b/lib/lp/app/browser/launchpad.py
99@@ -27,9 +27,11 @@ import operator
100 import os
101 import re
102 import time
103-import urllib
104
105-from six.moves.urllib.parse import parse_qs
106+from six.moves.urllib.parse import (
107+ parse_qs,
108+ urlencode,
109+ )
110 from zope import i18n
111 from zope.component import (
112 getGlobalSiteManager,
113@@ -633,8 +635,7 @@ class LoginStatus:
114 if query_string:
115 query_dict = parse_qs(query_string, keep_blank_values=True)
116 query_dict.pop('loggingout', None)
117- query_string = urllib.urlencode(
118- sorted(query_dict.items()), doseq=True)
119+ query_string = urlencode(sorted(query_dict.items()), doseq=True)
120 # If we still have a query_string after things we don't want
121 # have been removed, add it onto the url.
122 if query_string:
123diff --git a/lib/lp/app/browser/tales.py b/lib/lp/app/browser/tales.py
124index 3466a83..c732f5e 100644
125--- a/lib/lp/app/browser/tales.py
126+++ b/lib/lp/app/browser/tales.py
127@@ -16,12 +16,12 @@ import os.path
128 import rfc822
129 import sys
130 from textwrap import dedent
131-import urllib
132
133 from lazr.enum import enumerated_type_registry
134 from lazr.restful.utils import get_current_browser_request
135 from lazr.uri import URI
136 import pytz
137+from six.moves.urllib.parse import quote
138 from zope.browserpage import ViewPageTemplateFile
139 from zope.component import (
140 adapter,
141@@ -1645,7 +1645,7 @@ class ProductReleaseFileFormatterAPI(ObjectFormatterAPI):
142 url = urlappend(canonical_url(self._release), '+download')
143 # Quote the filename to eliminate non-ascii characters which
144 # are invalid in the url.
145- return urlappend(url, urllib.quote(lfa.filename.encode('utf-8')))
146+ return urlappend(url, quote(lfa.filename.encode('utf-8')))
147
148
149 class BranchFormatterAPI(ObjectFormatterAPI):
150diff --git a/lib/lp/app/browser/tests/test_vocabulary.py b/lib/lp/app/browser/tests/test_vocabulary.py
151index 2036621..2904d8c 100644
152--- a/lib/lp/app/browser/tests/test_vocabulary.py
153+++ b/lib/lp/app/browser/tests/test_vocabulary.py
154@@ -6,10 +6,10 @@
155 __metaclass__ = type
156
157 from datetime import datetime
158-from urllib import urlencode
159
160 import pytz
161 import simplejson
162+from six.moves.urllib.parse import urlencode
163 from zope.component import (
164 getSiteManager,
165 getUtility,
166diff --git a/lib/lp/bugs/browser/buglisting.py b/lib/lp/bugs/browser/buglisting.py
167index 3cef8f9..c0e67a6 100644
168--- a/lib/lp/bugs/browser/buglisting.py
169+++ b/lib/lp/bugs/browser/buglisting.py
170@@ -22,7 +22,6 @@ __all__ = [
171 ]
172
173 import os.path
174-import urllib
175
176 from lazr.delegates import delegate_to
177 from lazr.restful.interfaces import IJSONRequestCache
178@@ -33,6 +32,8 @@ from simplejson.encoder import JSONEncoderForHTML
179 from six.moves.urllib.parse import (
180 parse_qs,
181 parse_qsl,
182+ quote,
183+ urlencode,
184 )
185 from zope.authentication.interfaces import IUnauthenticatedPrincipal
186 from zope.browserpage import ViewPageTemplateFile
187@@ -245,7 +246,7 @@ def rewrite_old_bugtask_status_query_string(query_string):
188 if query_elements == query_elements_mapped:
189 return query_string
190 else:
191- return urllib.urlencode(query_elements_mapped, doseq=True)
192+ return urlencode(query_elements_mapped, doseq=True)
193
194
195 def target_has_expirable_bugs_listing(target):
196@@ -593,7 +594,7 @@ def get_buglisting_search_filter_url(
197 if orderby is not None:
198 search_params.append(('orderby', orderby))
199
200- query_string = urllib.urlencode(search_params, doseq=True)
201+ query_string = urlencode(search_params, doseq=True)
202
203 search_filter_url = "+bugs?search=Search"
204 if query_string != '':
205@@ -688,7 +689,7 @@ class BugTaskListingItem:
206 'reporter': reporter.displayname,
207 'status': self.status.title,
208 'status_class': 'status' + self.status.name,
209- 'tags': [{'url': base_tag_url + urllib.quote(tag), 'tag': tag}
210+ 'tags': [{'url': base_tag_url + quote(tag), 'tag': tag}
211 for tag in self.tags],
212 'title': self.bug.title,
213 }
214diff --git a/lib/lp/bugs/browser/bugtarget.py b/lib/lp/bugs/browser/bugtarget.py
215index 3dfc332..13e45a1 100644
216--- a/lib/lp/bugs/browser/bugtarget.py
217+++ b/lib/lp/bugs/browser/bugtarget.py
218@@ -23,13 +23,16 @@ from cStringIO import StringIO
219 from datetime import datetime
220 from functools import partial
221 from operator import itemgetter
222-import urllib
223
224 from lazr.restful.interface import copy_field
225 from lazr.restful.interfaces import IJSONRequestCache
226 from pytz import timezone
227 from simplejson import dumps
228 from six.moves import http_client
229+from six.moves.urllib.parse import (
230+ quote,
231+ urlencode,
232+ )
233 from sqlobject import SQLObjectNotFound
234 from zope.browserpage import ViewPageTemplateFile
235 from zope.component import getUtility
236@@ -1100,7 +1103,7 @@ class ProjectGroupFileBugGuidedView(LaunchpadFormView):
237 base = canonical_url(
238 data['product'], view_name='+filebug', rootsite='bugs')
239 title = data['title'].encode('utf8')
240- query = urllib.urlencode([
241+ query = urlencode([
242 ('field.title', title),
243 ('field.tags', ' '.join(data['tags'])),
244 ])
245@@ -1217,7 +1220,7 @@ class BugTargetBugTagsView(LaunchpadView):
246 def _getSearchURL(self, tag):
247 """Return the search URL for the tag."""
248 # Use path_only here to reduce the size of the rendered page.
249- return "+bugs?field.tag=%s" % urllib.quote(tag)
250+ return "+bugs?field.tag=%s" % quote(tag)
251
252 @property
253 def tags_cloud_data(self):
254diff --git a/lib/lp/bugs/browser/bugtask.py b/lib/lp/bugs/browser/bugtask.py
255index 3cc4ad5..77e7f28 100644
256--- a/lib/lp/bugs/browser/bugtask.py
257+++ b/lib/lp/bugs/browser/bugtask.py
258@@ -35,7 +35,6 @@ from datetime import (
259 from itertools import groupby
260 from operator import attrgetter
261 import re
262-import urllib
263
264 from lazr.delegates import delegate_to
265 from lazr.lifecycle.event import ObjectModifiedEvent
266@@ -50,6 +49,7 @@ from lazr.restful.interfaces import (
267 from lazr.restful.utils import smartquote
268 from pytz import utc
269 from simplejson import dumps
270+from six.moves.urllib.parse import quote
271 import transaction
272 from zope import formlib
273 from zope.browserpage import ViewPageTemplateFile
274@@ -864,7 +864,7 @@ class BugTaskView(LaunchpadView, BugViewMixin, FeedsMixin):
275 if tag in target_official_tags:
276 links.append((tag, '%s?field.tag=%s' % (
277 canonical_url(self.context.target, view_name='+bugs',
278- force_local_path=True), urllib.quote(tag))))
279+ force_local_path=True), quote(tag))))
280 return links
281
282 @property
283@@ -876,7 +876,7 @@ class BugTaskView(LaunchpadView, BugViewMixin, FeedsMixin):
284 if tag not in target_official_tags:
285 links.append((tag, '%s?field.tag=%s' % (
286 canonical_url(self.context.target, view_name='+bugs',
287- force_local_path=True), urllib.quote(tag))))
288+ force_local_path=True), quote(tag))))
289 return links
290
291 @property
292diff --git a/lib/lp/bugs/browser/person.py b/lib/lp/bugs/browser/person.py
293index 77dff55..c25219d 100644
294--- a/lib/lp/bugs/browser/person.py
295+++ b/lib/lp/bugs/browser/person.py
296@@ -18,8 +18,8 @@ __all__ = [
297
298 import copy
299 from operator import itemgetter
300-import urllib
301
302+from six.moves.urllib.parse import urlencode
303 from zope.component import getUtility
304
305 from lp.bugs.browser.buglisting import BugTaskSearchListingView
306@@ -58,14 +58,14 @@ def get_package_search_url(dsp_bugs_url, extra_params=None):
307 "field.status": [
308 status.title for status in UNRESOLVED_BUGTASK_STATUSES]}
309 if extra_params is not None:
310- # We must UTF-8 encode searchtext to play nicely with
311- # urllib.urlencode, because it may contain non-ASCII characters.
312+ # We must UTF-8 encode searchtext to play nicely with urlencode,
313+ # because it may contain non-ASCII characters.
314 if 'field.searchtext' in extra_params:
315 extra_params["field.searchtext"] = (
316 extra_params["field.searchtext"].encode("utf8"))
317 params.update(extra_params)
318 return '%s?%s' % (
319- dsp_bugs_url, urllib.urlencode(sorted(params.items()), doseq=True))
320+ dsp_bugs_url, urlencode(sorted(params.items()), doseq=True))
321
322
323 class PersonBugsMenu(NavigationMenu):
324diff --git a/lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py b/lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py
325index 1ae3b1e..9454ff7 100644
326--- a/lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py
327+++ b/lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py
328@@ -6,9 +6,9 @@
329 __metaclass__ = type
330
331 import json
332-from urlparse import urlparse
333
334 from lxml import html
335+from six.moves.urllib.parse import urlparse
336 from testtools.matchers import StartsWith
337
338 from lp.app.enums import InformationType
339diff --git a/lib/lp/bugs/browser/tests/test_bugtask.py b/lib/lp/bugs/browser/tests/test_bugtask.py
340index 3648a95..b134c89 100644
341--- a/lib/lp/bugs/browser/tests/test_bugtask.py
342+++ b/lib/lp/bugs/browser/tests/test_bugtask.py
343@@ -8,12 +8,12 @@ from datetime import (
344 timedelta,
345 )
346 import re
347-import urllib
348
349 from lazr.restful.interfaces import IJSONRequestCache
350 from pytz import UTC
351 import simplejson
352 import six
353+from six.moves.urllib.parse import urlencode
354 import soupmatchers
355 from testscenarios import (
356 load_tests_apply_scenarios,
357@@ -2000,7 +2000,7 @@ class TestBugTaskSearchListingView(BrowserTestCase):
358 query_vars['start'] = int(memo) - size
359 if not forwards:
360 query_vars['direction'] = 'backwards'
361- query_string = urllib.urlencode(query_vars)
362+ query_string = urlencode(query_vars)
363 request = LaunchpadTestRequest(
364 QUERY_STRING=query_string, orderby=orderby, HTTP_COOKIE=cookie)
365 if bugtask is None:
366diff --git a/lib/lp/bugs/browser/tests/test_structuralsubscription.py b/lib/lp/bugs/browser/tests/test_structuralsubscription.py
367index ff2b160..89d8e07 100644
368--- a/lib/lp/bugs/browser/tests/test_structuralsubscription.py
369+++ b/lib/lp/bugs/browser/tests/test_structuralsubscription.py
370@@ -3,8 +3,7 @@
371
372 """Tests for structural subscription traversal."""
373
374-from urlparse import urlparse
375-
376+from six.moves.urllib.parse import urlparse
377 from zope.publisher.interfaces import NotFound
378
379 from lp.registry.browser.distribution import DistributionNavigation
380diff --git a/lib/lp/bugs/externalbugtracker/github.py b/lib/lp/bugs/externalbugtracker/github.py
381index 038a532..31d91b7 100644
382--- a/lib/lp/bugs/externalbugtracker/github.py
383+++ b/lib/lp/bugs/externalbugtracker/github.py
384@@ -13,12 +13,14 @@ __all__ = [
385
386 from contextlib import contextmanager
387 import time
388-from urllib import urlencode
389-from urlparse import urlunsplit
390
391 import pytz
392 import requests
393 from six.moves import http_client
394+from six.moves.urllib.parse import (
395+ urlencode,
396+ urlunsplit,
397+ )
398 from zope.component import getUtility
399 from zope.interface import Interface
400
401diff --git a/lib/lp/bugs/externalbugtracker/roundup.py b/lib/lp/bugs/externalbugtracker/roundup.py
402index ea43711..f862aec 100644
403--- a/lib/lp/bugs/externalbugtracker/roundup.py
404+++ b/lib/lp/bugs/externalbugtracker/roundup.py
405@@ -7,9 +7,9 @@ __metaclass__ = type
406 __all__ = ['Roundup']
407
408 import csv
409-from urllib import quote_plus
410
411 from lazr.uri import URI
412+from six.moves.urllib.parse import quote_plus
413
414 from lp.bugs.externalbugtracker import (
415 BugNotFound,
416diff --git a/lib/lp/bugs/externalbugtracker/sourceforge.py b/lib/lp/bugs/externalbugtracker/sourceforge.py
417index b61c4b1..fdc7441 100644
418--- a/lib/lp/bugs/externalbugtracker/sourceforge.py
419+++ b/lib/lp/bugs/externalbugtracker/sourceforge.py
420@@ -7,7 +7,8 @@ __metaclass__ = type
421 __all__ = ['SourceForge']
422
423 import re
424-import urllib
425+
426+from six.moves.urllib.parse import splitvalue
427
428 from lp.bugs.externalbugtracker import (
429 BugNotFound,
430@@ -103,7 +104,7 @@ class SourceForge(ExternalBugTracker):
431
432 query_bits = query.split('&')
433 for bit in query_bits:
434- key, value = urllib.splitvalue(bit)
435+ key, value = splitvalue(bit)
436 query_dict[key] = value
437
438 try:
439diff --git a/lib/lp/bugs/externalbugtracker/xmlrpc.py b/lib/lp/bugs/externalbugtracker/xmlrpc.py
440index a3e99f4..6257d52 100644
441--- a/lib/lp/bugs/externalbugtracker/xmlrpc.py
442+++ b/lib/lp/bugs/externalbugtracker/xmlrpc.py
443@@ -10,10 +10,6 @@ __all__ = [
444
445
446 from io import BytesIO
447-from urlparse import (
448- urlparse,
449- urlunparse,
450- )
451 from xmlrpclib import (
452 ProtocolError,
453 Transport,
454@@ -23,6 +19,10 @@ from defusedxml.xmlrpc import monkey_patch
455 import requests
456 from requests.cookies import RequestsCookieJar
457 import six
458+from six.moves.urllib.parse import (
459+ urlparse,
460+ urlunparse,
461+ )
462
463 from lp.bugs.externalbugtracker.base import repost_on_redirect_hook
464 from lp.services.config import config
465diff --git a/lib/lp/bugs/model/bugwatch.py b/lib/lp/bugs/model/bugwatch.py
466index 18fd674..a9f430d 100644
467--- a/lib/lp/bugs/model/bugwatch.py
468+++ b/lib/lp/bugs/model/bugwatch.py
469@@ -11,13 +11,15 @@ __all__ = [
470
471 from datetime import datetime
472 import re
473-import urllib
474-from urlparse import urlunsplit
475
476 from lazr.lifecycle.event import ObjectModifiedEvent
477 from lazr.lifecycle.snapshot import Snapshot
478 from lazr.uri import find_uris_in_text
479 from pytz import utc
480+from six.moves.urllib.parse import (
481+ splitvalue,
482+ urlunsplit,
483+ )
484 from sqlobject import (
485 ForeignKey,
486 SQLObjectNotFound,
487@@ -722,7 +724,7 @@ class BugWatchSet:
488 scheme, host, path, query_string, frag = urlsplit(url)
489 query = {}
490 for query_part in query_string.split('&'):
491- key, value = urllib.splitvalue(query_part)
492+ key, value = splitvalue(query_part)
493 query[key] = value
494
495 bugtracker_data = parse_func(scheme, host, path, query)
496diff --git a/lib/lp/bugs/scripts/sfremoteproductfinder.py b/lib/lp/bugs/scripts/sfremoteproductfinder.py
497index b6342f3..66a88fd 100644
498--- a/lib/lp/bugs/scripts/sfremoteproductfinder.py
499+++ b/lib/lp/bugs/scripts/sfremoteproductfinder.py
500@@ -8,9 +8,8 @@ __all__ = [
501 'SourceForgeRemoteProductFinder',
502 ]
503
504-import urllib
505-
506 import requests
507+from six.moves.urllib.parse import splitvalue
508 from zope.component import getUtility
509
510 from lp.app.interfaces.launchpad import ILaunchpadCelebrities
511@@ -101,7 +100,7 @@ class SourceForgeRemoteProductFinder:
512 query_dict = {}
513 query_bits = query.split('&')
514 for bit in query_bits:
515- key, value = urllib.splitvalue(bit)
516+ key, value = splitvalue(bit)
517 query_dict[key] = value
518
519 try:
520diff --git a/lib/lp/bugs/stories/bugs/xx-bugs-advanced-search-upstream-status.txt b/lib/lp/bugs/stories/bugs/xx-bugs-advanced-search-upstream-status.txt
521index 92347e0..ce0f6db 100644
522--- a/lib/lp/bugs/stories/bugs/xx-bugs-advanced-search-upstream-status.txt
523+++ b/lib/lp/bugs/stories/bugs/xx-bugs-advanced-search-upstream-status.txt
524@@ -144,7 +144,7 @@ status. Bookmarks of such searches work nevertheless.
525
526 The user opens a bookmark for "upstream status doesn't matter"
527
528- >>> from urllib import urlencode
529+ >>> from six.moves.urllib.parse import urlencode
530 >>> bookmark_params = {
531 ... 'field.status_upstream': '',
532 ... 'field.status_upstream-empty-marker': '1',
533diff --git a/lib/lp/bugs/stories/webservice/xx-bug.txt b/lib/lp/bugs/stories/webservice/xx-bug.txt
534index 9fc5a22..9154525 100644
535--- a/lib/lp/bugs/stories/webservice/xx-bug.txt
536+++ b/lib/lp/bugs/stories/webservice/xx-bug.txt
537@@ -1297,7 +1297,7 @@ we must follow to download the data.
538 Location: http://.../numbers.txt
539 ...
540
541- >>> from urllib2 import urlopen
542+ >>> from six.moves.urllib.request import urlopen
543
544 >>> data = None
545 >>> conn = urlopen(data_response.getHeader('Location'))
546diff --git a/lib/lp/bugs/tests/test_bugwatch.py b/lib/lp/bugs/tests/test_bugwatch.py
547index 7dd061f..50996b5 100644
548--- a/lib/lp/bugs/tests/test_bugwatch.py
549+++ b/lib/lp/bugs/tests/test_bugwatch.py
550@@ -10,10 +10,10 @@ from datetime import (
551 timedelta,
552 )
553 import re
554-from urlparse import urlunsplit
555
556 from lazr.lifecycle.snapshot import Snapshot
557 from pytz import utc
558+from six.moves.urllib.parse import urlunsplit
559 from storm.store import Store
560 from testscenarios import (
561 load_tests_apply_scenarios,
562diff --git a/lib/lp/buildmaster/interactor.py b/lib/lp/buildmaster/interactor.py
563index 1605cda..4f809c4 100644
564--- a/lib/lp/buildmaster/interactor.py
565+++ b/lib/lp/buildmaster/interactor.py
566@@ -12,8 +12,8 @@ from collections import namedtuple
567 import logging
568 import os.path
569 import tempfile
570-from urlparse import urlparse
571
572+from six.moves.urllib.parse import urlparse
573 import transaction
574 from twisted.internet import (
575 defer,
576diff --git a/lib/lp/code/browser/branchlisting.py b/lib/lp/code/browser/branchlisting.py
577index 176733b..f472ef0 100644
578--- a/lib/lp/code/browser/branchlisting.py
579+++ b/lib/lp/code/browser/branchlisting.py
580@@ -27,13 +27,13 @@ __all__ = [
581 ]
582
583 from operator import attrgetter
584-import urlparse
585
586 from lazr.delegates import delegate_to
587 from lazr.enum import (
588 EnumeratedType,
589 Item,
590 )
591+from six.moves.urllib.parse import parse_qs
592 from storm.expr import Desc
593 from zope.browserpage import ViewPageTemplateFile
594 from zope.component import getUtility
595@@ -496,7 +496,7 @@ class BranchListingView(LaunchpadFormView, FeedsMixin):
596 @property
597 def template(self):
598 query_string = self.request.get('QUERY_STRING') or ''
599- query_params = urlparse.parse_qs(query_string)
600+ query_params = parse_qs(query_string)
601 render_table_only = 'batch_request' in query_params
602 if render_table_only:
603 return self.table_only_template
604diff --git a/lib/lp/code/browser/codeimport.py b/lib/lp/code/browser/codeimport.py
605index dc7d9e6..635347d 100644
606--- a/lib/lp/code/browser/codeimport.py
607+++ b/lib/lp/code/browser/codeimport.py
608@@ -20,12 +20,12 @@ __all__ = [
609 ]
610
611 from textwrap import dedent
612-from urlparse import urlparse
613
614 from lazr.restful.interface import (
615 copy_field,
616 use_template,
617 )
618+from six.moves.urllib.parse import urlparse
619 from zope.component import (
620 getUtility,
621 queryAdapter,
622diff --git a/lib/lp/code/browser/tests/test_gitsubscription.py b/lib/lp/code/browser/tests/test_gitsubscription.py
623index 62346ff..2997c93 100644
624--- a/lib/lp/code/browser/tests/test_gitsubscription.py
625+++ b/lib/lp/code/browser/tests/test_gitsubscription.py
626@@ -7,9 +7,8 @@ from __future__ import absolute_import, print_function, unicode_literals
627
628 __metaclass__ = type
629
630-from urllib import urlencode
631-
632 from fixtures import FakeLogger
633+from six.moves.urllib.parse import urlencode
634 from testtools.matchers import MatchesStructure
635 from zope.security.interfaces import Unauthorized
636 from zope.testbrowser.browser import LinkNotFoundError
637diff --git a/lib/lp/code/interfaces/codehosting.py b/lib/lp/code/interfaces/codehosting.py
638index 447e978..b5adc52 100644
639--- a/lib/lp/code/interfaces/codehosting.py
640+++ b/lib/lp/code/interfaces/codehosting.py
641@@ -22,10 +22,10 @@ __all__ = [
642 ]
643
644 import os.path
645-import urllib
646
647 from lazr.uri import URI
648 import six
649+from six.moves.urllib.parse import quote
650 from zope.interface import Interface
651
652 from lp.app.validators.name import valid_name
653@@ -204,7 +204,7 @@ def compose_public_url(scheme, unique_name, suffix=None):
654 host = URI(config.codehosting.supermirror_root).host
655 # After quoting and encoding, the path should be perfectly
656 # safe as a plain ASCII string, str() just enforces this
657- path = '/' + str(urllib.quote(six.ensure_binary(unique_name), safe='/~+'))
658+ path = '/' + str(quote(six.ensure_binary(unique_name), safe='/~+'))
659 if suffix:
660 path = os.path.join(path, suffix)
661 return str(URI(scheme=scheme, host=host, path=path))
662diff --git a/lib/lp/code/model/githosting.py b/lib/lp/code/model/githosting.py
663index d895967..94d6538 100644
664--- a/lib/lp/code/model/githosting.py
665+++ b/lib/lp/code/model/githosting.py
666@@ -11,12 +11,14 @@ __all__ = [
667 import base64
668 import json
669 import sys
670-from urllib import quote
671-from urlparse import urljoin
672
673 from lazr.restful.utils import get_current_browser_request
674 import requests
675 from six import reraise
676+from six.moves.urllib.parse import (
677+ quote,
678+ urljoin,
679+ )
680 from zope.interface import implementer
681
682 from lp.code.errors import (
683diff --git a/lib/lp/code/model/gitref.py b/lib/lp/code/model/gitref.py
684index 526b815..3667eb7 100644
685--- a/lib/lp/code/model/gitref.py
686+++ b/lib/lp/code/model/gitref.py
687@@ -12,16 +12,16 @@ __all__ = [
688 from functools import partial
689 import json
690 import re
691-from urllib import (
692- quote,
693- quote_plus,
694- )
695-from urlparse import urlsplit
696
697 from lazr.lifecycle.event import ObjectCreatedEvent
698 import pytz
699 import requests
700 import six
701+from six.moves.urllib.parse import (
702+ quote,
703+ quote_plus,
704+ urlsplit,
705+ )
706 from storm.locals import (
707 DateTime,
708 Int,
709diff --git a/lib/lp/code/model/gitrepository.py b/lib/lp/code/model/gitrepository.py
710index eb9c251..6260c16 100644
711--- a/lib/lp/code/model/gitrepository.py
712+++ b/lib/lp/code/model/gitrepository.py
713@@ -25,7 +25,6 @@ from itertools import (
714 groupby,
715 )
716 from operator import attrgetter
717-from urllib import quote_plus
718
719 from breezy import urlutils
720 from lazr.enum import DBItem
721@@ -33,6 +32,7 @@ from lazr.lifecycle.event import ObjectModifiedEvent
722 from lazr.lifecycle.snapshot import Snapshot
723 import pytz
724 import six
725+from six.moves.urllib.parse import quote_plus
726 from storm.databases.postgres import Returning
727 from storm.expr import (
728 And,
729diff --git a/lib/lp/code/stories/branches/xx-private-branch-listings.txt b/lib/lp/code/stories/branches/xx-private-branch-listings.txt
730index 03c21db..e0ee96e 100644
731--- a/lib/lp/code/stories/branches/xx-private-branch-listings.txt
732+++ b/lib/lp/code/stories/branches/xx-private-branch-listings.txt
733@@ -141,7 +141,7 @@ Person code listing pages
734 The person code listings is the other obvious place to filter out the
735 viewable branches.
736
737- >>> import urllib
738+ >>> from six.moves.urllib.parse import urlencode
739 >>> def print_person_code_listing(browser, category=None):
740 ... params = {'batch': '15'}
741 ... if category is not None:
742@@ -149,7 +149,7 @@ viewable branches.
743 ... # The batch argument is given to override the default batch
744 ... # size of five.
745 ... full_url = 'http://code.launchpad.test/~name12?%s' % (
746- ... urllib.urlencode(params),)
747+ ... urlencode(params),)
748 ... browser.open(full_url)
749 ... table = find_tag_by_id(browser.contents, 'branchtable')
750 ... branches = []
751diff --git a/lib/lp/code/stories/branches/xx-subscribing-branches.txt b/lib/lp/code/stories/branches/xx-subscribing-branches.txt
752index c4a3a73..315a00e 100644
753--- a/lib/lp/code/stories/branches/xx-subscribing-branches.txt
754+++ b/lib/lp/code/stories/branches/xx-subscribing-branches.txt
755@@ -120,7 +120,7 @@ shown to the user.
756 Clicking the back button and then clicking on either Change or
757 Unsubscribe will give a message that we are not subscribed.
758
759- >>> from urllib import urlencode
760+ >>> from six.moves.urllib.parse import urlencode
761 >>> browser.addHeader('Referer', 'https://launchpad.test/')
762 >>> browser.open(
763 ... form_url,
764diff --git a/lib/lp/codehosting/codeimport/tests/test_workermonitor.py b/lib/lp/codehosting/codeimport/tests/test_workermonitor.py
765index 04a43ba..047cbc9 100644
766--- a/lib/lp/codehosting/codeimport/tests/test_workermonitor.py
767+++ b/lib/lp/codehosting/codeimport/tests/test_workermonitor.py
768@@ -13,12 +13,12 @@ import shutil
769 import StringIO
770 import subprocess
771 import tempfile
772-import urllib
773
774 from bzrlib.branch import Branch
775 from bzrlib.tests import TestCaseInTempDir
776 from dulwich.repo import Repo as GitRepo
777 import oops_twisted
778+from six.moves.urllib.request import urlopen
779 from testtools.twistedsupport import (
780 assert_fails_with,
781 AsynchronousDeferredRunTest,
782@@ -368,7 +368,7 @@ class TestWorkerMonitorUnit(TestCase):
783 def check_file_uploaded(result):
784 transaction.abort()
785 url = worker_monitor.codeimport_endpoint.calls[0][3]
786- text = urllib.urlopen(url).read()
787+ text = urlopen(url).read()
788 self.assertEqual(log_text, text)
789
790 return worker_monitor.finishJob(
791diff --git a/lib/lp/codehosting/codeimport/worker.py b/lib/lp/codehosting/codeimport/worker.py
792index 8916e8d..e20a6f3 100644
793--- a/lib/lp/codehosting/codeimport/worker.py
794+++ b/lib/lp/codehosting/codeimport/worker.py
795@@ -24,10 +24,6 @@ import io
796 import os
797 import shutil
798 import subprocess
799-from urlparse import (
800- urlsplit,
801- urlunsplit,
802- )
803
804 # FIRST Ensure correct plugins are loaded. Do not delete this comment or the
805 # line below this comment.
806@@ -78,6 +74,10 @@ from lazr.uri import (
807 )
808 from pymacaroons import Macaroon
809 import SCM
810+from six.moves.urllib.parse import (
811+ urlsplit,
812+ urlunsplit,
813+ )
814
815 from lp.code.interfaces.branch import get_blacklisted_hostnames
816 from lp.codehosting.codeimport.foreigntree import CVSWorkingTree
817diff --git a/lib/lp/codehosting/puller/tests/test_errors.py b/lib/lp/codehosting/puller/tests/test_errors.py
818index 8abc1e5..18ae10e 100644
819--- a/lib/lp/codehosting/puller/tests/test_errors.py
820+++ b/lib/lp/codehosting/puller/tests/test_errors.py
821@@ -8,7 +8,6 @@ __metaclass__ = type
822 import os
823 import socket
824 import tempfile
825-import urllib2
826
827 from breezy.errors import (
828 BzrError,
829@@ -23,6 +22,7 @@ from breezy.url_policy_open import (
830 )
831 from lazr.uri import InvalidURIError
832 from six.moves import http_client
833+from six.moves.urllib.error import HTTPError
834
835 from lp.code.enums import BranchType
836 from lp.codehosting.puller.worker import (
837@@ -141,7 +141,7 @@ class TestErrorCatching(TestCase):
838 # If the source branch requires HTTP authentication, say so in the
839 # error message.
840 msg = self.getMirrorFailureForException(
841- urllib2.HTTPError(
842+ HTTPError(
843 'http://something', http_client.UNAUTHORIZED,
844 'Authorization Required', 'some headers',
845 os.fdopen(tempfile.mkstemp()[0])))
846diff --git a/lib/lp/codehosting/puller/worker.py b/lib/lp/codehosting/puller/worker.py
847index a15ee1a..0767cb8 100644
848--- a/lib/lp/codehosting/puller/worker.py
849+++ b/lib/lp/codehosting/puller/worker.py
850@@ -5,7 +5,6 @@ __metaclass__ = type
851
852 import socket
853 import sys
854-import urllib2
855
856 # FIRST Ensure correct plugins are loaded. Do not delete this comment or the
857 # line below this comment.
858@@ -41,6 +40,7 @@ from lazr.uri import (
859 URI,
860 )
861 from six.moves import http_client
862+from six.moves.urllib.error import HTTPError
863
864 from lp.code.bzr import (
865 BranchFormat,
866@@ -387,7 +387,7 @@ class PullerWorker:
867 # add further encountered errors from the production runs here
868 # ------ HERE ---------
869 #
870- except urllib2.HTTPError as e:
871+ except HTTPError as e:
872 msg = str(e)
873 if int(e.code) == http_client.UNAUTHORIZED:
874 # Maybe this will be caught in bzrlib one day, and then we'll
875diff --git a/lib/lp/codehosting/scanner/buglinks.py b/lib/lp/codehosting/scanner/buglinks.py
876index 95d8eaa..f9609c3 100644
877--- a/lib/lp/codehosting/scanner/buglinks.py
878+++ b/lib/lp/codehosting/scanner/buglinks.py
879@@ -8,9 +8,8 @@ __all__ = [
880 'BugBranchLinker',
881 ]
882
883-import urlparse
884-
885 from breezy.bugtracker import InvalidBugStatus
886+from six.moves.urllib.parse import urlsplit
887 from zope.component import getUtility
888
889 from lp.app.errors import NotFoundError
890@@ -25,7 +24,7 @@ class BugBranchLinker:
891 self.db_branch = db_branch
892
893 def _getBugFromUrl(self, url):
894- protocol, host, path, ignored, ignored = urlparse.urlsplit(url)
895+ protocol, host, path, ignored, ignored = urlsplit(url)
896
897 # Skip URLs that don't point to Launchpad.
898 if host != 'launchpad.net':
899diff --git a/lib/lp/codehosting/sshserver/session.py b/lib/lp/codehosting/sshserver/session.py
900index 91d31e4..38bea68 100644
901--- a/lib/lp/codehosting/sshserver/session.py
902+++ b/lib/lp/codehosting/sshserver/session.py
903@@ -12,11 +12,11 @@ import os
904 import signal
905 import socket
906 import sys
907-import urlparse
908
909 from lazr.sshserver.events import AvatarEvent
910 from lazr.sshserver.session import DoNothingSession
911 from six import reraise
912+from six.moves.urllib.parse import urlparse
913 from twisted.internet import (
914 error,
915 interfaces,
916@@ -471,7 +471,7 @@ def launch_smart_server(avatar):
917 environment = dict(os.environ)
918
919 # Extract the hostname from the supermirror root config.
920- hostname = urlparse.urlparse(config.codehosting.supermirror_root)[1]
921+ hostname = urlparse(config.codehosting.supermirror_root)[1]
922 environment['BRZ_EMAIL'] = '%s@%s' % (avatar.username, hostname)
923 # TODO: Use a FeatureFlag to enable this in a more fine-grained approach.
924 # If the forking daemon has been spawned, then we can use it if the
925diff --git a/lib/lp/codehosting/tests/test_acceptance.py b/lib/lp/codehosting/tests/test_acceptance.py
926index 1b4e1bf..23ee03e 100644
927--- a/lib/lp/codehosting/tests/test_acceptance.py
928+++ b/lib/lp/codehosting/tests/test_acceptance.py
929@@ -11,7 +11,6 @@ import signal
930 import subprocess
931 import sys
932 import time
933-import urllib2
934 import xmlrpclib
935
936 import breezy.branch
937@@ -19,6 +18,7 @@ from breezy.tests import TestCaseWithTransport
938 from breezy.tests.per_repository import all_repository_format_scenarios
939 from breezy.urlutils import local_path_from_url
940 from breezy.workingtree import WorkingTree
941+from six.moves.urllib.request import urlopen
942 from testscenarios import (
943 load_tests_apply_scenarios,
944 WithScenarios,
945@@ -747,7 +747,7 @@ class SmartserverTests(WithScenarios, SSHTestCase):
946 self.assertEqual('tcp:', config.codehosting.web_status_port[:4])
947 port = int(config.codehosting.web_status_port[4:])
948 web_status_url = 'http://localhost:%d/' % port
949- urllib2.urlopen(web_status_url)
950+ urlopen(web_status_url)
951
952
953 load_tests = load_tests_apply_scenarios
954diff --git a/lib/lp/hardwaredb/stories/hwdb/xx-hwdb.txt b/lib/lp/hardwaredb/stories/hwdb/xx-hwdb.txt
955index 988534b..98128d1 100644
956--- a/lib/lp/hardwaredb/stories/hwdb/xx-hwdb.txt
957+++ b/lib/lp/hardwaredb/stories/hwdb/xx-hwdb.txt
958@@ -196,7 +196,7 @@ present in the form and submits this data. Unfortunately, it sometimes
959 simply omits some fields. In such a case, we return an extra header
960 "Required fields not contained in POST data".
961
962- >>> from urllib import urlencode
963+ >>> from six.moves.urllib.parse import urlencode
964 >>> form_data = {
965 ... 'field.date_created': '2009-01-01',
966 ... 'field.format': 'VERSION_1',
967diff --git a/lib/lp/hardwaredb/stories/webservice/xx-hwdb.txt b/lib/lp/hardwaredb/stories/webservice/xx-hwdb.txt
968index 0ca3d45..73194eb 100644
969--- a/lib/lp/hardwaredb/stories/webservice/xx-hwdb.txt
970+++ b/lib/lp/hardwaredb/stories/webservice/xx-hwdb.txt
971@@ -565,7 +565,7 @@ We can limit the result set to submissions where the device is accessed
972 by a specific driver. Device 2 is a USB controller, so will get the sample
973 submission when we set the parameter driver to the usb driver...
974
975- >>> from urllib import urlencode
976+ >>> from six.moves.urllib.parse import urlencode
977 >>> parameters = {
978 ... 'ws.op': 'getSubmissions',
979 ... 'driver': usb_driver['self_link'],
980diff --git a/lib/lp/registry/browser/mailinglists.py b/lib/lp/registry/browser/mailinglists.py
981index ea422df..9fa6dd2 100644
982--- a/lib/lp/registry/browser/mailinglists.py
983+++ b/lib/lp/registry/browser/mailinglists.py
984@@ -11,8 +11,8 @@ __all__ = [
985
986
987 from textwrap import TextWrapper
988-from urllib import quote
989
990+from six.moves.urllib.parse import quote
991 from zope.component import getUtility
992
993 from lp.app.browser.tales import PersonFormatterAPI
994diff --git a/lib/lp/registry/browser/person.py b/lib/lp/registry/browser/person.py
995index 35cf170..bb0ef69 100644
996--- a/lib/lp/registry/browser/person.py
997+++ b/lib/lp/registry/browser/person.py
998@@ -54,7 +54,6 @@ from operator import (
999 itemgetter,
1000 )
1001 from textwrap import dedent
1002-import urllib
1003
1004 from lazr.config import as_timedelta
1005 from lazr.delegates import delegate_to
1006@@ -63,6 +62,10 @@ from lazr.restful.interfaces import IWebServiceClientRequest
1007 from lazr.restful.utils import smartquote
1008 from lazr.uri import URI
1009 import pytz
1010+from six.moves.urllib.parse import (
1011+ quote,
1012+ urlencode,
1013+ )
1014 from storm.zope.interfaces import IResultSet
1015 from zope.browserpage import ViewPageTemplateFile
1016 from zope.component import (
1017@@ -1717,7 +1720,7 @@ class PersonView(LaunchpadView, FeedsMixin, ContactViaWebLinksMixin):
1018 """Return an URL to a page which lists all bugs assigned to this
1019 person that are In Progress.
1020 """
1021- query_string = urllib.urlencode(
1022+ query_string = urlencode(
1023 [('field.status', BugTaskStatus.INPROGRESS.title)])
1024 url = "%s/+assignedbugs" % canonical_url(self.context)
1025 return ("%(url)s?search=Search&%(query_string)s"
1026@@ -2952,7 +2955,7 @@ class PersonEditEmailsView(LaunchpadFormView):
1027 "to be confirmed as yours." % newemail)
1028 else:
1029 owner = email.person
1030- owner_name = urllib.quote(owner.name)
1031+ owner_name = quote(owner.name)
1032 merge_url = (
1033 '%s/+requestmerge?field.dupe_person=%s'
1034 % (canonical_url(getUtility(IPersonSet)), owner_name))
1035diff --git a/lib/lp/registry/browser/product.py b/lib/lp/registry/browser/product.py
1036index b2161c5..c059dbb 100644
1037--- a/lib/lp/registry/browser/product.py
1038+++ b/lib/lp/registry/browser/product.py
1039@@ -42,7 +42,6 @@ __all__ = [
1040
1041
1042 from operator import attrgetter
1043-from urlparse import urlunsplit
1044
1045 from breezy import urlutils
1046 from breezy.revision import NULL_REVISION
1047@@ -52,6 +51,7 @@ from lazr.restful.interface import (
1048 use_template,
1049 )
1050 from lazr.restful.interfaces import IJSONRequestCache
1051+from six.moves.urllib.parse import urlunsplit
1052 from zope.browserpage import ViewPageTemplateFile
1053 from zope.component import getUtility
1054 from zope.event import notify
1055diff --git a/lib/lp/registry/browser/sourcepackage.py b/lib/lp/registry/browser/sourcepackage.py
1056index 19a6e89..50fa669 100644
1057--- a/lib/lp/registry/browser/sourcepackage.py
1058+++ b/lib/lp/registry/browser/sourcepackage.py
1059@@ -18,7 +18,6 @@ __all__ = [
1060 ]
1061
1062 import string
1063-import urllib
1064
1065 from apt_pkg import (
1066 upstream_version,
1067@@ -29,6 +28,7 @@ from lazr.enum import (
1068 Item,
1069 )
1070 from lazr.restful.interface import copy_field
1071+from six.moves.urllib.parse import urlencode
1072 from zope.browserpage import ViewPageTemplateFile
1073 from zope.component import (
1074 adapter,
1075@@ -125,8 +125,7 @@ def get_register_upstream_url(source_package):
1076 for binary in source_package.releases[0].sample_binary_packages:
1077 summary_set.add(binary.summary)
1078 params['field.summary'] = '\n'.join(sorted(summary_set))
1079- query_string = urllib.urlencode(
1080- sorted(params.items()), doseq=True)
1081+ query_string = urlencode(sorted(params.items()), doseq=True)
1082 return '/projects/+new?%s' % query_string
1083
1084
1085diff --git a/lib/lp/registry/browser/team.py b/lib/lp/registry/browser/team.py
1086index 73f30c5..416a8a4 100644
1087--- a/lib/lp/registry/browser/team.py
1088+++ b/lib/lp/registry/browser/team.py
1089@@ -37,13 +37,13 @@ from datetime import (
1090 timedelta,
1091 )
1092 import math
1093-from urllib import unquote
1094
1095 from lazr.restful.interface import copy_field
1096 from lazr.restful.interfaces import IJSONRequestCache
1097 from lazr.restful.utils import smartquote
1098 import pytz
1099 import simplejson
1100+from six.moves.urllib.parse import unquote
1101 from zope.browserpage import ViewPageTemplateFile
1102 from zope.component import getUtility
1103 from zope.formlib.form import (
1104diff --git a/lib/lp/registry/browser/tests/test_distroseries.py b/lib/lp/registry/browser/tests/test_distroseries.py
1105index de45a78..1f8d5d5 100644
1106--- a/lib/lp/registry/browser/tests/test_distroseries.py
1107+++ b/lib/lp/registry/browser/tests/test_distroseries.py
1108@@ -9,12 +9,14 @@ from datetime import timedelta
1109 import difflib
1110 import re
1111 from textwrap import TextWrapper
1112-from urllib import urlencode
1113-from urlparse import urlparse
1114
1115 from fixtures import FakeLogger
1116 from lazr.restful.interfaces import IJSONRequestCache
1117 from lxml import html
1118+from six.moves.urllib.parse import (
1119+ urlencode,
1120+ urlparse,
1121+ )
1122 import soupmatchers
1123 from storm.zope.interfaces import IResultSet
1124 from testtools.content import (
1125diff --git a/lib/lp/registry/browser/tests/test_product.py b/lib/lp/registry/browser/tests/test_product.py
1126index 2232c93..4271302 100644
1127--- a/lib/lp/registry/browser/tests/test_product.py
1128+++ b/lib/lp/registry/browser/tests/test_product.py
1129@@ -8,10 +8,12 @@ __metaclass__ = type
1130 __all__ = ['make_product_form']
1131
1132 import re
1133-from urlparse import urlsplit
1134
1135 from lazr.restful.interfaces import IJSONRequestCache
1136-from six.moves.urllib.parse import urlencode
1137+from six.moves.urllib.parse import (
1138+ urlencode,
1139+ urlsplit,
1140+ )
1141 from soupmatchers import (
1142 HTMLContains,
1143 Tag,
1144diff --git a/lib/lp/registry/browser/tests/test_sourcepackage_views.py b/lib/lp/registry/browser/tests/test_sourcepackage_views.py
1145index 3893d1e..c3cac87 100644
1146--- a/lib/lp/registry/browser/tests/test_sourcepackage_views.py
1147+++ b/lib/lp/registry/browser/tests/test_sourcepackage_views.py
1148@@ -5,9 +5,10 @@
1149
1150 __metaclass__ = type
1151
1152-import urllib
1153-
1154-from six.moves.urllib.parse import parse_qsl
1155+from six.moves.urllib.parse import (
1156+ parse_qsl,
1157+ splitquery,
1158+ )
1159 from soupmatchers import (
1160 HTMLContains,
1161 Tag,
1162@@ -60,7 +61,7 @@ class TestSourcePackageViewHelpers(TestCaseWithFactory):
1163 return distroseries.getSourcePackage(source_package_name)
1164
1165 def assertInQueryString(self, url, field, value):
1166- base, query = urllib.splitquery(url)
1167+ base, query = splitquery(url)
1168 params = parse_qsl(query)
1169 self.assertTrue((field, value) in params)
1170
1171@@ -72,7 +73,7 @@ class TestSourcePackageViewHelpers(TestCaseWithFactory):
1172 distroseries=distroseries,
1173 sourcepackagename='python-super-package')
1174 url = get_register_upstream_url(source_package)
1175- base, query = urllib.splitquery(url)
1176+ base, query = splitquery(url)
1177 self.assertEqual('/projects/+new', base)
1178 params = parse_qsl(query)
1179 expected_params = [
1180diff --git a/lib/lp/registry/scripts/distributionmirror_prober.py b/lib/lp/registry/scripts/distributionmirror_prober.py
1181index 1aa5ca3..916d4ba 100644
1182--- a/lib/lp/registry/scripts/distributionmirror_prober.py
1183+++ b/lib/lp/registry/scripts/distributionmirror_prober.py
1184@@ -11,11 +11,15 @@ import itertools
1185 import logging
1186 import os.path
1187 from StringIO import StringIO
1188-import urllib
1189-import urlparse
1190
1191 import requests
1192 from six.moves import http_client
1193+from six.moves.urllib.parse import (
1194+ unquote,
1195+ urljoin,
1196+ urlparse,
1197+ urlunparse,
1198+ )
1199 from twisted.internet import (
1200 defer,
1201 protocol,
1202@@ -321,8 +325,8 @@ class RedirectAwareProberFactory(ProberFactory):
1203
1204 scheme, host, port, orig_path = _parse(self.url)
1205 scheme, host, port, new_path = _parse(url)
1206- if (urllib.unquote(orig_path.split('/')[-1])
1207- != urllib.unquote(new_path.split('/')[-1])):
1208+ if (unquote(orig_path.split('/')[-1])
1209+ != unquote(new_path.split('/')[-1])):
1210 # Server redirected us to a file which doesn't seem to be what we
1211 # requested. It's likely to be a stupid server which redirects
1212 # instead of 404ing (https://launchpad.net/bugs/204460).
1213@@ -618,12 +622,12 @@ def _get_cdimage_file_list():
1214 url = config.distributionmirrorprober.cdimage_file_list_url
1215 # In test environments, this may be a file: URL. Adjust it to be in a
1216 # form that requests can cope with (i.e. using an absolute path).
1217- parsed_url = urlparse.urlparse(url)
1218+ parsed_url = urlparse(url)
1219 if parsed_url.scheme == 'file' and not os.path.isabs(parsed_url.path):
1220 assert parsed_url.path == parsed_url[2]
1221 parsed_url = list(parsed_url)
1222 parsed_url[2] = os.path.join(config.root, parsed_url[2])
1223- url = urlparse.urlunparse(parsed_url)
1224+ url = urlunparse(parsed_url)
1225 try:
1226 return urlfetch(
1227 url, headers={'Pragma': 'no-cache', 'Cache-control': 'no-cache'},
1228@@ -685,7 +689,7 @@ def probe_archive_mirror(mirror, logfile, unchecked_keys, logger):
1229 all_paths = itertools.chain(packages_paths, sources_paths)
1230 request_manager = RequestManager()
1231 for series, pocket, component, path in all_paths:
1232- url = urlparse.urljoin(base_url, path)
1233+ url = urljoin(base_url, path)
1234 callbacks = ArchiveMirrorProberCallbacks(
1235 mirror, series, pocket, component, url, logfile)
1236 unchecked_keys.append(url)
1237@@ -735,7 +739,7 @@ def probe_cdimage_mirror(mirror, logfile, unchecked_keys, logger):
1238 deferredList = []
1239 request_manager = RequestManager()
1240 for path in paths:
1241- url = urlparse.urljoin(base_url, path)
1242+ url = urljoin(base_url, path)
1243 # Use a RedirectAwareProberFactory because CD mirrors are allowed
1244 # to redirect, and we need to cope with that.
1245 prober = RedirectAwareProberFactory(url)
1246@@ -761,7 +765,7 @@ def should_skip_host(host):
1247
1248 def _parse(url, defaultPort=80):
1249 """Parse the given URL returning the scheme, host, port and path."""
1250- scheme, host, path, dummy, dummy, dummy = urlparse.urlparse(url)
1251+ scheme, host, path, dummy, dummy, dummy = urlparse(url)
1252 port = defaultPort
1253 if ':' in host:
1254 host, port = host.split(':')
1255diff --git a/lib/lp/registry/scripts/productreleasefinder/finder.py b/lib/lp/registry/scripts/productreleasefinder/finder.py
1256index 07e2aaa..3fd49f0 100644
1257--- a/lib/lp/registry/scripts/productreleasefinder/finder.py
1258+++ b/lib/lp/registry/scripts/productreleasefinder/finder.py
1259@@ -13,11 +13,11 @@ import mimetypes
1260 import os
1261 import re
1262 import tempfile
1263-import urlparse
1264
1265 from cscvs.dircompare import path
1266 import pytz
1267 import requests
1268+from six.moves.urllib.parse import urlsplit
1269 from zope.component import getUtility
1270
1271 from lp.app.validators.name import invalid_name_pattern
1272@@ -199,7 +199,7 @@ class ProductReleaseFinder:
1273 def handleRelease(self, product_name, series_name, url, file_names):
1274 """If the given URL looks like a release tarball, download it
1275 and create a corresponding ProductRelease."""
1276- filename = urlparse.urlsplit(url)[2]
1277+ filename = urlsplit(url)[2]
1278 slash = filename.rfind("/")
1279 if slash != -1:
1280 filename = filename[slash + 1:]
1281diff --git a/lib/lp/registry/scripts/productreleasefinder/walker.py b/lib/lp/registry/scripts/productreleasefinder/walker.py
1282index fa9e374..4865a46 100644
1283--- a/lib/lp/registry/scripts/productreleasefinder/walker.py
1284+++ b/lib/lp/registry/scripts/productreleasefinder/walker.py
1285@@ -14,11 +14,6 @@ __all__ = [
1286
1287 import ftplib
1288 import socket
1289-from urllib import unquote_plus
1290-from urlparse import (
1291- urljoin,
1292- urlsplit,
1293- )
1294
1295 from cscvs.dircompare.path import (
1296 as_dir,
1297@@ -30,6 +25,11 @@ from lazr.uri import (
1298 )
1299 import requests
1300 import scandir
1301+from six.moves.urllib.parse import (
1302+ unquote_plus,
1303+ urljoin,
1304+ urlsplit,
1305+ )
1306
1307 from lp.registry.scripts.productreleasefinder import log
1308 from lp.services.beautifulsoup import BeautifulSoup
1309diff --git a/lib/lp/registry/stories/product/xx-product-files.txt b/lib/lp/registry/stories/product/xx-product-files.txt
1310index 403e23e..5751b75 100644
1311--- a/lib/lp/registry/stories/product/xx-product-files.txt
1312+++ b/lib/lp/registry/stories/product/xx-product-files.txt
1313@@ -401,8 +401,8 @@ Downloading and deleting files
1314
1315 Download one of the files.
1316
1317- >>> from urllib import urlopen
1318- >>> from urlparse import urlparse
1319+ >>> from six.moves.urllib.parse import urlparse
1320+ >>> from six.moves.urllib.request import urlopen
1321
1322 XXX Downloading via the testbrowser does not work
1323 XXX unless the file is served by the Zope publisher.
1324diff --git a/lib/lp/scripts/utilities/js/combo.py b/lib/lp/scripts/utilities/js/combo.py
1325index b02e381..8b0969f 100644
1326--- a/lib/lp/scripts/utilities/js/combo.py
1327+++ b/lib/lp/scripts/utilities/js/combo.py
1328@@ -6,9 +6,11 @@ from __future__ import absolute_import, print_function, unicode_literals
1329 __metaclass__ = type
1330
1331 import os
1332-import urlparse
1333
1334-from six.moves.urllib.parse import parse_qsl
1335+from six.moves.urllib.parse import (
1336+ parse_qsl,
1337+ urlsplit,
1338+ )
1339
1340 from lp.scripts.utilities.js.jsbuild import (
1341 CSSComboFile,
1342@@ -21,7 +23,7 @@ def parse_url(url):
1343
1344 Returns the list of arguments in the original order.
1345 """
1346- scheme, loc, path, query, frag = urlparse.urlsplit(url)
1347+ scheme, loc, path, query, frag = urlsplit(url)
1348 return parse_qs(query)
1349
1350
1351diff --git a/lib/lp/services/config/__init__.py b/lib/lp/services/config/__init__.py
1352index 80b1d0c..ae94cf0 100644
1353--- a/lib/lp/services/config/__init__.py
1354+++ b/lib/lp/services/config/__init__.py
1355@@ -19,13 +19,13 @@ except ImportError:
1356 import logging
1357 import os
1358 import sys
1359-from urlparse import (
1360- urlparse,
1361- urlunparse,
1362- )
1363
1364 from lazr.config import ImplicitTypeSchema
1365 from lazr.config.interfaces import ConfigErrors
1366+from six.moves.urllib.parse import (
1367+ urlparse,
1368+ urlunparse,
1369+ )
1370 import ZConfig
1371
1372 from lp.services.osutils import open_for_writing
1373diff --git a/lib/lp/services/feeds/feed.py b/lib/lp/services/feeds/feed.py
1374index 6b041a9..76ff15b 100644
1375--- a/lib/lp/services/feeds/feed.py
1376+++ b/lib/lp/services/feeds/feed.py
1377@@ -20,8 +20,8 @@ __all__ = [
1378 import operator
1379 import os
1380 import time
1381-from urlparse import urljoin
1382
1383+from six.moves.urllib.parse import urljoin
1384 from zope.browserpage import ViewPageTemplateFile
1385 from zope.component import getUtility
1386 from zope.datetime import rfc1123_date
1387diff --git a/lib/lp/services/gpg/handler.py b/lib/lp/services/gpg/handler.py
1388index c81ca75..41b7467 100644
1389--- a/lib/lp/services/gpg/handler.py
1390+++ b/lib/lp/services/gpg/handler.py
1391@@ -19,12 +19,12 @@ from StringIO import StringIO
1392 import subprocess
1393 import sys
1394 import tempfile
1395-import urllib
1396
1397 import gpgme
1398 from lazr.restful.utils import get_current_browser_request
1399 import requests
1400 from six.moves import http_client
1401+from six.moves.urllib.parse import urlencode
1402 from zope.interface import implementer
1403 from zope.security.proxy import removeSecurityProxy
1404
1405@@ -467,7 +467,7 @@ class GPGHandler:
1406 config.gpghandler.host, config.gpghandler.port)
1407
1408 conn = http_client.HTTPConnection(keyserver_http_url)
1409- params = urllib.urlencode({'keytext': content})
1410+ params = urlencode({'keytext': content})
1411 headers = {
1412 "Content-type": "application/x-www-form-urlencoded",
1413 "Accept": "text/plain",
1414@@ -512,8 +512,7 @@ class GPGHandler:
1415 base = 'https://%s' % host
1416 else:
1417 base = 'http://%s:%s' % (host, config.gpghandler.port)
1418- return '%s/pks/lookup?%s' % (
1419- base, urllib.urlencode(sorted(params.items())))
1420+ return '%s/pks/lookup?%s' % (base, urlencode(sorted(params.items())))
1421
1422 def _getPubKey(self, fingerprint):
1423 """See IGPGHandler for further information."""
1424diff --git a/lib/lp/services/librarian/client.py b/lib/lp/services/librarian/client.py
1425index 687e8b8..880365e 100644
1426--- a/lib/lp/services/librarian/client.py
1427+++ b/lib/lp/services/librarian/client.py
1428@@ -22,17 +22,21 @@ from socket import (
1429 )
1430 import threading
1431 import time
1432-import urllib
1433-import urllib2
1434-from urlparse import (
1435- urljoin,
1436- urlparse,
1437- urlunparse,
1438- )
1439
1440 from lazr.restful.utils import get_current_browser_request
1441 import six
1442 from six.moves import http_client
1443+from six.moves.urllib.error import (
1444+ HTTPError,
1445+ URLError,
1446+ )
1447+from six.moves.urllib.parse import (
1448+ quote,
1449+ urljoin,
1450+ urlparse,
1451+ urlunparse,
1452+ )
1453+from six.moves.urllib.request import urlopen
1454 from storm.store import Store
1455 from zope.interface import implementer
1456
1457@@ -55,12 +59,12 @@ from lp.services.timeline.requesttimeline import get_request_timeline
1458
1459 def url_path_quote(filename):
1460 """Quote `filename` for use in a URL."""
1461- # RFC 3986 says ~ should not be generated escaped, but urllib.quote
1462+ # RFC 3986 says ~ should not be generated escaped, but urllib.parse.quote
1463 # predates it. Additionally, + is safe to use unescaped in paths and is
1464 # frequently used in Debian versions, so leave it alone.
1465 #
1466 # This needs to match Library.getAlias' TimeLimitedToken handling.
1467- return urllib.quote(filename, safe='/~+')
1468+ return quote(filename, safe='/~+')
1469
1470
1471 def get_libraryfilealias_download_path(aliasID, filename):
1472@@ -339,7 +343,7 @@ class FileDownloadClient:
1473 # url = ('http://%s:%d/search?digest=%s' % (
1474 # host, port, hexdigest)
1475 # )
1476- # results = urllib2.urlopen(url).read()
1477+ # results = urlopen(url).read()
1478 # lines = results.split('\n')
1479 # count, paths = lines[0], lines[1:]
1480 # if int(count) != len(paths):
1481@@ -500,21 +504,21 @@ class FileDownloadClient:
1482 """Helper for getFileByAlias."""
1483 while 1:
1484 try:
1485- return _File(urllib2.urlopen(url), url)
1486- except urllib2.URLError as error:
1487+ return _File(urlopen(url), url)
1488+ except URLError as error:
1489 # 404 errors indicate a data inconsistency: more than one
1490 # attempt to open the file is pointless.
1491 #
1492 # Note that URLError is a base class of HTTPError.
1493- if isinstance(error, urllib2.HTTPError) and error.code == 404:
1494+ if isinstance(error, HTTPError) and error.code == 404:
1495 raise LookupError(aliasID)
1496 # HTTPErrors with a 5xx error code ("server problem")
1497 # are a reason to retry the access again, as well as
1498 # generic, non-HTTP, URLErrors like "connection refused".
1499- if (isinstance(error, urllib2.HTTPError)
1500+ if (isinstance(error, HTTPError)
1501 and 500 <= error.code <= 599
1502- or isinstance(error, urllib2.URLError) and
1503- not isinstance(error, urllib2.HTTPError)):
1504+ or isinstance(error, URLError) and
1505+ not isinstance(error, HTTPError)):
1506 if time.time() <= try_until:
1507 time.sleep(1)
1508 else:
1509diff --git a/lib/lp/services/librarian/doc/librarian.txt b/lib/lp/services/librarian/doc/librarian.txt
1510index 58f7979..01f7ef9 100644
1511--- a/lib/lp/services/librarian/doc/librarian.txt
1512+++ b/lib/lp/services/librarian/doc/librarian.txt
1513@@ -252,7 +252,7 @@ the client until it begins a new transaction.
1514 >>> print url
1515 http://.../text.txt
1516
1517- >>> from urllib2 import urlopen
1518+ >>> from six.moves.urllib.request import urlopen
1519 >>> urlopen(url).read()
1520 'This is some data'
1521
1522diff --git a/lib/lp/services/librarian/model.py b/lib/lp/services/librarian/model.py
1523index adb2b12..0cd870e 100644
1524--- a/lib/lp/services/librarian/model.py
1525+++ b/lib/lp/services/librarian/model.py
1526@@ -13,10 +13,10 @@ __all__ = [
1527
1528 from datetime import datetime
1529 import hashlib
1530-from urlparse import urlparse
1531
1532 from lazr.delegates import delegate_to
1533 import pytz
1534+from six.moves.urllib.parse import urlparse
1535 from sqlobject import (
1536 BoolCol,
1537 ForeignKey,
1538diff --git a/lib/lp/services/librarian/smoketest.py b/lib/lp/services/librarian/smoketest.py
1539index 36c3c81..6c389a5 100644
1540--- a/lib/lp/services/librarian/smoketest.py
1541+++ b/lib/lp/services/librarian/smoketest.py
1542@@ -9,9 +9,9 @@
1543 from cStringIO import StringIO
1544 import datetime
1545 import sys
1546-import urllib
1547
1548 import pytz
1549+from six.moves.urllib.request import urlopen
1550 import transaction
1551 from zope.component import getUtility
1552
1553@@ -36,7 +36,7 @@ def store_file(client):
1554
1555 def read_file(url):
1556 try:
1557- data = urllib.urlopen(url).read()
1558+ data = urlopen(url).read()
1559 except MemoryError:
1560 # Re-raise catastrophic errors.
1561 raise
1562diff --git a/lib/lp/services/librarian/tests/test_client.py b/lib/lp/services/librarian/tests/test_client.py
1563index 4f061ce..15990b5 100644
1564--- a/lib/lp/services/librarian/tests/test_client.py
1565+++ b/lib/lp/services/librarian/tests/test_client.py
1566@@ -7,17 +7,17 @@ import os
1567 import re
1568 import textwrap
1569 import unittest
1570-from urllib2 import (
1571- HTTPError,
1572- URLError,
1573- urlopen,
1574- )
1575
1576 from fixtures import (
1577 EnvironmentVariable,
1578 TempDir,
1579 )
1580 from six.moves import http_client
1581+from six.moves.urllib.error import (
1582+ HTTPError,
1583+ URLError,
1584+ )
1585+from six.moves.urllib.request import urlopen
1586 import transaction
1587
1588 from lp.services.config import config
1589diff --git a/lib/lp/services/librarian/tests/test_smoketest.py b/lib/lp/services/librarian/tests/test_smoketest.py
1590index 949b7c2..9cd0e91 100644
1591--- a/lib/lp/services/librarian/tests/test_smoketest.py
1592+++ b/lib/lp/services/librarian/tests/test_smoketest.py
1593@@ -5,10 +5,11 @@
1594
1595 __metaclass__ = type
1596
1597-from contextlib import contextmanager
1598 from cStringIO import StringIO
1599+from functools import partial
1600+
1601+from fixtures import MockPatch
1602
1603-from lp.services.librarian import smoketest
1604 from lp.services.librarian.smoketest import (
1605 do_smoketest,
1606 FILE_DATA,
1607@@ -19,43 +20,24 @@ from lp.testing import TestCaseWithFactory
1608 from lp.testing.layers import ZopelessDatabaseLayer
1609
1610
1611-class GoodUrllib:
1612+def good_urlopen(url):
1613 """A urllib replacement for testing that returns good results."""
1614-
1615- def urlopen(self, url):
1616- return StringIO(FILE_DATA)
1617+ return StringIO(FILE_DATA)
1618
1619
1620-class BadUrllib:
1621+def bad_urlopen(url):
1622 """A urllib replacement for testing that returns bad results."""
1623+ return StringIO('bad data')
1624
1625- def urlopen(self, url):
1626- return StringIO('bad data')
1627
1628-
1629-class ErrorUrllib:
1630+def error_urlopen(url):
1631 """A urllib replacement for testing that raises an exception."""
1632-
1633- def urlopen(self, url):
1634- raise IOError('network error')
1635+ raise IOError('network error')
1636
1637
1638-class ExplosiveUrllib:
1639+def explosive_urlopen(exception, url):
1640 """A urllib replacement that raises an "explosive" exception."""
1641-
1642- def __init__(self, exception):
1643- self.exception = exception
1644-
1645- def urlopen(self, url):
1646- raise self.exception
1647-
1648-
1649-@contextmanager
1650-def fake_urllib(fake):
1651- original_urllib = smoketest.urllib
1652- smoketest.urllib = fake
1653- yield
1654- smoketest.urllib = original_urllib
1655+ raise exception
1656
1657
1658 class SmokeTestTestCase(TestCaseWithFactory):
1659@@ -77,7 +59,8 @@ class SmokeTestTestCase(TestCaseWithFactory):
1660 # If storing and retrieving both the public and private files work,
1661 # the main function will return 0 (which will be used as the processes
1662 # exit code to signal success).
1663- with fake_urllib(GoodUrllib()):
1664+ with MockPatch(
1665+ "lp.services.librarian.smoketest.urlopen", good_urlopen):
1666 self.assertEqual(
1667 do_smoketest(self.fake_librarian, self.fake_librarian,
1668 output=StringIO()),
1669@@ -86,7 +69,7 @@ class SmokeTestTestCase(TestCaseWithFactory):
1670 def test_bad_data(self):
1671 # If incorrect data is retrieved, the main function will return 1
1672 # (which will be used as the processes exit code to signal an error).
1673- with fake_urllib(BadUrllib()):
1674+ with MockPatch("lp.services.librarian.smoketest.urlopen", bad_urlopen):
1675 self.assertEqual(
1676 do_smoketest(self.fake_librarian, self.fake_librarian,
1677 output=StringIO()),
1678@@ -96,7 +79,8 @@ class SmokeTestTestCase(TestCaseWithFactory):
1679 # If an exception is raised when retrieving the data, the main
1680 # function will return 1 (which will be used as the processes exit
1681 # code to signal an error).
1682- with fake_urllib(ErrorUrllib()):
1683+ with MockPatch(
1684+ "lp.services.librarian.smoketest.urlopen", error_urlopen):
1685 self.assertEqual(
1686 do_smoketest(self.fake_librarian, self.fake_librarian,
1687 output=StringIO()),
1688@@ -106,7 +90,9 @@ class SmokeTestTestCase(TestCaseWithFactory):
1689 # If an "explosive" exception (an exception that should not be caught)
1690 # is raised when retrieving the data it is re-raised.
1691 for exception in MemoryError, SystemExit, KeyboardInterrupt:
1692- with fake_urllib(ExplosiveUrllib(exception)):
1693+ with MockPatch(
1694+ "lp.services.librarian.smoketest.urlopen",
1695+ partial(explosive_urlopen, exception)):
1696 self.assertRaises(
1697 exception,
1698 do_smoketest, self.fake_librarian, self.fake_librarian,
1699diff --git a/lib/lp/services/librarianserver/db.py b/lib/lp/services/librarianserver/db.py
1700index 4494443..3483935 100644
1701--- a/lib/lp/services/librarianserver/db.py
1702+++ b/lib/lp/services/librarianserver/db.py
1703@@ -9,9 +9,12 @@ __all__ = [
1704 ]
1705
1706 import hashlib
1707-import urllib
1708
1709 from pymacaroons import Macaroon
1710+from six.moves.urllib.parse import (
1711+ quote,
1712+ unquote,
1713+ )
1714 from six.moves.xmlrpc_client import Fault
1715 from storm.expr import (
1716 And,
1717@@ -109,13 +112,12 @@ class Library:
1718 # The URL-encoding of the path may have changed somewhere
1719 # along the line, so reencode it canonically. LFA.filename
1720 # can't contain slashes, so they're safe to leave unencoded.
1721- # And urllib.quote erroneously excludes ~ from its safe set,
1722- # while RFC 3986 says it should be unescaped and Chromium
1723- # forcibly decodes it in any URL that it sees.
1724+ # And urllib.parse.quote erroneously excludes ~ from its
1725+ # safe set, while RFC 3986 says it should be unescaped and
1726+ # Chromium forcibly decodes it in any URL that it sees.
1727 #
1728 # This needs to match url_path_quote.
1729- normalised_path = urllib.quote(
1730- urllib.unquote(path), safe='/~+')
1731+ normalised_path = quote(unquote(path), safe='/~+')
1732 store = session_store()
1733 token_ok = not store.find(TimeLimitedToken,
1734 SQL("age(created) < interval '1 day'"),
1735diff --git a/lib/lp/services/librarianserver/swift.py b/lib/lp/services/librarianserver/swift.py
1736index 29515dc..db01746 100644
1737--- a/lib/lp/services/librarianserver/swift.py
1738+++ b/lib/lp/services/librarianserver/swift.py
1739@@ -19,9 +19,9 @@ import hashlib
1740 import os.path
1741 import re
1742 import time
1743-import urllib
1744
1745 import scandir
1746+from six.moves.urllib.parse import quote
1747 from swiftclient import client as swiftclient
1748
1749 from lp.services.config import config
1750@@ -233,8 +233,7 @@ def _put(log, swift_connection, lfc_id, container, obj_name, fs_path):
1751 lfc_id, disk_md5_hash, db_md5_hash))
1752 raise AssertionError('md5 mismatch')
1753
1754- manifest = '{0}/{1}/'.format(
1755- urllib.quote(container), urllib.quote(obj_name))
1756+ manifest = '{0}/{1}/'.format(quote(container), quote(obj_name))
1757 manifest_headers = {'X-Object-Manifest': manifest}
1758 swift_connection.put_object(
1759 container, obj_name, '', 0, headers=manifest_headers)
1760diff --git a/lib/lp/services/librarianserver/testing/fake.py b/lib/lp/services/librarianserver/testing/fake.py
1761index b7a4305..37d6bb6 100644
1762--- a/lib/lp/services/librarianserver/testing/fake.py
1763+++ b/lib/lp/services/librarianserver/testing/fake.py
1764@@ -17,9 +17,9 @@ __all__ = [
1765
1766 import hashlib
1767 from StringIO import StringIO
1768-from urlparse import urljoin
1769
1770 from fixtures import Fixture
1771+from six.moves.urllib.parse import urljoin
1772 import transaction
1773 from transaction.interfaces import ISynchronizer
1774 import zope.component
1775diff --git a/lib/lp/services/librarianserver/testing/tests/test_server_fixture.py b/lib/lp/services/librarianserver/testing/tests/test_server_fixture.py
1776index bcbf2e3..2d9315b 100644
1777--- a/lib/lp/services/librarianserver/testing/tests/test_server_fixture.py
1778+++ b/lib/lp/services/librarianserver/testing/tests/test_server_fixture.py
1779@@ -10,7 +10,8 @@ __metaclass__ = type
1780 import os
1781 import socket
1782 from textwrap import dedent
1783-from urllib import urlopen
1784+
1785+from six.moves.urllib.request import urlopen
1786
1787 from lp.services.config import config
1788 from lp.services.config.fixture import ConfigFixture
1789diff --git a/lib/lp/services/librarianserver/tests/test_db_outage.py b/lib/lp/services/librarianserver/tests/test_db_outage.py
1790index 9012faf..9f28dfe 100644
1791--- a/lib/lp/services/librarianserver/tests/test_db_outage.py
1792+++ b/lib/lp/services/librarianserver/tests/test_db_outage.py
1793@@ -8,9 +8,10 @@ Database outages happen by accident and during fastdowntime deployments."""
1794 __metaclass__ = type
1795
1796 from cStringIO import StringIO
1797-import urllib2
1798
1799 from fixtures import Fixture
1800+from six.moves.urllib.error import HTTPError
1801+from six.moves.urllib.request import urlopen
1802
1803 from lp.services.librarian.client import LibrarianClient
1804 from lp.services.librarianserver.testing.server import LibrarianServerFixture
1805@@ -87,9 +88,9 @@ class TestLibrarianDBOutage(TestCase):
1806 codes = set()
1807 for count in range(num_librarian_threads):
1808 try:
1809- urllib2.urlopen(self.url).read()
1810+ urlopen(self.url).read()
1811 codes.add(200)
1812- except urllib2.HTTPError as error:
1813+ except HTTPError as error:
1814 codes.add(error.code)
1815 self.assertTrue(len(codes) == 1, 'Mixed responses: %s' % str(codes))
1816 return codes.pop()
1817diff --git a/lib/lp/services/librarianserver/tests/test_web.py b/lib/lp/services/librarianserver/tests/test_web.py
1818index 3ba366a..c7ceeeb 100644
1819--- a/lib/lp/services/librarianserver/tests/test_web.py
1820+++ b/lib/lp/services/librarianserver/tests/test_web.py
1821@@ -9,12 +9,12 @@ import hashlib
1822 from io import BytesIO
1823 import os
1824 import unittest
1825-from urlparse import urlparse
1826
1827 from lazr.uri import URI
1828 import pytz
1829 import requests
1830 from six.moves import http_client
1831+from six.moves.urllib.parse import urlparse
1832 from storm.expr import SQL
1833 from testtools.matchers import EndsWith
1834 import transaction
1835diff --git a/lib/lp/services/librarianserver/web.py b/lib/lp/services/librarianserver/web.py
1836index 36b4642..90001dc 100644
1837--- a/lib/lp/services/librarianserver/web.py
1838+++ b/lib/lp/services/librarianserver/web.py
1839@@ -5,9 +5,9 @@ __metaclass__ = type
1840
1841 from datetime import datetime
1842 import time
1843-from urlparse import urlparse
1844
1845 from pymacaroons import Macaroon
1846+from six.moves.urllib.parse import urlparse
1847 from storm.exceptions import DisconnectionError
1848 from twisted.internet import (
1849 abstract,
1850diff --git a/lib/lp/services/oauth/stories/access-token.txt b/lib/lp/services/oauth/stories/access-token.txt
1851index f0e49d2..1269194 100644
1852--- a/lib/lp/services/oauth/stories/access-token.txt
1853+++ b/lib/lp/services/oauth/stories/access-token.txt
1854@@ -18,7 +18,7 @@ access token.
1855 >>> token.review(salgado, OAuthPermission.WRITE_PUBLIC)
1856 >>> logout()
1857
1858- >>> from urllib import urlencode
1859+ >>> from six.moves.urllib.parse import urlencode
1860 >>> data = dict(
1861 ... oauth_consumer_key='foobar123451432',
1862 ... oauth_version='1.0',
1863diff --git a/lib/lp/services/oauth/stories/authorize-token.txt b/lib/lp/services/oauth/stories/authorize-token.txt
1864index f985f1a..cc339bd 100644
1865--- a/lib/lp/services/oauth/stories/authorize-token.txt
1866+++ b/lib/lp/services/oauth/stories/authorize-token.txt
1867@@ -31,7 +31,7 @@ The +authorize-token page is restricted to logged in users, so users will
1868 first be asked to log in. (We won't show the actual login process because
1869 it involves OpenID, which would complicate this test quite a bit.)
1870
1871- >>> from urllib import urlencode
1872+ >>> from six.moves.urllib.parse import urlencode
1873 >>> params = dict(
1874 ... oauth_token=token.key, oauth_callback='http://launchpad.test/bzr')
1875 >>> url = "http://launchpad.test/+authorize-token?%s" % urlencode(params)
1876diff --git a/lib/lp/services/oauth/stories/request-token.txt b/lib/lp/services/oauth/stories/request-token.txt
1877index 9004db5..f72e378 100644
1878--- a/lib/lp/services/oauth/stories/request-token.txt
1879+++ b/lib/lp/services/oauth/stories/request-token.txt
1880@@ -3,7 +3,7 @@
1881 Our sample consumer (whose key is 'foobar123451432') asks Launchpad for
1882 a request token which may later be exchanged for an access token.
1883
1884- >>> from urllib import urlencode
1885+ >>> from six.moves.urllib.parse import urlencode
1886 >>> data = dict(
1887 ... oauth_consumer_key='foobar123451432',
1888 ... oauth_version='1.0',
1889diff --git a/lib/lp/services/scripts/base.py b/lib/lp/services/scripts/base.py
1890index 2a19eff..509df60 100644
1891--- a/lib/lp/services/scripts/base.py
1892+++ b/lib/lp/services/scripts/base.py
1893@@ -19,17 +19,17 @@ import logging
1894 from optparse import OptionParser
1895 import os.path
1896 import sys
1897-from urllib2 import (
1898- HTTPError,
1899- URLError,
1900- urlopen,
1901- )
1902
1903 from contrib.glock import (
1904 GlobalLock,
1905 LockAlreadyAcquired,
1906 )
1907 import pytz
1908+from six.moves.urllib.error import (
1909+ HTTPError,
1910+ URLError,
1911+ )
1912+from six.moves.urllib.request import urlopen
1913 import transaction
1914 from zope.component import getUtility
1915
1916diff --git a/lib/lp/services/sitesearch/__init__.py b/lib/lp/services/sitesearch/__init__.py
1917index b11f73d..da896ee 100644
1918--- a/lib/lp/services/sitesearch/__init__.py
1919+++ b/lib/lp/services/sitesearch/__init__.py
1920@@ -12,15 +12,15 @@ __all__ = [
1921 ]
1922
1923 import json
1924-import urllib
1925-from urlparse import (
1926- parse_qsl,
1927- urlunparse,
1928- )
1929
1930 from lazr.restful.utils import get_current_browser_request
1931 from lazr.uri import URI
1932 import requests
1933+from six.moves.urllib.parse import (
1934+ parse_qsl,
1935+ urlencode,
1936+ urlunparse,
1937+ )
1938 from zope.interface import implementer
1939
1940 from lp.services.config import config
1941@@ -88,7 +88,7 @@ class PageMatch:
1942 """Escapes invalid urls."""
1943 parts = urlparse(url)
1944 querydata = parse_qsl(parts.query)
1945- querystring = urllib.urlencode(querydata)
1946+ querystring = urlencode(querydata)
1947 urldata = list(parts)
1948 urldata[-2] = querystring
1949 return urlunparse(urldata)
1950@@ -242,7 +242,7 @@ class BingSearchService:
1951 search_params['q'] = terms.encode('utf8')
1952 search_params['offset'] = start
1953 search_params['customConfig'] = self.custom_config_id
1954- query_string = urllib.urlencode(sorted(search_params.items()))
1955+ query_string = urlencode(sorted(search_params.items()))
1956 return self.site + '?' + query_string
1957
1958 def create_search_headers(self):
1959diff --git a/lib/lp/services/verification/browser/logintoken.py b/lib/lp/services/verification/browser/logintoken.py
1960index c15321d..245d8f0 100644
1961--- a/lib/lp/services/verification/browser/logintoken.py
1962+++ b/lib/lp/services/verification/browser/logintoken.py
1963@@ -15,8 +15,10 @@ __all__ = [
1964 'ValidateGPGKeyView',
1965 ]
1966
1967-import urllib
1968-
1969+from six.moves.urllib.parse import (
1970+ urlencode,
1971+ urljoin,
1972+ )
1973 from zope.component import getUtility
1974 from zope.formlib.widget import CustomWidgetFactory
1975 from zope.formlib.widgets import TextAreaWidget
1976@@ -97,7 +99,7 @@ class LoginTokenView(LaunchpadView):
1977
1978 def render(self):
1979 if self.context.date_consumed is None:
1980- url = urllib.basejoin(
1981+ url = urljoin(
1982 str(self.request.URL), self.PAGES[self.context.tokentype])
1983 self.request.response.redirect(url)
1984 else:
1985@@ -400,7 +402,7 @@ class ValidateEmailView(BaseTokenView, LaunchpadFormView):
1986 # hack, but if it fails nothing will happen.
1987 # -- Guilherme Salgado 2005-07-09
1988 url = allvhosts.configs['mainsite'].rooturl
1989- query = urllib.urlencode([('field.dupe_person', dupe.name)])
1990+ query = urlencode([('field.dupe_person', dupe.name)])
1991 url += '/people/+requestmerge?' + query
1992 self.addError(structured(
1993 'This email address is already registered for another '
1994diff --git a/lib/lp/services/webapp/doc/webapp-publication.txt b/lib/lp/services/webapp/doc/webapp-publication.txt
1995index 5028368..046ae2f 100644
1996--- a/lib/lp/services/webapp/doc/webapp-publication.txt
1997+++ b/lib/lp/services/webapp/doc/webapp-publication.txt
1998@@ -510,7 +510,7 @@ python 'in' operator.
1999
2000 >>> from lp.services.webapp.servers import (
2001 ... LaunchpadBrowserRequest)
2002- >>> from urllib import urlencode
2003+ >>> from six.moves.urllib.parse import urlencode
2004 >>> environment = {'QUERY_STRING': urlencode({
2005 ... 'a_field': 'a_value',
2006 ... 'items_field': [1, 2, 3]}, doseq=True)}
2007diff --git a/lib/lp/services/webapp/errorlog.py b/lib/lp/services/webapp/errorlog.py
2008index cdb99c3..2092e90 100644
2009--- a/lib/lp/services/webapp/errorlog.py
2010+++ b/lib/lp/services/webapp/errorlog.py
2011@@ -9,7 +9,6 @@ import contextlib
2012 from itertools import repeat
2013 import operator
2014 import re
2015-import urlparse
2016
2017 from lazr.restful.utils import (
2018 get_current_browser_request,
2019@@ -20,6 +19,7 @@ import oops_amqp
2020 from oops_datedir_repo import DateDirRepo
2021 import oops_timeline
2022 import pytz
2023+from six.moves.urllib.parse import urlparse
2024 from zope.component.interfaces import ObjectEvent
2025 from zope.error.interfaces import IErrorReportingUtility
2026 from zope.event import notify
2027@@ -396,9 +396,8 @@ class ErrorReportingUtility:
2028 # broken-url-generator in LP: ignore it.
2029 if referer is None:
2030 return True
2031- referer_parts = urlparse.urlparse(referer)
2032- root_parts = urlparse.urlparse(
2033- allvhosts.configs['mainsite'].rooturl)
2034+ referer_parts = urlparse(referer)
2035+ root_parts = urlparse(allvhosts.configs['mainsite'].rooturl)
2036 if root_parts.netloc not in referer_parts.netloc:
2037 return True
2038 return False
2039diff --git a/lib/lp/services/webapp/login.py b/lib/lp/services/webapp/login.py
2040index 30fc146..43eb478 100644
2041--- a/lib/lp/services/webapp/login.py
2042+++ b/lib/lp/services/webapp/login.py
2043@@ -10,7 +10,6 @@ from datetime import (
2044 datetime,
2045 timedelta,
2046 )
2047-import urllib
2048
2049 from openid.consumer.consumer import (
2050 CANCEL,
2051@@ -27,6 +26,7 @@ from paste.httpexceptions import (
2052 HTTPException,
2053 )
2054 import six
2055+from six.moves.urllib.parse import urlencode
2056 import transaction
2057 from zope.authentication.interfaces import IUnauthenticatedPrincipal
2058 from zope.browserpage import ViewPageTemplateFile
2059@@ -220,7 +220,7 @@ class OpenIDLogin(LaunchpadView):
2060 passthrough_field = self.request.form.get(passthrough_name, None)
2061 if passthrough_field is not None:
2062 starting_data.append((passthrough_name, passthrough_field))
2063- starting_url = urllib.urlencode(starting_data)
2064+ starting_url = urlencode(starting_data)
2065 trust_root = allvhosts.configs['mainsite'].rooturl
2066 return_to = urlappend(trust_root, '+openid-callback')
2067 return_to = "%s?%s" % (return_to, starting_url)
2068@@ -240,7 +240,7 @@ class OpenIDLogin(LaunchpadView):
2069 def starting_url(self):
2070 starting_url = self.request.getURL(1)
2071 params = list(self.form_args)
2072- query_string = urllib.urlencode(params, doseq=True)
2073+ query_string = urlencode(params, doseq=True)
2074 if query_string:
2075 starting_url += "?%s" % query_string
2076 return starting_url
2077@@ -265,9 +265,8 @@ class OpenIDLogin(LaunchpadView):
2078 else:
2079 value_list = [value]
2080
2081- # urllib.urlencode will just encode unicode values to ASCII.
2082- # For our purposes, we can be a little more liberal and allow
2083- # UTF-8.
2084+ # urlencode will just encode unicode values to ASCII. For our
2085+ # purposes, we can be a little more liberal and allow UTF-8.
2086 yield (
2087 six.ensure_binary(name),
2088 [six.ensure_binary(value) for value in value_list])
2089@@ -591,7 +590,7 @@ class CookieLogoutPage:
2090 openid_root = config.launchpad.openid_provider_root
2091 target = '%s+logout?%s' % (
2092 config.codehosting.secure_codebrowse_root,
2093- urllib.urlencode(dict(next_to='%s+logout' % (openid_root, ))))
2094+ urlencode(dict(next_to='%s+logout' % (openid_root, ))))
2095 self.request.response.redirect(target)
2096 return ''
2097
2098diff --git a/lib/lp/services/webapp/openid.py b/lib/lp/services/webapp/openid.py
2099index a607d0f..622fa0e 100644
2100--- a/lib/lp/services/webapp/openid.py
2101+++ b/lib/lp/services/webapp/openid.py
2102@@ -13,12 +13,12 @@ __all__ = [
2103
2104 from functools import partial
2105 import os.path
2106-import urllib2
2107
2108 from openid.fetchers import (
2109 setDefaultFetcher,
2110 Urllib2Fetcher,
2111 )
2112+from six.moves.urllib.request import urlopen
2113
2114 from lp.services.config import config
2115
2116@@ -29,5 +29,5 @@ def set_default_openid_fetcher():
2117 fetcher = Urllib2Fetcher()
2118 if config.launchpad.enable_test_openid_provider:
2119 cafile = os.path.join(config.root, "configs/development/launchpad.crt")
2120- fetcher.urlopen = partial(urllib2.urlopen, cafile=cafile)
2121+ fetcher.urlopen = partial(urlopen, cafile=cafile)
2122 setDefaultFetcher(fetcher)
2123diff --git a/lib/lp/services/webapp/publication.py b/lib/lp/services/webapp/publication.py
2124index 1a1c1a4..6171ccf 100644
2125--- a/lib/lp/services/webapp/publication.py
2126+++ b/lib/lp/services/webapp/publication.py
2127@@ -13,7 +13,6 @@ import thread
2128 import threading
2129 import time
2130 import traceback
2131-import urllib
2132
2133 from lazr.restful.utils import safe_hasattr
2134 from lazr.uri import (
2135@@ -21,6 +20,7 @@ from lazr.uri import (
2136 URI,
2137 )
2138 from psycopg2.extensions import TransactionRollbackError
2139+from six.moves.urllib.parse import quote
2140 from storm.database import STATE_DISCONNECTED
2141 from storm.exceptions import (
2142 DisconnectionError,
2143@@ -350,7 +350,7 @@ class LaunchpadBrowserPublication(
2144
2145 non_restricted_url = self.getNonRestrictedURL(request)
2146 if non_restricted_url is not None:
2147- location += '?production=%s' % urllib.quote(non_restricted_url)
2148+ location += '?production=%s' % quote(non_restricted_url)
2149
2150 request.response.setResult('')
2151 request.response.redirect(location, temporary_if_possible=True)
2152diff --git a/lib/lp/services/webapp/tests/test_login.py b/lib/lp/services/webapp/tests/test_login.py
2153index 66b8b1d..b110149 100644
2154--- a/lib/lp/services/webapp/tests/test_login.py
2155+++ b/lib/lp/services/webapp/tests/test_login.py
2156@@ -18,8 +18,6 @@ from datetime import (
2157 timedelta,
2158 )
2159 import unittest
2160-import urllib
2161-import urlparse
2162
2163 from openid.consumer.consumer import (
2164 FAILURE,
2165@@ -32,6 +30,11 @@ from openid.extensions import (
2166 from openid.yadis.discover import DiscoveryFailure
2167 from six.moves import http_client
2168 from six.moves.urllib.error import HTTPError
2169+from six.moves.urllib.parse import (
2170+ parse_qsl,
2171+ quote,
2172+ urlsplit,
2173+ )
2174 from testtools.matchers import (
2175 Contains,
2176 ContainsDict,
2177@@ -773,7 +776,7 @@ class ForwardsCorrectly:
2178 """
2179
2180 def match(self, query_string):
2181- args = dict(urlparse.parse_qsl(query_string))
2182+ args = dict(parse_qsl(query_string))
2183 request = LaunchpadTestRequest(form=args)
2184 request.processInputs()
2185 # This is a hack to make the request.getURL(1) call issued by the view
2186@@ -781,8 +784,8 @@ class ForwardsCorrectly:
2187 request._app_names = ['foo']
2188 view = StubbedOpenIDLogin(object(), request)
2189 view()
2190- escaped_args = tuple(map(urllib.quote, args.items()[0]))
2191- expected_fragment = urllib.quote('%s=%s' % escaped_args)
2192+ escaped_args = tuple(map(quote, args.items()[0]))
2193+ expected_fragment = quote('%s=%s' % escaped_args)
2194 return Contains(
2195 expected_fragment).match(view.openid_request.return_to)
2196
2197@@ -811,8 +814,8 @@ class TestOpenIDLogin(TestCaseWithFactory):
2198 # Sometimes the form params are unicode because a decode('utf8')
2199 # worked in the form machinery... and if so they cannot be trivially
2200 # quoted but must be encoded first.
2201- key = urllib.quote(u'key\xf3'.encode('utf8'))
2202- value = urllib.quote(u'value\xf3'.encode('utf8'))
2203+ key = quote(u'key\xf3'.encode('utf8'))
2204+ value = quote(u'value\xf3'.encode('utf8'))
2205 query_string = "%s=%s" % (key, value)
2206 self.assertThat(query_string, ForwardsCorrectly())
2207
2208@@ -875,8 +878,8 @@ class TestOpenIDLogin(TestCaseWithFactory):
2209 macaroon_extension = extensions[1]
2210 self.assertIsInstance(macaroon_extension, MacaroonRequest)
2211 self.assertEqual(caveat_id, macaroon_extension.caveat_id)
2212- return_to_args = dict(urlparse.parse_qsl(
2213- urlparse.urlsplit(view.openid_request.return_to).query))
2214+ return_to_args = dict(parse_qsl(
2215+ urlsplit(view.openid_request.return_to).query))
2216 self.assertEqual(
2217 'field.actions.complete',
2218 return_to_args['discharge_macaroon_action'])
2219diff --git a/lib/lp/services/webapp/url.py b/lib/lp/services/webapp/url.py
2220index 89f1ed7..2530924 100644
2221--- a/lib/lp/services/webapp/url.py
2222+++ b/lib/lp/services/webapp/url.py
2223@@ -6,12 +6,12 @@
2224 __metaclass__ = type
2225 __all__ = ['urlappend', 'urlparse', 'urlsplit']
2226
2227-from urlparse import (
2228+from six.moves.urllib.parse import (
2229 urljoin,
2230 urlparse as original_urlparse,
2231 urlsplit as original_urlsplit,
2232 )
2233-import urlparse as urlparse_module
2234+import six.moves.urllib.parse as urlparse_module
2235
2236
2237 def _enable_sftp_in_urlparse():
2238diff --git a/lib/lp/services/webservice/wadl.py b/lib/lp/services/webservice/wadl.py
2239index f11e2e1..1bb1312 100644
2240--- a/lib/lp/services/webservice/wadl.py
2241+++ b/lib/lp/services/webservice/wadl.py
2242@@ -10,7 +10,8 @@ try:
2243 except ImportError:
2244 import importlib_resources as resources
2245 import subprocess
2246-import urlparse
2247+
2248+from six.moves.urllib.parse import urljoin
2249
2250 from lp.services.webapp.interaction import (
2251 ANONYMOUS,
2252@@ -26,7 +27,7 @@ from lp.services.webapp.vhosts import allvhosts
2253 def _generate_web_service_root(version, mimetype):
2254 """Generate the webservice description for the given version and mimetype.
2255 """
2256- url = urlparse.urljoin(allvhosts.configs['api'].rooturl, version)
2257+ url = urljoin(allvhosts.configs['api'].rooturl, version)
2258 # Since we want HTTPS URLs we have to munge the request URL.
2259 url = url.replace('http://', 'https://')
2260 request = WebServiceTestRequest(version=version, environ={
2261diff --git a/lib/lp/snappy/browser/snap.py b/lib/lp/snappy/browser/snap.py
2262index 2461b29..dba1b18 100644
2263--- a/lib/lp/snappy/browser/snap.py
2264+++ b/lib/lp/snappy/browser/snap.py
2265@@ -18,13 +18,12 @@ __all__ = [
2266 'SnapView',
2267 ]
2268
2269-from urllib import urlencode
2270-
2271 from lazr.restful.fields import Reference
2272 from lazr.restful.interface import (
2273 copy_field,
2274 use_template,
2275 )
2276+from six.moves.urllib.parse import urlencode
2277 from zope.component import getUtility
2278 from zope.error.interfaces import IErrorReportingUtility
2279 from zope.formlib.widget import CustomWidgetFactory
2280diff --git a/lib/lp/snappy/browser/tests/test_snap.py b/lib/lp/snappy/browser/tests/test_snap.py
2281index 5470916..0a481dc 100644
2282--- a/lib/lp/snappy/browser/tests/test_snap.py
2283+++ b/lib/lp/snappy/browser/tests/test_snap.py
2284@@ -13,15 +13,15 @@ from datetime import (
2285 )
2286 import json
2287 import re
2288-from urlparse import (
2289- parse_qs,
2290- urlsplit,
2291- )
2292
2293 from fixtures import FakeLogger
2294 from pymacaroons import Macaroon
2295 import pytz
2296 import responses
2297+from six.moves.urllib.parse import (
2298+ parse_qs,
2299+ urlsplit,
2300+ )
2301 import soupmatchers
2302 from testtools.matchers import (
2303 AfterPreprocessing,
2304diff --git a/lib/lp/snappy/model/snap.py b/lib/lp/snappy/model/snap.py
2305index 96aecd2..229eaa4 100644
2306--- a/lib/lp/snappy/model/snap.py
2307+++ b/lib/lp/snappy/model/snap.py
2308@@ -15,13 +15,13 @@ from datetime import (
2309 timedelta,
2310 )
2311 from operator import attrgetter
2312-from urlparse import urlsplit
2313
2314 from breezy import urlutils
2315 from lazr.lifecycle.event import ObjectCreatedEvent
2316 from pymacaroons import Macaroon
2317 import pytz
2318 import six
2319+from six.moves.urllib.parse import urlsplit
2320 from storm.expr import (
2321 And,
2322 Desc,
2323diff --git a/lib/lp/snappy/model/snapstoreclient.py b/lib/lp/snappy/model/snapstoreclient.py
2324index e9b7e8f..7d094aa 100644
2325--- a/lib/lp/snappy/model/snapstoreclient.py
2326+++ b/lib/lp/snappy/model/snapstoreclient.py
2327@@ -18,13 +18,13 @@ except ImportError:
2328 JSONDecodeError = ValueError
2329 import string
2330 import time
2331-from urlparse import urlsplit
2332
2333 from lazr.restful.utils import get_current_browser_request
2334 from pymacaroons import Macaroon
2335 import requests
2336 from requests_toolbelt import MultipartEncoder
2337 import six
2338+from six.moves.urllib.parse import urlsplit
2339 from zope.component import getUtility
2340 from zope.interface import implementer
2341 from zope.security.proxy import removeSecurityProxy
2342diff --git a/lib/lp/snappy/tests/test_snap.py b/lib/lp/snappy/tests/test_snap.py
2343index b7d2d8a..809b617 100644
2344--- a/lib/lp/snappy/tests/test_snap.py
2345+++ b/lib/lp/snappy/tests/test_snap.py
2346@@ -15,7 +15,6 @@ from datetime import (
2347 import json
2348 from operator import attrgetter
2349 from textwrap import dedent
2350-from urlparse import urlsplit
2351
2352 from fixtures import (
2353 FakeLogger,
2354@@ -26,6 +25,7 @@ from nacl.public import PrivateKey
2355 from pymacaroons import Macaroon
2356 import pytz
2357 import responses
2358+from six.moves.urllib.parse import urlsplit
2359 from storm.exceptions import LostObjectError
2360 from storm.locals import Store
2361 from testtools.matchers import (
2362diff --git a/lib/lp/snappy/tests/test_snapbuild.py b/lib/lp/snappy/tests/test_snapbuild.py
2363index 67e2be1..4a6000e 100644
2364--- a/lib/lp/snappy/tests/test_snapbuild.py
2365+++ b/lib/lp/snappy/tests/test_snapbuild.py
2366@@ -11,11 +11,11 @@ from datetime import (
2367 datetime,
2368 timedelta,
2369 )
2370-from urllib2 import urlopen
2371
2372 from fixtures import FakeLogger
2373 from pymacaroons import Macaroon
2374 import pytz
2375+from six.moves.urllib.request import urlopen
2376 from testtools.matchers import (
2377 ContainsDict,
2378 Equals,
2379diff --git a/lib/lp/soyuz/browser/widgets/archive.py b/lib/lp/soyuz/browser/widgets/archive.py
2380index 0505a2e..505ccb4 100644
2381--- a/lib/lp/soyuz/browser/widgets/archive.py
2382+++ b/lib/lp/soyuz/browser/widgets/archive.py
2383@@ -8,7 +8,7 @@ __all__ = [
2384 'PPANameWidget',
2385 ]
2386
2387-import urlparse
2388+from six.moves.urllib.parse import urljoin
2389
2390 from lp.app.widgets.textwidgets import URIComponentWidget
2391 from lp.services.config import config
2392@@ -25,4 +25,4 @@ class PPANameWidget(URIComponentWidget):
2393 root = config.personalpackagearchive.private_base_url
2394 else:
2395 root = config.personalpackagearchive.base_url
2396- return urlparse.urljoin(root, owner.name) + '/'
2397+ return urljoin(root, owner.name) + '/'
2398diff --git a/lib/lp/soyuz/interfaces/archive.py b/lib/lp/soyuz/interfaces/archive.py
2399index d5e7e06..209d1a0 100644
2400--- a/lib/lp/soyuz/interfaces/archive.py
2401+++ b/lib/lp/soyuz/interfaces/archive.py
2402@@ -55,7 +55,6 @@ __all__ = [
2403 ]
2404
2405 import re
2406-from urlparse import urlparse
2407
2408 from lazr.restful.declarations import (
2409 call_with,
2410@@ -81,6 +80,7 @@ from lazr.restful.fields import (
2411 Reference,
2412 )
2413 from six.moves import http_client
2414+from six.moves.urllib.parse import urlparse
2415 from zope.interface import (
2416 Attribute,
2417 Interface,
2418diff --git a/lib/lp/soyuz/scripts/ppa_apache_log_parser.py b/lib/lp/soyuz/scripts/ppa_apache_log_parser.py
2419index 0085acb..64bfcd1 100644
2420--- a/lib/lp/soyuz/scripts/ppa_apache_log_parser.py
2421+++ b/lib/lp/soyuz/scripts/ppa_apache_log_parser.py
2422@@ -4,7 +4,8 @@
2423 __all__ = ['DBUSER', 'get_ppa_file_key']
2424
2425 import os.path
2426-import urllib
2427+
2428+from six.moves.urllib.parse import unquote
2429
2430 from lp.archiveuploader.utils import re_isadeb
2431
2432@@ -13,7 +14,7 @@ DBUSER = 'ppa-apache-log-parser'
2433
2434
2435 def get_ppa_file_key(path):
2436- split_path = os.path.normpath(urllib.unquote(path)).split('/')
2437+ split_path = os.path.normpath(unquote(path)).split('/')
2438 if len(split_path) != 9:
2439 return None
2440
2441diff --git a/lib/lp/soyuz/tests/test_livefsbuild.py b/lib/lp/soyuz/tests/test_livefsbuild.py
2442index 99ceccc..ccd3507 100644
2443--- a/lib/lp/soyuz/tests/test_livefsbuild.py
2444+++ b/lib/lp/soyuz/tests/test_livefsbuild.py
2445@@ -11,7 +11,6 @@ from datetime import (
2446 datetime,
2447 timedelta,
2448 )
2449-from urllib2 import urlopen
2450
2451 from fixtures import FakeLogger
2452 import pytz
2453@@ -21,6 +20,7 @@ from testtools.matchers import (
2454 MatchesDict,
2455 MatchesStructure,
2456 )
2457+from six.moves.urllib.request import urlopen
2458 from zope.component import getUtility
2459 from zope.security.proxy import removeSecurityProxy
2460
2461diff --git a/lib/lp/soyuz/tests/test_packageupload.py b/lib/lp/soyuz/tests/test_packageupload.py
2462index 6e78cdf..5afc59a 100644
2463--- a/lib/lp/soyuz/tests/test_packageupload.py
2464+++ b/lib/lp/soyuz/tests/test_packageupload.py
2465@@ -9,7 +9,6 @@ from datetime import timedelta
2466 import io
2467 import os.path
2468 import shutil
2469-from urllib2 import urlopen
2470
2471 from debian.deb822 import Changes
2472 from lazr.restfulclient.errors import (
2473@@ -21,6 +20,7 @@ from testtools.matchers import (
2474 MatchesListwise,
2475 MatchesStructure,
2476 )
2477+from six.moves.urllib.request import urlopen
2478 import transaction
2479 from zope.component import (
2480 getUtility,
2481diff --git a/lib/lp/testing/keyserver/tests/test_harness.py b/lib/lp/testing/keyserver/tests/test_harness.py
2482index b8df7b4..933c9f3 100644
2483--- a/lib/lp/testing/keyserver/tests/test_harness.py
2484+++ b/lib/lp/testing/keyserver/tests/test_harness.py
2485@@ -3,7 +3,7 @@
2486
2487 __metaclass__ = type
2488
2489-from urllib import urlopen
2490+from six.moves.urllib.request import urlopen
2491
2492 from lp.services.config import config
2493 from lp.testing import TestCase
2494diff --git a/lib/lp/testing/layers.py b/lib/lp/testing/layers.py
2495index 65fb63e..0a1303e 100644
2496--- a/lib/lp/testing/layers.py
2497+++ b/lib/lp/testing/layers.py
2498@@ -71,7 +71,6 @@ from unittest import (
2499 TestCase,
2500 TestResult,
2501 )
2502-from urllib import urlopen
2503 import uuid
2504
2505 from fixtures import (
2506@@ -79,10 +78,12 @@ from fixtures import (
2507 MonkeyPatch,
2508 )
2509 import psycopg2
2510+from six.moves.urllib.error import URLError
2511 from six.moves.urllib.parse import (
2512 quote,
2513 urlparse,
2514 )
2515+from six.moves.urllib.request import urlopen
2516 from storm.zope.interfaces import IZStorm
2517 import transaction
2518 from webob.request import environ_from_url as orig_environ_from_url
2519@@ -1968,14 +1969,11 @@ class LayerProcessController:
2520 try:
2521 connection = urlopen(root_url)
2522 connection.read()
2523- except IOError as error:
2524+ except URLError as error:
2525 # We are interested in a wrapped socket.error.
2526- # urlopen() really sucks here.
2527- if len(error.args) <= 1:
2528+ if not isinstance(error.reason, socket.error):
2529 raise
2530- if not isinstance(error.args[1], socket.error):
2531- raise
2532- if error.args[1].args[0] != errno.ECONNREFUSED:
2533+ if error.reason.args[0] != errno.ECONNREFUSED:
2534 raise
2535 returncode = cls.appserver.poll()
2536 if returncode is not None:
2537diff --git a/lib/lp/testing/pages.py b/lib/lp/testing/pages.py
2538index 073f5b8..e552b4b 100644
2539--- a/lib/lp/testing/pages.py
2540+++ b/lib/lp/testing/pages.py
2541@@ -16,7 +16,6 @@ import os
2542 import pprint
2543 import re
2544 import unittest
2545-from urlparse import urljoin
2546
2547 from bs4.element import (
2548 CData,
2549@@ -36,6 +35,7 @@ from contrib.oauth import (
2550 )
2551 from lazr.restful.testing.webservice import WebServiceCaller
2552 import six
2553+from six.moves.urllib.parse import urljoin
2554 from soupsieve import escape as css_escape
2555 import transaction
2556 from webtest import (
2557diff --git a/lib/lp/testing/tests/test_layers_functional.py b/lib/lp/testing/tests/test_layers_functional.py
2558index 5a97097..1dbfeda 100644
2559--- a/lib/lp/testing/tests/test_layers_functional.py
2560+++ b/lib/lp/testing/tests/test_layers_functional.py
2561@@ -15,7 +15,6 @@ from cStringIO import StringIO
2562 import os
2563 import signal
2564 import smtplib
2565-from urllib import urlopen
2566 import uuid
2567
2568 import amqp
2569@@ -25,6 +24,8 @@ from fixtures import (
2570 TestWithFixtures,
2571 )
2572 from lazr.config import as_host_port
2573+from six.moves.urllib.error import HTTPError
2574+from six.moves.urllib.request import urlopen
2575 from zope.component import (
2576 ComponentLookupError,
2577 getUtility,
2578@@ -354,10 +355,7 @@ class LibrarianResetTestCase(TestCase):
2579 LibrarianLayer.testTearDown()
2580 LibrarianLayer.testSetUp()
2581 # Which should have nuked the old file.
2582- # XXX: StuartBishop 2006-06-30 Bug=51370:
2583- # We should get a DownloadFailed exception here.
2584- data = urlopen(LibrarianTestCase.url).read()
2585- self.assertNotEqual(data, self.sample_data)
2586+ self.assertRaises(HTTPError, urlopen, LibrarianTestCase.url)
2587
2588
2589 class LibrarianHideTestCase(TestCase):
2590diff --git a/lib/lp/testopenid/stories/basics.txt b/lib/lp/testopenid/stories/basics.txt
2591index 3ccbaf4..56c6f4e 100644
2592--- a/lib/lp/testopenid/stories/basics.txt
2593+++ b/lib/lp/testopenid/stories/basics.txt
2594@@ -29,7 +29,7 @@ After determining the URL of the OpenID server, the next thing a consumer
2595 needs to do is associate with the server and get a shared secret via a
2596 POST request.
2597
2598- >>> from urllib import urlencode
2599+ >>> from six.moves.urllib.parse import urlencode
2600 >>> anon_browser.open(
2601 ... 'http://testopenid.test/+openid', data=urlencode({
2602 ... 'openid.mode': 'associate',
2603diff --git a/lib/lp/translations/browser/person.py b/lib/lp/translations/browser/person.py
2604index 9b2e992..e8f3e1d 100644
2605--- a/lib/lp/translations/browser/person.py
2606+++ b/lib/lp/translations/browser/person.py
2607@@ -16,9 +16,9 @@ from datetime import (
2608 timedelta,
2609 )
2610 from itertools import islice
2611-import urllib
2612
2613 import pytz
2614+from six.moves.urllib.parse import urlencode
2615 from zope.browserpage import ViewPageTemplateFile
2616 from zope.component import getUtility
2617 from zope.formlib.widget import CustomWidgetFactory
2618@@ -114,7 +114,7 @@ class TranslateLinksAggregator(WorkListLinksAggregator):
2619
2620 def compose_pofile_filter_url(pofile, person):
2621 """Compose URL for `Person`'s contributions to `POFile`."""
2622- person_name = urllib.urlencode({'person': person.name})
2623+ person_name = urlencode({'person': person.name})
2624 return canonical_url(pofile) + "/+filter?%s" % person_name
2625
2626
2627diff --git a/lib/lp/translations/browser/pofile.py b/lib/lp/translations/browser/pofile.py
2628index a193059..e14555c 100644
2629--- a/lib/lp/translations/browser/pofile.py
2630+++ b/lib/lp/translations/browser/pofile.py
2631@@ -17,9 +17,9 @@ __all__ = [
2632
2633 import os.path
2634 import re
2635-import urllib
2636
2637 from lazr.restful.utils import smartquote
2638+from six.moves.urllib.parse import urlencode
2639 from zope.component import getUtility
2640 from zope.publisher.browser import FileUpload
2641
2642@@ -568,7 +568,7 @@ class POFileTranslateView(BaseTranslationView, POFileMetadataViewMixin):
2643 return self.request.response.redirect(
2644 canonical_url(self.user, view_name='+licensing',
2645 rootsite='translations') +
2646- '?' + urllib.urlencode({'back_to': url}))
2647+ '?' + urlencode({'back_to': url}))
2648
2649 # The handling of errors is slightly tricky here. Because this
2650 # form displays multiple POMsgSetViews, we need to track the
2651diff --git a/lib/lp/translations/browser/tests/test_persontranslationview.py b/lib/lp/translations/browser/tests/test_persontranslationview.py
2652index f64f8aa..33abcbc 100644
2653--- a/lib/lp/translations/browser/tests/test_persontranslationview.py
2654+++ b/lib/lp/translations/browser/tests/test_persontranslationview.py
2655@@ -3,8 +3,7 @@
2656
2657 __metaclass__ = type
2658
2659-import urllib
2660-
2661+from six.moves.urllib.parse import urlencode
2662 from zope.security.proxy import removeSecurityProxy
2663
2664 from lp.app.enums import ServiceUsage
2665@@ -199,7 +198,7 @@ class TestPersonTranslationView(TestCaseWithFactory):
2666 pofiles_worked_on = self._makePOFiles(11, previously_worked_on=True)
2667
2668 # the expected results
2669- person_name = urllib.urlencode({'person': self.view.context.name})
2670+ person_name = urlencode({'person': self.view.context.name})
2671 expected_links = [
2672 (pofile.potemplate.translationtarget.title,
2673 canonical_url(pofile, view_name="+filter") + "?%s" % person_name)
2674diff --git a/lib/lp/translations/browser/translationmessage.py b/lib/lp/translations/browser/translationmessage.py
2675index c2bdc04..ef75f6c 100644
2676--- a/lib/lp/translations/browser/translationmessage.py
2677+++ b/lib/lp/translations/browser/translationmessage.py
2678@@ -21,10 +21,12 @@ __all__ = [
2679 import datetime
2680 import operator
2681 import re
2682-import urllib
2683
2684 import pytz
2685-from six.moves.urllib.parse import parse_qsl
2686+from six.moves.urllib.parse import (
2687+ parse_qsl,
2688+ urlencode,
2689+ )
2690 from zope import datetime as zope_datetime
2691 from zope.browserpage import ViewPageTemplateFile
2692 from zope.component import getUtility
2693@@ -863,7 +865,7 @@ class BaseTranslationView(LaunchpadView):
2694 else:
2695 base_url = new_url
2696
2697- new_query = urllib.urlencode(sorted(parameters.items()))
2698+ new_query = urlencode(sorted(parameters.items()))
2699
2700 if new_query:
2701 new_url = '%s?%s' % (base_url, new_query)
2702diff --git a/lib/lp/translations/doc/poexport-request-productseries.txt b/lib/lp/translations/doc/poexport-request-productseries.txt
2703index 8de39ba..4e39210 100644
2704--- a/lib/lp/translations/doc/poexport-request-productseries.txt
2705+++ b/lib/lp/translations/doc/poexport-request-productseries.txt
2706@@ -84,9 +84,9 @@ The email contains a URL linking to where the exported file can be downloaded.
2707
2708 Let's download it and make sure the contents look ok.
2709
2710- >>> import urllib2
2711+ >>> from six.moves.urllib.request import urlopen
2712 >>> from lp.services.helpers import string_to_tarfile
2713- >>> tarball = string_to_tarfile(urllib2.urlopen(url).read())
2714+ >>> tarball = string_to_tarfile(urlopen(url).read())
2715 >>> for name in sorted(tarball.getnames()):
2716 ... print(name)
2717 evolution-2.2
2718diff --git a/lib/lp/translations/doc/poexport-request.txt b/lib/lp/translations/doc/poexport-request.txt
2719index a476426..a95d7c0 100644
2720--- a/lib/lp/translations/doc/poexport-request.txt
2721+++ b/lib/lp/translations/doc/poexport-request.txt
2722@@ -87,9 +87,9 @@ The email contains a URL linking to where the exported file can be downloaded.
2723
2724 Let's download it and make sure the contents look ok.
2725
2726- >>> import urllib2
2727+ >>> from six.moves.urllib.request import urlopen
2728 >>> from lp.services.helpers import string_to_tarfile
2729- >>> tarball = string_to_tarfile(urllib2.urlopen(url).read())
2730+ >>> tarball = string_to_tarfile(urlopen(url).read())
2731 >>> for name in sorted(tarball.getnames()):
2732 ... print(name)
2733 pmount
2734@@ -208,7 +208,7 @@ Check whether we generated a good .mo file.
2735
2736 >>> body = emails.pop().get_payload()
2737 >>> url = extract_url(body)
2738- >>> is_valid_mofile(urllib2.urlopen(url).read())
2739+ >>> is_valid_mofile(urlopen(url).read())
2740 True
2741
2742
2743diff --git a/lib/lp/translations/stories/importqueue/xx-translation-import-queue.txt b/lib/lp/translations/stories/importqueue/xx-translation-import-queue.txt
2744index 4ba2c4b..8e9f427 100644
2745--- a/lib/lp/translations/stories/importqueue/xx-translation-import-queue.txt
2746+++ b/lib/lp/translations/stories/importqueue/xx-translation-import-queue.txt
2747@@ -208,8 +208,8 @@ There is an option to remove entries from the queue.
2748
2749 No Privileges Person tries to remove entries but to no effect.
2750
2751- >>> import urllib
2752- >>> post_data = urllib.urlencode(
2753+ >>> from six.moves.urllib.parse import urlencode
2754+ >>> post_data = urlencode(
2755 ... {
2756 ... 'field.filter_target': 'all',
2757 ... 'field.filter_status': 'all',
2758@@ -456,7 +456,7 @@ the erroneous parenthesis included.
2759 Here we'll simulate such a request and show that the resulting unrecognized
2760 filter_extension values do not generate an error. See bug 388997.
2761
2762- >>> post_data = urllib.urlencode(
2763+ >>> post_data = urlencode(
2764 ... {
2765 ... 'field.filter_target': 'all',
2766 ... 'field.filter_status': 'all',
2767diff --git a/utilities/paste b/utilities/paste
2768index 873807b..e2c630e 100755
2769--- a/utilities/paste
2770+++ b/utilities/paste
2771@@ -14,11 +14,10 @@ from optparse import OptionParser
2772 import os
2773 import pwd
2774 import sys
2775-import urllib
2776-from urlparse import urljoin
2777 import webbrowser
2778
2779 from fixtures import MonkeyPatch
2780+from six.moves.urllib.parse import urljoin
2781 from zope.testbrowser.browser import Browser
2782
2783 # Should we be able to override any of these?
2784diff --git a/utilities/roundup-sniffer.py b/utilities/roundup-sniffer.py
2785index f13088f..c101ecc 100755
2786--- a/utilities/roundup-sniffer.py
2787+++ b/utilities/roundup-sniffer.py
2788@@ -46,8 +46,9 @@ from os.path import (
2789 from pprint import pprint
2790 import sys
2791 from time import sleep
2792-from urllib import urlencode
2793-import urllib2
2794+
2795+from six.moves.urllib.parse import urlencode
2796+from six.moves.urllib.request import urlopen
2797
2798 from lp.services.beautifulsoup import BeautifulSoup
2799
2800@@ -67,8 +68,7 @@ class RoundupSniffer:
2801 """Fetch the URL, consulting the cache first."""
2802 filename = join(self.cache_dir, urlsafe_b64encode(url))
2803 if not exists(filename):
2804- open(filename, 'wb').write(
2805- urllib2.urlopen(url).read())
2806+ open(filename, 'wb').write(urlopen(url).read())
2807 return open(filename, 'rb')
2808
2809 def get_all_bugs(self):
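
The change applied across the diff above is mechanical: Python 2-only imports from urllib, urllib2 and urlparse are swapped for the six.moves.urllib aliases, which resolve to urllib.parse, urllib.request and urllib.error on Python 3. As a minimal sketch of that pattern (not code from the branch; the function and URLs below are illustrative only):

    from six.moves.urllib.error import HTTPError, URLError
    from six.moves.urllib.parse import quote, urlencode, urlsplit
    from six.moves.urllib.request import urlopen


    def fetch(base_url, params):
        """Fetch base_url?params, returning the body or None on a 404."""
        url = '%s?%s' % (base_url, urlencode(sorted(params.items())))
        try:
            return urlopen(url).read()
        except HTTPError as error:
            # HTTPError subclasses URLError, so handle it first.
            if error.code == 404:
                return None
            raise
        except URLError:
            # Non-HTTP failures such as connection refused.
            raise


    # The parse helpers behave identically on Python 2 and 3.
    print(urlsplit('http://launchpad.test/path?a=1').path)
    print(quote('~foo bar+baz', safe='/~+'))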
