launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #24281
[Merge] ~cjwatson/launchpad:six-urllib into launchpad:master
Colin Watson has proposed merging ~cjwatson/launchpad:six-urllib into launchpad:master.
Commit message:
Import urllib and friends from six.moves
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/378460
This part of the standard library was rearranged in Python 3.
lp.testing.layers and its tests needed a few extra adjustments, since they were relying on some details of urllib.urlopen's error handling that differed from those in urllib2.urlopen / urllib.request.urlopen.
--
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:six-urllib into launchpad:master.
diff --git a/lib/launchpad_loggerhead/app.py b/lib/launchpad_loggerhead/app.py
index cd98725..59225d7 100644
--- a/lib/launchpad_loggerhead/app.py
+++ b/lib/launchpad_loggerhead/app.py
@@ -4,8 +4,6 @@
import logging
import os
import threading
-import urllib
-import urlparse
import xmlrpclib
from breezy import (
@@ -42,6 +40,10 @@ from paste.request import (
parse_querystring,
path_info_pop,
)
+from six.moves.urllib.parse import (
+ urlencode,
+ urljoin,
+ )
from lp.code.interfaces.codehosting import (
BRANCH_TRANSPORT,
@@ -127,7 +129,7 @@ class RootApp:
raise HTTPMovedPermanently(openid_request.redirectURL(
config.codehosting.secure_codebrowse_root,
config.codehosting.secure_codebrowse_root + '+login/?'
- + urllib.urlencode({'back_to': back_to})))
+ + urlencode({'back_to': back_to})))
def _complete_login(self, environ, start_response):
"""Complete the OpenID authentication process.
@@ -261,7 +263,7 @@ class RootApp:
environ['PATH_INFO'] = trail
environ['SCRIPT_NAME'] += consumed.rstrip('/')
branch_url = lp_server.get_url() + branch_name
- branch_link = urlparse.urljoin(
+ branch_link = urljoin(
config.codebrowse.launchpad_root, branch_name)
cachepath = os.path.join(
config.codebrowse.cachepath, branch_name[1:])
diff --git a/lib/lp/answers/browser/faqcollection.py b/lib/lp/answers/browser/faqcollection.py
index 03b0c28..67c07a7 100644
--- a/lib/lp/answers/browser/faqcollection.py
+++ b/lib/lp/answers/browser/faqcollection.py
@@ -10,7 +10,7 @@ __all__ = [
'SearchFAQsView',
]
-from urllib import urlencode
+from six.moves.urllib.parse import urlencode
from lp import _
from lp.answers.enums import (
diff --git a/lib/lp/answers/browser/questiontarget.py b/lib/lp/answers/browser/questiontarget.py
index 9fcf9e0..a80b36f 100644
--- a/lib/lp/answers/browser/questiontarget.py
+++ b/lib/lp/answers/browser/questiontarget.py
@@ -21,13 +21,13 @@ __all__ = [
]
from operator import attrgetter
-from urllib import urlencode
from lazr.restful.interfaces import (
IJSONRequestCache,
IWebServiceClientRequest,
)
from simplejson import dumps
+from six.moves.urllib.parse import urlencode
from zope.browserpage import ViewPageTemplateFile
from zope.component import (
getMultiAdapter,
diff --git a/lib/lp/answers/browser/tests/test_questiontarget.py b/lib/lp/answers/browser/tests/test_questiontarget.py
index 2c842c0..cd510b6 100644
--- a/lib/lp/answers/browser/tests/test_questiontarget.py
+++ b/lib/lp/answers/browser/tests/test_questiontarget.py
@@ -8,7 +8,6 @@ from __future__ import absolute_import, print_function, unicode_literals
__metaclass__ = type
import os
-from urllib import quote
from lazr.restful.interfaces import (
IJSONRequestCache,
@@ -16,6 +15,7 @@ from lazr.restful.interfaces import (
)
from simplejson import dumps
import six
+from six.moves.urllib.parse import quote
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
from zope.traversing.browser import absoluteURL
diff --git a/lib/lp/app/browser/launchpad.py b/lib/lp/app/browser/launchpad.py
index 50ccd6c..d12c849 100644
--- a/lib/lp/app/browser/launchpad.py
+++ b/lib/lp/app/browser/launchpad.py
@@ -27,9 +27,11 @@ import operator
import os
import re
import time
-import urllib
-from six.moves.urllib.parse import parse_qs
+from six.moves.urllib.parse import (
+ parse_qs,
+ urlencode,
+ )
from zope import i18n
from zope.component import (
getGlobalSiteManager,
@@ -633,8 +635,7 @@ class LoginStatus:
if query_string:
query_dict = parse_qs(query_string, keep_blank_values=True)
query_dict.pop('loggingout', None)
- query_string = urllib.urlencode(
- sorted(query_dict.items()), doseq=True)
+ query_string = urlencode(sorted(query_dict.items()), doseq=True)
# If we still have a query_string after things we don't want
# have been removed, add it onto the url.
if query_string:
diff --git a/lib/lp/app/browser/tales.py b/lib/lp/app/browser/tales.py
index 3466a83..c732f5e 100644
--- a/lib/lp/app/browser/tales.py
+++ b/lib/lp/app/browser/tales.py
@@ -16,12 +16,12 @@ import os.path
import rfc822
import sys
from textwrap import dedent
-import urllib
from lazr.enum import enumerated_type_registry
from lazr.restful.utils import get_current_browser_request
from lazr.uri import URI
import pytz
+from six.moves.urllib.parse import quote
from zope.browserpage import ViewPageTemplateFile
from zope.component import (
adapter,
@@ -1645,7 +1645,7 @@ class ProductReleaseFileFormatterAPI(ObjectFormatterAPI):
url = urlappend(canonical_url(self._release), '+download')
# Quote the filename to eliminate non-ascii characters which
# are invalid in the url.
- return urlappend(url, urllib.quote(lfa.filename.encode('utf-8')))
+ return urlappend(url, quote(lfa.filename.encode('utf-8')))
class BranchFormatterAPI(ObjectFormatterAPI):
diff --git a/lib/lp/app/browser/tests/test_vocabulary.py b/lib/lp/app/browser/tests/test_vocabulary.py
index 2036621..2904d8c 100644
--- a/lib/lp/app/browser/tests/test_vocabulary.py
+++ b/lib/lp/app/browser/tests/test_vocabulary.py
@@ -6,10 +6,10 @@
__metaclass__ = type
from datetime import datetime
-from urllib import urlencode
import pytz
import simplejson
+from six.moves.urllib.parse import urlencode
from zope.component import (
getSiteManager,
getUtility,
diff --git a/lib/lp/bugs/browser/buglisting.py b/lib/lp/bugs/browser/buglisting.py
index 3cef8f9..c0e67a6 100644
--- a/lib/lp/bugs/browser/buglisting.py
+++ b/lib/lp/bugs/browser/buglisting.py
@@ -22,7 +22,6 @@ __all__ = [
]
import os.path
-import urllib
from lazr.delegates import delegate_to
from lazr.restful.interfaces import IJSONRequestCache
@@ -33,6 +32,8 @@ from simplejson.encoder import JSONEncoderForHTML
from six.moves.urllib.parse import (
parse_qs,
parse_qsl,
+ quote,
+ urlencode,
)
from zope.authentication.interfaces import IUnauthenticatedPrincipal
from zope.browserpage import ViewPageTemplateFile
@@ -245,7 +246,7 @@ def rewrite_old_bugtask_status_query_string(query_string):
if query_elements == query_elements_mapped:
return query_string
else:
- return urllib.urlencode(query_elements_mapped, doseq=True)
+ return urlencode(query_elements_mapped, doseq=True)
def target_has_expirable_bugs_listing(target):
@@ -593,7 +594,7 @@ def get_buglisting_search_filter_url(
if orderby is not None:
search_params.append(('orderby', orderby))
- query_string = urllib.urlencode(search_params, doseq=True)
+ query_string = urlencode(search_params, doseq=True)
search_filter_url = "+bugs?search=Search"
if query_string != '':
@@ -688,7 +689,7 @@ class BugTaskListingItem:
'reporter': reporter.displayname,
'status': self.status.title,
'status_class': 'status' + self.status.name,
- 'tags': [{'url': base_tag_url + urllib.quote(tag), 'tag': tag}
+ 'tags': [{'url': base_tag_url + quote(tag), 'tag': tag}
for tag in self.tags],
'title': self.bug.title,
}
diff --git a/lib/lp/bugs/browser/bugtarget.py b/lib/lp/bugs/browser/bugtarget.py
index 5b0366a..6daaabe 100644
--- a/lib/lp/bugs/browser/bugtarget.py
+++ b/lib/lp/bugs/browser/bugtarget.py
@@ -24,12 +24,15 @@ from datetime import datetime
from functools import partial
import httplib
from operator import itemgetter
-import urllib
from lazr.restful.interface import copy_field
from lazr.restful.interfaces import IJSONRequestCache
from pytz import timezone
from simplejson import dumps
+from six.moves.urllib.parse import (
+ quote,
+ urlencode,
+ )
from sqlobject import SQLObjectNotFound
from zope.browserpage import ViewPageTemplateFile
from zope.component import getUtility
@@ -1100,7 +1103,7 @@ class ProjectGroupFileBugGuidedView(LaunchpadFormView):
base = canonical_url(
data['product'], view_name='+filebug', rootsite='bugs')
title = data['title'].encode('utf8')
- query = urllib.urlencode([
+ query = urlencode([
('field.title', title),
('field.tags', ' '.join(data['tags'])),
])
@@ -1217,7 +1220,7 @@ class BugTargetBugTagsView(LaunchpadView):
def _getSearchURL(self, tag):
"""Return the search URL for the tag."""
# Use path_only here to reduce the size of the rendered page.
- return "+bugs?field.tag=%s" % urllib.quote(tag)
+ return "+bugs?field.tag=%s" % quote(tag)
@property
def tags_cloud_data(self):
diff --git a/lib/lp/bugs/browser/bugtask.py b/lib/lp/bugs/browser/bugtask.py
index 3cc4ad5..77e7f28 100644
--- a/lib/lp/bugs/browser/bugtask.py
+++ b/lib/lp/bugs/browser/bugtask.py
@@ -35,7 +35,6 @@ from datetime import (
from itertools import groupby
from operator import attrgetter
import re
-import urllib
from lazr.delegates import delegate_to
from lazr.lifecycle.event import ObjectModifiedEvent
@@ -50,6 +49,7 @@ from lazr.restful.interfaces import (
from lazr.restful.utils import smartquote
from pytz import utc
from simplejson import dumps
+from six.moves.urllib.parse import quote
import transaction
from zope import formlib
from zope.browserpage import ViewPageTemplateFile
@@ -864,7 +864,7 @@ class BugTaskView(LaunchpadView, BugViewMixin, FeedsMixin):
if tag in target_official_tags:
links.append((tag, '%s?field.tag=%s' % (
canonical_url(self.context.target, view_name='+bugs',
- force_local_path=True), urllib.quote(tag))))
+ force_local_path=True), quote(tag))))
return links
@property
@@ -876,7 +876,7 @@ class BugTaskView(LaunchpadView, BugViewMixin, FeedsMixin):
if tag not in target_official_tags:
links.append((tag, '%s?field.tag=%s' % (
canonical_url(self.context.target, view_name='+bugs',
- force_local_path=True), urllib.quote(tag))))
+ force_local_path=True), quote(tag))))
return links
@property
diff --git a/lib/lp/bugs/browser/person.py b/lib/lp/bugs/browser/person.py
index 77dff55..c25219d 100644
--- a/lib/lp/bugs/browser/person.py
+++ b/lib/lp/bugs/browser/person.py
@@ -18,8 +18,8 @@ __all__ = [
import copy
from operator import itemgetter
-import urllib
+from six.moves.urllib.parse import urlencode
from zope.component import getUtility
from lp.bugs.browser.buglisting import BugTaskSearchListingView
@@ -58,14 +58,14 @@ def get_package_search_url(dsp_bugs_url, extra_params=None):
"field.status": [
status.title for status in UNRESOLVED_BUGTASK_STATUSES]}
if extra_params is not None:
- # We must UTF-8 encode searchtext to play nicely with
- # urllib.urlencode, because it may contain non-ASCII characters.
+ # We must UTF-8 encode searchtext to play nicely with urlencode,
+ # because it may contain non-ASCII characters.
if 'field.searchtext' in extra_params:
extra_params["field.searchtext"] = (
extra_params["field.searchtext"].encode("utf8"))
params.update(extra_params)
return '%s?%s' % (
- dsp_bugs_url, urllib.urlencode(sorted(params.items()), doseq=True))
+ dsp_bugs_url, urlencode(sorted(params.items()), doseq=True))
class PersonBugsMenu(NavigationMenu):
diff --git a/lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py b/lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py
index 1ae3b1e..9454ff7 100644
--- a/lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py
+++ b/lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py
@@ -6,9 +6,9 @@
__metaclass__ = type
import json
-from urlparse import urlparse
from lxml import html
+from six.moves.urllib.parse import urlparse
from testtools.matchers import StartsWith
from lp.app.enums import InformationType
diff --git a/lib/lp/bugs/browser/tests/test_bugtask.py b/lib/lp/bugs/browser/tests/test_bugtask.py
index 3648a95..b134c89 100644
--- a/lib/lp/bugs/browser/tests/test_bugtask.py
+++ b/lib/lp/bugs/browser/tests/test_bugtask.py
@@ -8,12 +8,12 @@ from datetime import (
timedelta,
)
import re
-import urllib
from lazr.restful.interfaces import IJSONRequestCache
from pytz import UTC
import simplejson
import six
+from six.moves.urllib.parse import urlencode
import soupmatchers
from testscenarios import (
load_tests_apply_scenarios,
@@ -2000,7 +2000,7 @@ class TestBugTaskSearchListingView(BrowserTestCase):
query_vars['start'] = int(memo) - size
if not forwards:
query_vars['direction'] = 'backwards'
- query_string = urllib.urlencode(query_vars)
+ query_string = urlencode(query_vars)
request = LaunchpadTestRequest(
QUERY_STRING=query_string, orderby=orderby, HTTP_COOKIE=cookie)
if bugtask is None:
diff --git a/lib/lp/bugs/browser/tests/test_structuralsubscription.py b/lib/lp/bugs/browser/tests/test_structuralsubscription.py
index ff2b160..89d8e07 100644
--- a/lib/lp/bugs/browser/tests/test_structuralsubscription.py
+++ b/lib/lp/bugs/browser/tests/test_structuralsubscription.py
@@ -3,8 +3,7 @@
"""Tests for structural subscription traversal."""
-from urlparse import urlparse
-
+from six.moves.urllib.parse import urlparse
from zope.publisher.interfaces import NotFound
from lp.registry.browser.distribution import DistributionNavigation
diff --git a/lib/lp/bugs/externalbugtracker/github.py b/lib/lp/bugs/externalbugtracker/github.py
index e2c462b..6bf0688 100644
--- a/lib/lp/bugs/externalbugtracker/github.py
+++ b/lib/lp/bugs/externalbugtracker/github.py
@@ -14,11 +14,13 @@ __all__ = [
from contextlib import contextmanager
import httplib
import time
-from urllib import urlencode
-from urlparse import urlunsplit
import pytz
import requests
+from six.moves.urllib.parse import (
+ urlencode,
+ urlunsplit,
+ )
from zope.component import getUtility
from zope.interface import Interface
diff --git a/lib/lp/bugs/externalbugtracker/roundup.py b/lib/lp/bugs/externalbugtracker/roundup.py
index ea43711..f862aec 100644
--- a/lib/lp/bugs/externalbugtracker/roundup.py
+++ b/lib/lp/bugs/externalbugtracker/roundup.py
@@ -7,9 +7,9 @@ __metaclass__ = type
__all__ = ['Roundup']
import csv
-from urllib import quote_plus
from lazr.uri import URI
+from six.moves.urllib.parse import quote_plus
from lp.bugs.externalbugtracker import (
BugNotFound,
diff --git a/lib/lp/bugs/externalbugtracker/sourceforge.py b/lib/lp/bugs/externalbugtracker/sourceforge.py
index b61c4b1..fdc7441 100644
--- a/lib/lp/bugs/externalbugtracker/sourceforge.py
+++ b/lib/lp/bugs/externalbugtracker/sourceforge.py
@@ -7,7 +7,8 @@ __metaclass__ = type
__all__ = ['SourceForge']
import re
-import urllib
+
+from six.moves.urllib.parse import splitvalue
from lp.bugs.externalbugtracker import (
BugNotFound,
@@ -103,7 +104,7 @@ class SourceForge(ExternalBugTracker):
query_bits = query.split('&')
for bit in query_bits:
- key, value = urllib.splitvalue(bit)
+ key, value = splitvalue(bit)
query_dict[key] = value
try:
diff --git a/lib/lp/bugs/externalbugtracker/xmlrpc.py b/lib/lp/bugs/externalbugtracker/xmlrpc.py
index a3e99f4..6257d52 100644
--- a/lib/lp/bugs/externalbugtracker/xmlrpc.py
+++ b/lib/lp/bugs/externalbugtracker/xmlrpc.py
@@ -10,10 +10,6 @@ __all__ = [
from io import BytesIO
-from urlparse import (
- urlparse,
- urlunparse,
- )
from xmlrpclib import (
ProtocolError,
Transport,
@@ -23,6 +19,10 @@ from defusedxml.xmlrpc import monkey_patch
import requests
from requests.cookies import RequestsCookieJar
import six
+from six.moves.urllib.parse import (
+ urlparse,
+ urlunparse,
+ )
from lp.bugs.externalbugtracker.base import repost_on_redirect_hook
from lp.services.config import config
diff --git a/lib/lp/bugs/model/bugwatch.py b/lib/lp/bugs/model/bugwatch.py
index 18fd674..a9f430d 100644
--- a/lib/lp/bugs/model/bugwatch.py
+++ b/lib/lp/bugs/model/bugwatch.py
@@ -11,13 +11,15 @@ __all__ = [
from datetime import datetime
import re
-import urllib
-from urlparse import urlunsplit
from lazr.lifecycle.event import ObjectModifiedEvent
from lazr.lifecycle.snapshot import Snapshot
from lazr.uri import find_uris_in_text
from pytz import utc
+from six.moves.urllib.parse import (
+ splitvalue,
+ urlunsplit,
+ )
from sqlobject import (
ForeignKey,
SQLObjectNotFound,
@@ -722,7 +724,7 @@ class BugWatchSet:
scheme, host, path, query_string, frag = urlsplit(url)
query = {}
for query_part in query_string.split('&'):
- key, value = urllib.splitvalue(query_part)
+ key, value = splitvalue(query_part)
query[key] = value
bugtracker_data = parse_func(scheme, host, path, query)
diff --git a/lib/lp/bugs/scripts/sfremoteproductfinder.py b/lib/lp/bugs/scripts/sfremoteproductfinder.py
index b6342f3..66a88fd 100644
--- a/lib/lp/bugs/scripts/sfremoteproductfinder.py
+++ b/lib/lp/bugs/scripts/sfremoteproductfinder.py
@@ -8,9 +8,8 @@ __all__ = [
'SourceForgeRemoteProductFinder',
]
-import urllib
-
import requests
+from six.moves.urllib.parse import splitvalue
from zope.component import getUtility
from lp.app.interfaces.launchpad import ILaunchpadCelebrities
@@ -101,7 +100,7 @@ class SourceForgeRemoteProductFinder:
query_dict = {}
query_bits = query.split('&')
for bit in query_bits:
- key, value = urllib.splitvalue(bit)
+ key, value = splitvalue(bit)
query_dict[key] = value
try:
diff --git a/lib/lp/bugs/stories/bugs/xx-bugs-advanced-search-upstream-status.txt b/lib/lp/bugs/stories/bugs/xx-bugs-advanced-search-upstream-status.txt
index 92347e0..ce0f6db 100644
--- a/lib/lp/bugs/stories/bugs/xx-bugs-advanced-search-upstream-status.txt
+++ b/lib/lp/bugs/stories/bugs/xx-bugs-advanced-search-upstream-status.txt
@@ -144,7 +144,7 @@ status. Bookmarks of such searches work nevertheless.
The user opens a bookmark for "upstream status doesn't matter"
- >>> from urllib import urlencode
+ >>> from six.moves.urllib.parse import urlencode
>>> bookmark_params = {
... 'field.status_upstream': '',
... 'field.status_upstream-empty-marker': '1',
diff --git a/lib/lp/bugs/stories/webservice/xx-bug.txt b/lib/lp/bugs/stories/webservice/xx-bug.txt
index 9fc5a22..9154525 100644
--- a/lib/lp/bugs/stories/webservice/xx-bug.txt
+++ b/lib/lp/bugs/stories/webservice/xx-bug.txt
@@ -1297,7 +1297,7 @@ we must follow to download the data.
Location: http://.../numbers.txt
...
- >>> from urllib2 import urlopen
+ >>> from six.moves.urllib.request import urlopen
>>> data = None
>>> conn = urlopen(data_response.getHeader('Location'))
diff --git a/lib/lp/bugs/tests/test_bugwatch.py b/lib/lp/bugs/tests/test_bugwatch.py
index 7dd061f..50996b5 100644
--- a/lib/lp/bugs/tests/test_bugwatch.py
+++ b/lib/lp/bugs/tests/test_bugwatch.py
@@ -10,10 +10,10 @@ from datetime import (
timedelta,
)
import re
-from urlparse import urlunsplit
from lazr.lifecycle.snapshot import Snapshot
from pytz import utc
+from six.moves.urllib.parse import urlunsplit
from storm.store import Store
from testscenarios import (
load_tests_apply_scenarios,
diff --git a/lib/lp/buildmaster/interactor.py b/lib/lp/buildmaster/interactor.py
index 1605cda..4f809c4 100644
--- a/lib/lp/buildmaster/interactor.py
+++ b/lib/lp/buildmaster/interactor.py
@@ -12,8 +12,8 @@ from collections import namedtuple
import logging
import os.path
import tempfile
-from urlparse import urlparse
+from six.moves.urllib.parse import urlparse
import transaction
from twisted.internet import (
defer,
diff --git a/lib/lp/code/browser/branchlisting.py b/lib/lp/code/browser/branchlisting.py
index 176733b..f472ef0 100644
--- a/lib/lp/code/browser/branchlisting.py
+++ b/lib/lp/code/browser/branchlisting.py
@@ -27,13 +27,13 @@ __all__ = [
]
from operator import attrgetter
-import urlparse
from lazr.delegates import delegate_to
from lazr.enum import (
EnumeratedType,
Item,
)
+from six.moves.urllib.parse import parse_qs
from storm.expr import Desc
from zope.browserpage import ViewPageTemplateFile
from zope.component import getUtility
@@ -496,7 +496,7 @@ class BranchListingView(LaunchpadFormView, FeedsMixin):
@property
def template(self):
query_string = self.request.get('QUERY_STRING') or ''
- query_params = urlparse.parse_qs(query_string)
+ query_params = parse_qs(query_string)
render_table_only = 'batch_request' in query_params
if render_table_only:
return self.table_only_template
diff --git a/lib/lp/code/browser/codeimport.py b/lib/lp/code/browser/codeimport.py
index dc7d9e6..635347d 100644
--- a/lib/lp/code/browser/codeimport.py
+++ b/lib/lp/code/browser/codeimport.py
@@ -20,12 +20,12 @@ __all__ = [
]
from textwrap import dedent
-from urlparse import urlparse
from lazr.restful.interface import (
copy_field,
use_template,
)
+from six.moves.urllib.parse import urlparse
from zope.component import (
getUtility,
queryAdapter,
diff --git a/lib/lp/code/browser/tests/test_gitsubscription.py b/lib/lp/code/browser/tests/test_gitsubscription.py
index 62346ff..2997c93 100644
--- a/lib/lp/code/browser/tests/test_gitsubscription.py
+++ b/lib/lp/code/browser/tests/test_gitsubscription.py
@@ -7,9 +7,8 @@ from __future__ import absolute_import, print_function, unicode_literals
__metaclass__ = type
-from urllib import urlencode
-
from fixtures import FakeLogger
+from six.moves.urllib.parse import urlencode
from testtools.matchers import MatchesStructure
from zope.security.interfaces import Unauthorized
from zope.testbrowser.browser import LinkNotFoundError
diff --git a/lib/lp/code/interfaces/codehosting.py b/lib/lp/code/interfaces/codehosting.py
index 447e978..b5adc52 100644
--- a/lib/lp/code/interfaces/codehosting.py
+++ b/lib/lp/code/interfaces/codehosting.py
@@ -22,10 +22,10 @@ __all__ = [
]
import os.path
-import urllib
from lazr.uri import URI
import six
+from six.moves.urllib.parse import quote
from zope.interface import Interface
from lp.app.validators.name import valid_name
@@ -204,7 +204,7 @@ def compose_public_url(scheme, unique_name, suffix=None):
host = URI(config.codehosting.supermirror_root).host
# After quoting and encoding, the path should be perfectly
# safe as a plain ASCII string, str() just enforces this
- path = '/' + str(urllib.quote(six.ensure_binary(unique_name), safe='/~+'))
+ path = '/' + str(quote(six.ensure_binary(unique_name), safe='/~+'))
if suffix:
path = os.path.join(path, suffix)
return str(URI(scheme=scheme, host=host, path=path))
diff --git a/lib/lp/code/model/githosting.py b/lib/lp/code/model/githosting.py
index d895967..94d6538 100644
--- a/lib/lp/code/model/githosting.py
+++ b/lib/lp/code/model/githosting.py
@@ -11,12 +11,14 @@ __all__ = [
import base64
import json
import sys
-from urllib import quote
-from urlparse import urljoin
from lazr.restful.utils import get_current_browser_request
import requests
from six import reraise
+from six.moves.urllib.parse import (
+ quote,
+ urljoin,
+ )
from zope.interface import implementer
from lp.code.errors import (
diff --git a/lib/lp/code/model/gitref.py b/lib/lp/code/model/gitref.py
index 526b815..3667eb7 100644
--- a/lib/lp/code/model/gitref.py
+++ b/lib/lp/code/model/gitref.py
@@ -12,16 +12,16 @@ __all__ = [
from functools import partial
import json
import re
-from urllib import (
- quote,
- quote_plus,
- )
-from urlparse import urlsplit
from lazr.lifecycle.event import ObjectCreatedEvent
import pytz
import requests
import six
+from six.moves.urllib.parse import (
+ quote,
+ quote_plus,
+ urlsplit,
+ )
from storm.locals import (
DateTime,
Int,
diff --git a/lib/lp/code/model/gitrepository.py b/lib/lp/code/model/gitrepository.py
index eb9c251..6260c16 100644
--- a/lib/lp/code/model/gitrepository.py
+++ b/lib/lp/code/model/gitrepository.py
@@ -25,7 +25,6 @@ from itertools import (
groupby,
)
from operator import attrgetter
-from urllib import quote_plus
from breezy import urlutils
from lazr.enum import DBItem
@@ -33,6 +32,7 @@ from lazr.lifecycle.event import ObjectModifiedEvent
from lazr.lifecycle.snapshot import Snapshot
import pytz
import six
+from six.moves.urllib.parse import quote_plus
from storm.databases.postgres import Returning
from storm.expr import (
And,
diff --git a/lib/lp/code/stories/branches/xx-private-branch-listings.txt b/lib/lp/code/stories/branches/xx-private-branch-listings.txt
index 03c21db..e0ee96e 100644
--- a/lib/lp/code/stories/branches/xx-private-branch-listings.txt
+++ b/lib/lp/code/stories/branches/xx-private-branch-listings.txt
@@ -141,7 +141,7 @@ Person code listing pages
The person code listings is the other obvious place to filter out the
viewable branches.
- >>> import urllib
+ >>> from six.moves.urllib.parse import urlencode
>>> def print_person_code_listing(browser, category=None):
... params = {'batch': '15'}
... if category is not None:
@@ -149,7 +149,7 @@ viewable branches.
... # The batch argument is given to override the default batch
... # size of five.
... full_url = 'http://code.launchpad.test/~name12?%s' % (
- ... urllib.urlencode(params),)
+ ... urlencode(params),)
... browser.open(full_url)
... table = find_tag_by_id(browser.contents, 'branchtable')
... branches = []
diff --git a/lib/lp/code/stories/branches/xx-subscribing-branches.txt b/lib/lp/code/stories/branches/xx-subscribing-branches.txt
index c4a3a73..315a00e 100644
--- a/lib/lp/code/stories/branches/xx-subscribing-branches.txt
+++ b/lib/lp/code/stories/branches/xx-subscribing-branches.txt
@@ -120,7 +120,7 @@ shown to the user.
Clicking the back button and then clicking on either Change or
Unsubscribe will give a message that we are not subscribed.
- >>> from urllib import urlencode
+ >>> from six.moves.urllib.parse import urlencode
>>> browser.addHeader('Referer', 'https://launchpad.test/')
>>> browser.open(
... form_url,
diff --git a/lib/lp/codehosting/codeimport/tests/test_workermonitor.py b/lib/lp/codehosting/codeimport/tests/test_workermonitor.py
index 04a43ba..047cbc9 100644
--- a/lib/lp/codehosting/codeimport/tests/test_workermonitor.py
+++ b/lib/lp/codehosting/codeimport/tests/test_workermonitor.py
@@ -13,12 +13,12 @@ import shutil
import StringIO
import subprocess
import tempfile
-import urllib
from bzrlib.branch import Branch
from bzrlib.tests import TestCaseInTempDir
from dulwich.repo import Repo as GitRepo
import oops_twisted
+from six.moves.urllib.request import urlopen
from testtools.twistedsupport import (
assert_fails_with,
AsynchronousDeferredRunTest,
@@ -368,7 +368,7 @@ class TestWorkerMonitorUnit(TestCase):
def check_file_uploaded(result):
transaction.abort()
url = worker_monitor.codeimport_endpoint.calls[0][3]
- text = urllib.urlopen(url).read()
+ text = urlopen(url).read()
self.assertEqual(log_text, text)
return worker_monitor.finishJob(
diff --git a/lib/lp/codehosting/codeimport/worker.py b/lib/lp/codehosting/codeimport/worker.py
index 8916e8d..e20a6f3 100644
--- a/lib/lp/codehosting/codeimport/worker.py
+++ b/lib/lp/codehosting/codeimport/worker.py
@@ -24,10 +24,6 @@ import io
import os
import shutil
import subprocess
-from urlparse import (
- urlsplit,
- urlunsplit,
- )
# FIRST Ensure correct plugins are loaded. Do not delete this comment or the
# line below this comment.
@@ -78,6 +74,10 @@ from lazr.uri import (
)
from pymacaroons import Macaroon
import SCM
+from six.moves.urllib.parse import (
+ urlsplit,
+ urlunsplit,
+ )
from lp.code.interfaces.branch import get_blacklisted_hostnames
from lp.codehosting.codeimport.foreigntree import CVSWorkingTree
diff --git a/lib/lp/codehosting/puller/tests/test_errors.py b/lib/lp/codehosting/puller/tests/test_errors.py
index 1819e95..d69d4a3 100644
--- a/lib/lp/codehosting/puller/tests/test_errors.py
+++ b/lib/lp/codehosting/puller/tests/test_errors.py
@@ -9,7 +9,6 @@ import httplib
import os
import socket
import tempfile
-import urllib2
from breezy.errors import (
BzrError,
@@ -23,6 +22,7 @@ from breezy.url_policy_open import (
BranchReferenceForbidden,
)
from lazr.uri import InvalidURIError
+from six.moves.urllib.error import HTTPError
from lp.code.enums import BranchType
from lp.codehosting.puller.worker import (
@@ -141,7 +141,7 @@ class TestErrorCatching(TestCase):
# If the source branch requires HTTP authentication, say so in the
# error message.
msg = self.getMirrorFailureForException(
- urllib2.HTTPError(
+ HTTPError(
'http://something', httplib.UNAUTHORIZED,
'Authorization Required', 'some headers',
os.fdopen(tempfile.mkstemp()[0])))
diff --git a/lib/lp/codehosting/puller/worker.py b/lib/lp/codehosting/puller/worker.py
index d0ed4ca..80764a6 100644
--- a/lib/lp/codehosting/puller/worker.py
+++ b/lib/lp/codehosting/puller/worker.py
@@ -6,7 +6,6 @@ __metaclass__ = type
import httplib
import socket
import sys
-import urllib2
# FIRST Ensure correct plugins are loaded. Do not delete this comment or the
# line below this comment.
@@ -41,6 +40,7 @@ from lazr.uri import (
InvalidURIError,
URI,
)
+from six.moves.urllib.error import HTTPError
from lp.code.bzr import (
BranchFormat,
@@ -387,7 +387,7 @@ class PullerWorker:
# add further encountered errors from the production runs here
# ------ HERE ---------
#
- except urllib2.HTTPError as e:
+ except HTTPError as e:
msg = str(e)
if int(e.code) == httplib.UNAUTHORIZED:
# Maybe this will be caught in bzrlib one day, and then we'll
diff --git a/lib/lp/codehosting/scanner/buglinks.py b/lib/lp/codehosting/scanner/buglinks.py
index 95d8eaa..f9609c3 100644
--- a/lib/lp/codehosting/scanner/buglinks.py
+++ b/lib/lp/codehosting/scanner/buglinks.py
@@ -8,9 +8,8 @@ __all__ = [
'BugBranchLinker',
]
-import urlparse
-
from breezy.bugtracker import InvalidBugStatus
+from six.moves.urllib.parse import urlsplit
from zope.component import getUtility
from lp.app.errors import NotFoundError
@@ -25,7 +24,7 @@ class BugBranchLinker:
self.db_branch = db_branch
def _getBugFromUrl(self, url):
- protocol, host, path, ignored, ignored = urlparse.urlsplit(url)
+ protocol, host, path, ignored, ignored = urlsplit(url)
# Skip URLs that don't point to Launchpad.
if host != 'launchpad.net':
diff --git a/lib/lp/codehosting/sshserver/session.py b/lib/lp/codehosting/sshserver/session.py
index 91d31e4..38bea68 100644
--- a/lib/lp/codehosting/sshserver/session.py
+++ b/lib/lp/codehosting/sshserver/session.py
@@ -12,11 +12,11 @@ import os
import signal
import socket
import sys
-import urlparse
from lazr.sshserver.events import AvatarEvent
from lazr.sshserver.session import DoNothingSession
from six import reraise
+from six.moves.urllib.parse import urlparse
from twisted.internet import (
error,
interfaces,
@@ -471,7 +471,7 @@ def launch_smart_server(avatar):
environment = dict(os.environ)
# Extract the hostname from the supermirror root config.
- hostname = urlparse.urlparse(config.codehosting.supermirror_root)[1]
+ hostname = urlparse(config.codehosting.supermirror_root)[1]
environment['BRZ_EMAIL'] = '%s@%s' % (avatar.username, hostname)
# TODO: Use a FeatureFlag to enable this in a more fine-grained approach.
# If the forking daemon has been spawned, then we can use it if the
diff --git a/lib/lp/codehosting/tests/test_acceptance.py b/lib/lp/codehosting/tests/test_acceptance.py
index 1b4e1bf..23ee03e 100644
--- a/lib/lp/codehosting/tests/test_acceptance.py
+++ b/lib/lp/codehosting/tests/test_acceptance.py
@@ -11,7 +11,6 @@ import signal
import subprocess
import sys
import time
-import urllib2
import xmlrpclib
import breezy.branch
@@ -19,6 +18,7 @@ from breezy.tests import TestCaseWithTransport
from breezy.tests.per_repository import all_repository_format_scenarios
from breezy.urlutils import local_path_from_url
from breezy.workingtree import WorkingTree
+from six.moves.urllib.request import urlopen
from testscenarios import (
load_tests_apply_scenarios,
WithScenarios,
@@ -747,7 +747,7 @@ class SmartserverTests(WithScenarios, SSHTestCase):
self.assertEqual('tcp:', config.codehosting.web_status_port[:4])
port = int(config.codehosting.web_status_port[4:])
web_status_url = 'http://localhost:%d/' % port
- urllib2.urlopen(web_status_url)
+ urlopen(web_status_url)
load_tests = load_tests_apply_scenarios
diff --git a/lib/lp/hardwaredb/stories/hwdb/xx-hwdb.txt b/lib/lp/hardwaredb/stories/hwdb/xx-hwdb.txt
index 988534b..98128d1 100644
--- a/lib/lp/hardwaredb/stories/hwdb/xx-hwdb.txt
+++ b/lib/lp/hardwaredb/stories/hwdb/xx-hwdb.txt
@@ -196,7 +196,7 @@ present in the form and submits this data. Unfortunately, it sometimes
simply omits some fields. In such a case, we return an extra header
"Required fields not contained in POST data".
- >>> from urllib import urlencode
+ >>> from six.moves.urllib.parse import urlencode
>>> form_data = {
... 'field.date_created': '2009-01-01',
... 'field.format': 'VERSION_1',
diff --git a/lib/lp/hardwaredb/stories/webservice/xx-hwdb.txt b/lib/lp/hardwaredb/stories/webservice/xx-hwdb.txt
index 0ca3d45..73194eb 100644
--- a/lib/lp/hardwaredb/stories/webservice/xx-hwdb.txt
+++ b/lib/lp/hardwaredb/stories/webservice/xx-hwdb.txt
@@ -565,7 +565,7 @@ We can limit the result set to submissions where the device is accessed
by a specific driver. Device 2 is a USB controller, so will get the sample
submission when we set the parameter driver to the usb driver...
- >>> from urllib import urlencode
+ >>> from six.moves.urllib.parse import urlencode
>>> parameters = {
... 'ws.op': 'getSubmissions',
... 'driver': usb_driver['self_link'],
diff --git a/lib/lp/registry/browser/mailinglists.py b/lib/lp/registry/browser/mailinglists.py
index ea422df..9fa6dd2 100644
--- a/lib/lp/registry/browser/mailinglists.py
+++ b/lib/lp/registry/browser/mailinglists.py
@@ -11,8 +11,8 @@ __all__ = [
from textwrap import TextWrapper
-from urllib import quote
+from six.moves.urllib.parse import quote
from zope.component import getUtility
from lp.app.browser.tales import PersonFormatterAPI
diff --git a/lib/lp/registry/browser/person.py b/lib/lp/registry/browser/person.py
index 35cf170..bb0ef69 100644
--- a/lib/lp/registry/browser/person.py
+++ b/lib/lp/registry/browser/person.py
@@ -54,7 +54,6 @@ from operator import (
itemgetter,
)
from textwrap import dedent
-import urllib
from lazr.config import as_timedelta
from lazr.delegates import delegate_to
@@ -63,6 +62,10 @@ from lazr.restful.interfaces import IWebServiceClientRequest
from lazr.restful.utils import smartquote
from lazr.uri import URI
import pytz
+from six.moves.urllib.parse import (
+ quote,
+ urlencode,
+ )
from storm.zope.interfaces import IResultSet
from zope.browserpage import ViewPageTemplateFile
from zope.component import (
@@ -1717,7 +1720,7 @@ class PersonView(LaunchpadView, FeedsMixin, ContactViaWebLinksMixin):
"""Return a URL to a page which lists all bugs assigned to this
person that are In Progress.
"""
- query_string = urllib.urlencode(
+ query_string = urlencode(
[('field.status', BugTaskStatus.INPROGRESS.title)])
url = "%s/+assignedbugs" % canonical_url(self.context)
return ("%(url)s?search=Search&%(query_string)s"
@@ -2952,7 +2955,7 @@ class PersonEditEmailsView(LaunchpadFormView):
"to be confirmed as yours." % newemail)
else:
owner = email.person
- owner_name = urllib.quote(owner.name)
+ owner_name = quote(owner.name)
merge_url = (
'%s/+requestmerge?field.dupe_person=%s'
% (canonical_url(getUtility(IPersonSet)), owner_name))
diff --git a/lib/lp/registry/browser/product.py b/lib/lp/registry/browser/product.py
index b2161c5..c059dbb 100644
--- a/lib/lp/registry/browser/product.py
+++ b/lib/lp/registry/browser/product.py
@@ -42,7 +42,6 @@ __all__ = [
from operator import attrgetter
-from urlparse import urlunsplit
from breezy import urlutils
from breezy.revision import NULL_REVISION
@@ -52,6 +51,7 @@ from lazr.restful.interface import (
use_template,
)
from lazr.restful.interfaces import IJSONRequestCache
+from six.moves.urllib.parse import urlunsplit
from zope.browserpage import ViewPageTemplateFile
from zope.component import getUtility
from zope.event import notify
diff --git a/lib/lp/registry/browser/sourcepackage.py b/lib/lp/registry/browser/sourcepackage.py
index 19a6e89..50fa669 100644
--- a/lib/lp/registry/browser/sourcepackage.py
+++ b/lib/lp/registry/browser/sourcepackage.py
@@ -18,7 +18,6 @@ __all__ = [
]
import string
-import urllib
from apt_pkg import (
upstream_version,
@@ -29,6 +28,7 @@ from lazr.enum import (
Item,
)
from lazr.restful.interface import copy_field
+from six.moves.urllib.parse import urlencode
from zope.browserpage import ViewPageTemplateFile
from zope.component import (
adapter,
@@ -125,8 +125,7 @@ def get_register_upstream_url(source_package):
for binary in source_package.releases[0].sample_binary_packages:
summary_set.add(binary.summary)
params['field.summary'] = '\n'.join(sorted(summary_set))
- query_string = urllib.urlencode(
- sorted(params.items()), doseq=True)
+ query_string = urlencode(sorted(params.items()), doseq=True)
return '/projects/+new?%s' % query_string
diff --git a/lib/lp/registry/browser/team.py b/lib/lp/registry/browser/team.py
index 73f30c5..416a8a4 100644
--- a/lib/lp/registry/browser/team.py
+++ b/lib/lp/registry/browser/team.py
@@ -37,13 +37,13 @@ from datetime import (
timedelta,
)
import math
-from urllib import unquote
from lazr.restful.interface import copy_field
from lazr.restful.interfaces import IJSONRequestCache
from lazr.restful.utils import smartquote
import pytz
import simplejson
+from six.moves.urllib.parse import unquote
from zope.browserpage import ViewPageTemplateFile
from zope.component import getUtility
from zope.formlib.form import (
diff --git a/lib/lp/registry/browser/tests/test_distroseries.py b/lib/lp/registry/browser/tests/test_distroseries.py
index de45a78..1f8d5d5 100644
--- a/lib/lp/registry/browser/tests/test_distroseries.py
+++ b/lib/lp/registry/browser/tests/test_distroseries.py
@@ -9,12 +9,14 @@ from datetime import timedelta
import difflib
import re
from textwrap import TextWrapper
-from urllib import urlencode
-from urlparse import urlparse
from fixtures import FakeLogger
from lazr.restful.interfaces import IJSONRequestCache
from lxml import html
+from six.moves.urllib.parse import (
+ urlencode,
+ urlparse,
+ )
import soupmatchers
from storm.zope.interfaces import IResultSet
from testtools.content import (
diff --git a/lib/lp/registry/browser/tests/test_product.py b/lib/lp/registry/browser/tests/test_product.py
index 2232c93..4271302 100644
--- a/lib/lp/registry/browser/tests/test_product.py
+++ b/lib/lp/registry/browser/tests/test_product.py
@@ -8,10 +8,12 @@ __metaclass__ = type
__all__ = ['make_product_form']
import re
-from urlparse import urlsplit
from lazr.restful.interfaces import IJSONRequestCache
-from six.moves.urllib.parse import urlencode
+from six.moves.urllib.parse import (
+ urlencode,
+ urlsplit,
+ )
from soupmatchers import (
HTMLContains,
Tag,
diff --git a/lib/lp/registry/browser/tests/test_sourcepackage_views.py b/lib/lp/registry/browser/tests/test_sourcepackage_views.py
index 3893d1e..c3cac87 100644
--- a/lib/lp/registry/browser/tests/test_sourcepackage_views.py
+++ b/lib/lp/registry/browser/tests/test_sourcepackage_views.py
@@ -5,9 +5,10 @@
__metaclass__ = type
-import urllib
-
-from six.moves.urllib.parse import parse_qsl
+from six.moves.urllib.parse import (
+ parse_qsl,
+ splitquery,
+ )
from soupmatchers import (
HTMLContains,
Tag,
@@ -60,7 +61,7 @@ class TestSourcePackageViewHelpers(TestCaseWithFactory):
return distroseries.getSourcePackage(source_package_name)
def assertInQueryString(self, url, field, value):
- base, query = urllib.splitquery(url)
+ base, query = splitquery(url)
params = parse_qsl(query)
self.assertTrue((field, value) in params)
@@ -72,7 +73,7 @@ class TestSourcePackageViewHelpers(TestCaseWithFactory):
distroseries=distroseries,
sourcepackagename='python-super-package')
url = get_register_upstream_url(source_package)
- base, query = urllib.splitquery(url)
+ base, query = splitquery(url)
self.assertEqual('/projects/+new', base)
params = parse_qsl(query)
expected_params = [
diff --git a/lib/lp/registry/scripts/distributionmirror_prober.py b/lib/lp/registry/scripts/distributionmirror_prober.py
index ab25488..ce3a00e 100644
--- a/lib/lp/registry/scripts/distributionmirror_prober.py
+++ b/lib/lp/registry/scripts/distributionmirror_prober.py
@@ -12,10 +12,14 @@ import itertools
import logging
import os.path
from StringIO import StringIO
-import urllib
-import urlparse
import requests
+from six.moves.urllib.parse import (
+ unquote,
+ urljoin,
+ urlparse,
+ urlunparse,
+ )
from twisted.internet import (
defer,
protocol,
@@ -320,8 +324,8 @@ class RedirectAwareProberFactory(ProberFactory):
scheme, host, port, orig_path = _parse(self.url)
scheme, host, port, new_path = _parse(url)
- if (urllib.unquote(orig_path.split('/')[-1])
- != urllib.unquote(new_path.split('/')[-1])):
+ if (unquote(orig_path.split('/')[-1])
+ != unquote(new_path.split('/')[-1])):
# Server redirected us to a file which doesn't seem to be what we
# requested. It's likely to be a stupid server which redirects
# instead of 404ing (https://launchpad.net/bugs/204460).
@@ -617,12 +621,12 @@ def _get_cdimage_file_list():
url = config.distributionmirrorprober.cdimage_file_list_url
# In test environments, this may be a file: URL. Adjust it to be in a
# form that requests can cope with (i.e. using an absolute path).
- parsed_url = urlparse.urlparse(url)
+ parsed_url = urlparse(url)
if parsed_url.scheme == 'file' and not os.path.isabs(parsed_url.path):
assert parsed_url.path == parsed_url[2]
parsed_url = list(parsed_url)
parsed_url[2] = os.path.join(config.root, parsed_url[2])
- url = urlparse.urlunparse(parsed_url)
+ url = urlunparse(parsed_url)
try:
return urlfetch(
url, headers={'Pragma': 'no-cache', 'Cache-control': 'no-cache'},
@@ -684,7 +688,7 @@ def probe_archive_mirror(mirror, logfile, unchecked_keys, logger):
all_paths = itertools.chain(packages_paths, sources_paths)
request_manager = RequestManager()
for series, pocket, component, path in all_paths:
- url = urlparse.urljoin(base_url, path)
+ url = urljoin(base_url, path)
callbacks = ArchiveMirrorProberCallbacks(
mirror, series, pocket, component, url, logfile)
unchecked_keys.append(url)
@@ -734,7 +738,7 @@ def probe_cdimage_mirror(mirror, logfile, unchecked_keys, logger):
deferredList = []
request_manager = RequestManager()
for path in paths:
- url = urlparse.urljoin(base_url, path)
+ url = urljoin(base_url, path)
# Use a RedirectAwareProberFactory because CD mirrors are allowed
# to redirect, and we need to cope with that.
prober = RedirectAwareProberFactory(url)
@@ -760,7 +764,7 @@ def should_skip_host(host):
def _parse(url, defaultPort=80):
"""Parse the given URL returning the scheme, host, port and path."""
- scheme, host, path, dummy, dummy, dummy = urlparse.urlparse(url)
+ scheme, host, path, dummy, dummy, dummy = urlparse(url)
port = defaultPort
if ':' in host:
host, port = host.split(':')
diff --git a/lib/lp/registry/scripts/productreleasefinder/finder.py b/lib/lp/registry/scripts/productreleasefinder/finder.py
index 07e2aaa..3fd49f0 100644
--- a/lib/lp/registry/scripts/productreleasefinder/finder.py
+++ b/lib/lp/registry/scripts/productreleasefinder/finder.py
@@ -13,11 +13,11 @@ import mimetypes
import os
import re
import tempfile
-import urlparse
from cscvs.dircompare import path
import pytz
import requests
+from six.moves.urllib.parse import urlsplit
from zope.component import getUtility
from lp.app.validators.name import invalid_name_pattern
@@ -199,7 +199,7 @@ class ProductReleaseFinder:
def handleRelease(self, product_name, series_name, url, file_names):
"""If the given URL looks like a release tarball, download it
and create a corresponding ProductRelease."""
- filename = urlparse.urlsplit(url)[2]
+ filename = urlsplit(url)[2]
slash = filename.rfind("/")
if slash != -1:
filename = filename[slash + 1:]
diff --git a/lib/lp/registry/scripts/productreleasefinder/walker.py b/lib/lp/registry/scripts/productreleasefinder/walker.py
index fa9e374..4865a46 100644
--- a/lib/lp/registry/scripts/productreleasefinder/walker.py
+++ b/lib/lp/registry/scripts/productreleasefinder/walker.py
@@ -14,11 +14,6 @@ __all__ = [
import ftplib
import socket
-from urllib import unquote_plus
-from urlparse import (
- urljoin,
- urlsplit,
- )
from cscvs.dircompare.path import (
as_dir,
@@ -30,6 +25,11 @@ from lazr.uri import (
)
import requests
import scandir
+from six.moves.urllib.parse import (
+ unquote_plus,
+ urljoin,
+ urlsplit,
+ )
from lp.registry.scripts.productreleasefinder import log
from lp.services.beautifulsoup import BeautifulSoup
diff --git a/lib/lp/registry/stories/product/xx-product-files.txt b/lib/lp/registry/stories/product/xx-product-files.txt
index 403e23e..5751b75 100644
--- a/lib/lp/registry/stories/product/xx-product-files.txt
+++ b/lib/lp/registry/stories/product/xx-product-files.txt
@@ -401,8 +401,8 @@ Downloading and deleting files
Download one of the files.
- >>> from urllib import urlopen
- >>> from urlparse import urlparse
+ >>> from six.moves.urllib.parse import urlparse
+ >>> from six.moves.urllib.request import urlopen
XXX Downloading via the testbrowser does not work
XXX unless the file is served by the Zope publisher.
diff --git a/lib/lp/scripts/utilities/js/combo.py b/lib/lp/scripts/utilities/js/combo.py
index b02e381..8b0969f 100644
--- a/lib/lp/scripts/utilities/js/combo.py
+++ b/lib/lp/scripts/utilities/js/combo.py
@@ -6,9 +6,11 @@ from __future__ import absolute_import, print_function, unicode_literals
__metaclass__ = type
import os
-import urlparse
-from six.moves.urllib.parse import parse_qsl
+from six.moves.urllib.parse import (
+ parse_qsl,
+ urlsplit,
+ )
from lp.scripts.utilities.js.jsbuild import (
CSSComboFile,
@@ -21,7 +23,7 @@ def parse_url(url):
Returns the list of arguments in the original order.
"""
- scheme, loc, path, query, frag = urlparse.urlsplit(url)
+ scheme, loc, path, query, frag = urlsplit(url)
return parse_qs(query)
diff --git a/lib/lp/services/config/__init__.py b/lib/lp/services/config/__init__.py
index a223fc6..71a32b2 100644
--- a/lib/lp/services/config/__init__.py
+++ b/lib/lp/services/config/__init__.py
@@ -15,14 +15,14 @@ import glob
import logging
import os
import sys
-from urlparse import (
- urlparse,
- urlunparse,
- )
import importlib_resources
from lazr.config import ImplicitTypeSchema
from lazr.config.interfaces import ConfigErrors
+from six.moves.urllib.parse import (
+ urlparse,
+ urlunparse,
+ )
import ZConfig
from lp.services.osutils import open_for_writing
diff --git a/lib/lp/services/feeds/feed.py b/lib/lp/services/feeds/feed.py
index 6b041a9..76ff15b 100644
--- a/lib/lp/services/feeds/feed.py
+++ b/lib/lp/services/feeds/feed.py
@@ -20,8 +20,8 @@ __all__ = [
import operator
import os
import time
-from urlparse import urljoin
+from six.moves.urllib.parse import urljoin
from zope.browserpage import ViewPageTemplateFile
from zope.component import getUtility
from zope.datetime import rfc1123_date
diff --git a/lib/lp/services/gpg/handler.py b/lib/lp/services/gpg/handler.py
index 4daa8e6..e51b0ae 100644
--- a/lib/lp/services/gpg/handler.py
+++ b/lib/lp/services/gpg/handler.py
@@ -20,11 +20,11 @@ from StringIO import StringIO
import subprocess
import sys
import tempfile
-import urllib
import gpgme
from lazr.restful.utils import get_current_browser_request
import requests
+from six.moves.urllib.parse import urlencode
from zope.interface import implementer
from zope.security.proxy import removeSecurityProxy
@@ -467,7 +467,7 @@ class GPGHandler:
config.gpghandler.host, config.gpghandler.port)
conn = httplib.HTTPConnection(keyserver_http_url)
- params = urllib.urlencode({'keytext': content})
+ params = urlencode({'keytext': content})
headers = {
"Content-type": "application/x-www-form-urlencoded",
"Accept": "text/plain",
@@ -512,8 +512,7 @@ class GPGHandler:
base = 'https://%s' % host
else:
base = 'http://%s:%s' % (host, config.gpghandler.port)
- return '%s/pks/lookup?%s' % (
- base, urllib.urlencode(sorted(params.items())))
+ return '%s/pks/lookup?%s' % (base, urlencode(sorted(params.items())))
def _getPubKey(self, fingerprint):
"""See IGPGHandler for further information."""
diff --git a/lib/lp/services/librarian/client.py b/lib/lp/services/librarian/client.py
index edb1d72..8bb0668 100644
--- a/lib/lp/services/librarian/client.py
+++ b/lib/lp/services/librarian/client.py
@@ -23,16 +23,20 @@ from socket import (
)
import threading
import time
-import urllib
-import urllib2
-from urlparse import (
+
+from lazr.restful.utils import get_current_browser_request
+import six
+from six.moves.urllib.error import (
+ HTTPError,
+ URLError,
+ )
+from six.moves.urllib.parse import (
+ quote,
urljoin,
urlparse,
urlunparse,
)
-
-from lazr.restful.utils import get_current_browser_request
-import six
+from six.moves.urllib.request import urlopen
from storm.store import Store
from zope.interface import implementer
@@ -55,12 +59,12 @@ from lp.services.timeline.requesttimeline import get_request_timeline
def url_path_quote(filename):
"""Quote `filename` for use in a URL."""
- # RFC 3986 says ~ should not be generated escaped, but urllib.quote
+ # RFC 3986 says ~ should not be generated escaped, but urllib.parse.quote
# predates it. Additionally, + is safe to use unescaped in paths and is
# frequently used in Debian versions, so leave it alone.
#
# This needs to match Library.getAlias' TimeLimitedToken handling.
- return urllib.quote(filename, safe='/~+')
+ return quote(filename, safe='/~+')
def get_libraryfilealias_download_path(aliasID, filename):
@@ -339,7 +343,7 @@ class FileDownloadClient:
# url = ('http://%s:%d/search?digest=%s' % (
# host, port, hexdigest)
# )
- # results = urllib2.urlopen(url).read()
+ # results = urlopen(url).read()
# lines = results.split('\n')
# count, paths = lines[0], lines[1:]
# if int(count) != len(paths):
@@ -500,21 +504,21 @@ class FileDownloadClient:
"""Helper for getFileByAlias."""
while 1:
try:
- return _File(urllib2.urlopen(url), url)
- except urllib2.URLError as error:
+ return _File(urlopen(url), url)
+ except URLError as error:
# 404 errors indicate a data inconsistency: more than one
# attempt to open the file is pointless.
#
# Note that URLError is a base class of HTTPError.
- if isinstance(error, urllib2.HTTPError) and error.code == 404:
+ if isinstance(error, HTTPError) and error.code == 404:
raise LookupError(aliasID)
# HTTPErrors with a 5xx error code ("server problem")
# are a reason to retry the access again, as well as
# generic, non-HTTP, URLErrors like "connection refused".
- if (isinstance(error, urllib2.HTTPError)
+ if (isinstance(error, HTTPError)
and 500 <= error.code <= 599
- or isinstance(error, urllib2.URLError) and
- not isinstance(error, urllib2.HTTPError)):
+ or isinstance(error, URLError) and
+ not isinstance(error, HTTPError)):
if time.time() <= try_until:
time.sleep(1)
else:
diff --git a/lib/lp/services/librarian/doc/librarian.txt b/lib/lp/services/librarian/doc/librarian.txt
index 58f7979..01f7ef9 100644
--- a/lib/lp/services/librarian/doc/librarian.txt
+++ b/lib/lp/services/librarian/doc/librarian.txt
@@ -252,7 +252,7 @@ the client until it begins a new transaction.
>>> print url
http://.../text.txt
- >>> from urllib2 import urlopen
+ >>> from six.moves.urllib.request import urlopen
>>> urlopen(url).read()
'This is some data'
diff --git a/lib/lp/services/librarian/model.py b/lib/lp/services/librarian/model.py
index adb2b12..0cd870e 100644
--- a/lib/lp/services/librarian/model.py
+++ b/lib/lp/services/librarian/model.py
@@ -13,10 +13,10 @@ __all__ = [
from datetime import datetime
import hashlib
-from urlparse import urlparse
from lazr.delegates import delegate_to
import pytz
+from six.moves.urllib.parse import urlparse
from sqlobject import (
BoolCol,
ForeignKey,
diff --git a/lib/lp/services/librarian/smoketest.py b/lib/lp/services/librarian/smoketest.py
index 36c3c81..6c389a5 100644
--- a/lib/lp/services/librarian/smoketest.py
+++ b/lib/lp/services/librarian/smoketest.py
@@ -9,9 +9,9 @@
from cStringIO import StringIO
import datetime
import sys
-import urllib
import pytz
+from six.moves.urllib.request import urlopen
import transaction
from zope.component import getUtility
@@ -36,7 +36,7 @@ def store_file(client):
def read_file(url):
try:
- data = urllib.urlopen(url).read()
+ data = urlopen(url).read()
except MemoryError:
# Re-raise catastrophic errors.
raise
diff --git a/lib/lp/services/librarian/tests/test_client.py b/lib/lp/services/librarian/tests/test_client.py
index a30a9bc..45b074a 100644
--- a/lib/lp/services/librarian/tests/test_client.py
+++ b/lib/lp/services/librarian/tests/test_client.py
@@ -8,16 +8,16 @@ import os
import re
import textwrap
import unittest
-from urllib2 import (
- HTTPError,
- URLError,
- urlopen,
- )
from fixtures import (
EnvironmentVariable,
TempDir,
)
+from six.moves.urllib.error import (
+ HTTPError,
+ URLError,
+ )
+from six.moves.urllib.request import urlopen
import transaction
from lp.services.config import config
diff --git a/lib/lp/services/librarian/tests/test_smoketest.py b/lib/lp/services/librarian/tests/test_smoketest.py
index 949b7c2..9cd0e91 100644
--- a/lib/lp/services/librarian/tests/test_smoketest.py
+++ b/lib/lp/services/librarian/tests/test_smoketest.py
@@ -5,10 +5,11 @@
__metaclass__ = type
-from contextlib import contextmanager
from cStringIO import StringIO
+from functools import partial
+
+from fixtures import MockPatch
-from lp.services.librarian import smoketest
from lp.services.librarian.smoketest import (
do_smoketest,
FILE_DATA,
@@ -19,43 +20,24 @@ from lp.testing import TestCaseWithFactory
from lp.testing.layers import ZopelessDatabaseLayer
-class GoodUrllib:
+def good_urlopen(url):
"""A urllib replacement for testing that returns good results."""
-
- def urlopen(self, url):
- return StringIO(FILE_DATA)
+ return StringIO(FILE_DATA)
-class BadUrllib:
+def bad_urlopen(url):
"""A urllib replacement for testing that returns bad results."""
+ return StringIO('bad data')
- def urlopen(self, url):
- return StringIO('bad data')
-
-class ErrorUrllib:
+def error_urlopen(url):
"""A urllib replacement for testing that raises an exception."""
-
- def urlopen(self, url):
- raise IOError('network error')
+ raise IOError('network error')
-class ExplosiveUrllib:
+def explosive_urlopen(exception, url):
"""A urllib replacement that raises an "explosive" exception."""
-
- def __init__(self, exception):
- self.exception = exception
-
- def urlopen(self, url):
- raise self.exception
-
-
-@contextmanager
-def fake_urllib(fake):
- original_urllib = smoketest.urllib
- smoketest.urllib = fake
- yield
- smoketest.urllib = original_urllib
+ raise exception
class SmokeTestTestCase(TestCaseWithFactory):
@@ -77,7 +59,8 @@ class SmokeTestTestCase(TestCaseWithFactory):
# If storing and retrieving both the public and private files work,
# the main function will return 0 (which will be used as the process's
# exit code to signal success).
- with fake_urllib(GoodUrllib()):
+ with MockPatch(
+ "lp.services.librarian.smoketest.urlopen", good_urlopen):
self.assertEqual(
do_smoketest(self.fake_librarian, self.fake_librarian,
output=StringIO()),
@@ -86,7 +69,7 @@ class SmokeTestTestCase(TestCaseWithFactory):
def test_bad_data(self):
# If incorrect data is retrieved, the main function will return 1
# (which will be used as the process's exit code to signal an error).
- with fake_urllib(BadUrllib()):
+ with MockPatch("lp.services.librarian.smoketest.urlopen", bad_urlopen):
self.assertEqual(
do_smoketest(self.fake_librarian, self.fake_librarian,
output=StringIO()),
@@ -96,7 +79,8 @@ class SmokeTestTestCase(TestCaseWithFactory):
# If an exception is raised when retrieving the data, the main
# function will return 1 (which will be used as the process's exit
# code to signal an error).
- with fake_urllib(ErrorUrllib()):
+ with MockPatch(
+ "lp.services.librarian.smoketest.urlopen", error_urlopen):
self.assertEqual(
do_smoketest(self.fake_librarian, self.fake_librarian,
output=StringIO()),
@@ -106,7 +90,9 @@ class SmokeTestTestCase(TestCaseWithFactory):
# If an "explosive" exception (an exception that should not be caught)
# is raised when retrieving the data it is re-raised.
for exception in MemoryError, SystemExit, KeyboardInterrupt:
- with fake_urllib(ExplosiveUrllib(exception)):
+ with MockPatch(
+ "lp.services.librarian.smoketest.urlopen",
+ partial(explosive_urlopen, exception)):
self.assertRaises(
exception,
do_smoketest, self.fake_librarian, self.fake_librarian,
diff --git a/lib/lp/services/librarianserver/db.py b/lib/lp/services/librarianserver/db.py
index 4494443..3483935 100644
--- a/lib/lp/services/librarianserver/db.py
+++ b/lib/lp/services/librarianserver/db.py
@@ -9,9 +9,12 @@ __all__ = [
]
import hashlib
-import urllib
from pymacaroons import Macaroon
+from six.moves.urllib.parse import (
+ quote,
+ unquote,
+ )
from six.moves.xmlrpc_client import Fault
from storm.expr import (
And,
@@ -109,13 +112,12 @@ class Library:
# The URL-encoding of the path may have changed somewhere
# along the line, so reencode it canonically. LFA.filename
# can't contain slashes, so they're safe to leave unencoded.
- # And urllib.quote erroneously excludes ~ from its safe set,
- # while RFC 3986 says it should be unescaped and Chromium
- # forcibly decodes it in any URL that it sees.
+ # And urllib.parse.quote erroneously excludes ~ from its
+ # safe set, while RFC 3986 says it should be unescaped and
+ # Chromium forcibly decodes it in any URL that it sees.
#
# This needs to match url_path_quote.
- normalised_path = urllib.quote(
- urllib.unquote(path), safe='/~+')
+ normalised_path = quote(unquote(path), safe='/~+')
store = session_store()
token_ok = not store.find(TimeLimitedToken,
SQL("age(created) < interval '1 day'"),
diff --git a/lib/lp/services/librarianserver/swift.py b/lib/lp/services/librarianserver/swift.py
index 29515dc..db01746 100644
--- a/lib/lp/services/librarianserver/swift.py
+++ b/lib/lp/services/librarianserver/swift.py
@@ -19,9 +19,9 @@ import hashlib
import os.path
import re
import time
-import urllib
import scandir
+from six.moves.urllib.parse import quote
from swiftclient import client as swiftclient
from lp.services.config import config
@@ -233,8 +233,7 @@ def _put(log, swift_connection, lfc_id, container, obj_name, fs_path):
lfc_id, disk_md5_hash, db_md5_hash))
raise AssertionError('md5 mismatch')
- manifest = '{0}/{1}/'.format(
- urllib.quote(container), urllib.quote(obj_name))
+ manifest = '{0}/{1}/'.format(quote(container), quote(obj_name))
manifest_headers = {'X-Object-Manifest': manifest}
swift_connection.put_object(
container, obj_name, '', 0, headers=manifest_headers)
diff --git a/lib/lp/services/librarianserver/testing/fake.py b/lib/lp/services/librarianserver/testing/fake.py
index b7a4305..37d6bb6 100644
--- a/lib/lp/services/librarianserver/testing/fake.py
+++ b/lib/lp/services/librarianserver/testing/fake.py
@@ -17,9 +17,9 @@ __all__ = [
import hashlib
from StringIO import StringIO
-from urlparse import urljoin
from fixtures import Fixture
+from six.moves.urllib.parse import urljoin
import transaction
from transaction.interfaces import ISynchronizer
import zope.component
diff --git a/lib/lp/services/librarianserver/testing/tests/test_server_fixture.py b/lib/lp/services/librarianserver/testing/tests/test_server_fixture.py
index bcbf2e3..2d9315b 100644
--- a/lib/lp/services/librarianserver/testing/tests/test_server_fixture.py
+++ b/lib/lp/services/librarianserver/testing/tests/test_server_fixture.py
@@ -10,7 +10,8 @@ __metaclass__ = type
import os
import socket
from textwrap import dedent
-from urllib import urlopen
+
+from six.moves.urllib.request import urlopen
from lp.services.config import config
from lp.services.config.fixture import ConfigFixture
diff --git a/lib/lp/services/librarianserver/tests/test_db_outage.py b/lib/lp/services/librarianserver/tests/test_db_outage.py
index 9012faf..9f28dfe 100644
--- a/lib/lp/services/librarianserver/tests/test_db_outage.py
+++ b/lib/lp/services/librarianserver/tests/test_db_outage.py
@@ -8,9 +8,10 @@ Database outages happen by accident and during fastdowntime deployments."""
__metaclass__ = type
from cStringIO import StringIO
-import urllib2
from fixtures import Fixture
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.request import urlopen
from lp.services.librarian.client import LibrarianClient
from lp.services.librarianserver.testing.server import LibrarianServerFixture
@@ -87,9 +88,9 @@ class TestLibrarianDBOutage(TestCase):
codes = set()
for count in range(num_librarian_threads):
try:
- urllib2.urlopen(self.url).read()
+ urlopen(self.url).read()
codes.add(200)
- except urllib2.HTTPError as error:
+ except HTTPError as error:
codes.add(error.code)
self.assertTrue(len(codes) == 1, 'Mixed responses: %s' % str(codes))
return codes.pop()
diff --git a/lib/lp/services/librarianserver/tests/test_web.py b/lib/lp/services/librarianserver/tests/test_web.py
index b2136ab..f6bbac2 100644
--- a/lib/lp/services/librarianserver/tests/test_web.py
+++ b/lib/lp/services/librarianserver/tests/test_web.py
@@ -10,11 +10,11 @@ import httplib
from io import BytesIO
import os
import unittest
-from urlparse import urlparse
from lazr.uri import URI
import pytz
import requests
+from six.moves.urllib.parse import urlparse
from storm.expr import SQL
from testtools.matchers import EndsWith
import transaction
diff --git a/lib/lp/services/librarianserver/web.py b/lib/lp/services/librarianserver/web.py
index 36b4642..90001dc 100644
--- a/lib/lp/services/librarianserver/web.py
+++ b/lib/lp/services/librarianserver/web.py
@@ -5,9 +5,9 @@ __metaclass__ = type
from datetime import datetime
import time
-from urlparse import urlparse
from pymacaroons import Macaroon
+from six.moves.urllib.parse import urlparse
from storm.exceptions import DisconnectionError
from twisted.internet import (
abstract,
diff --git a/lib/lp/services/oauth/stories/access-token.txt b/lib/lp/services/oauth/stories/access-token.txt
index f0e49d2..1269194 100644
--- a/lib/lp/services/oauth/stories/access-token.txt
+++ b/lib/lp/services/oauth/stories/access-token.txt
@@ -18,7 +18,7 @@ access token.
>>> token.review(salgado, OAuthPermission.WRITE_PUBLIC)
>>> logout()
- >>> from urllib import urlencode
+ >>> from six.moves.urllib.parse import urlencode
>>> data = dict(
... oauth_consumer_key='foobar123451432',
... oauth_version='1.0',
diff --git a/lib/lp/services/oauth/stories/authorize-token.txt b/lib/lp/services/oauth/stories/authorize-token.txt
index f985f1a..cc339bd 100644
--- a/lib/lp/services/oauth/stories/authorize-token.txt
+++ b/lib/lp/services/oauth/stories/authorize-token.txt
@@ -31,7 +31,7 @@ The +authorize-token page is restricted to logged in users, so users will
first be asked to log in. (We won't show the actual login process because
it involves OpenID, which would complicate this test quite a bit.)
- >>> from urllib import urlencode
+ >>> from six.moves.urllib.parse import urlencode
>>> params = dict(
... oauth_token=token.key, oauth_callback='http://launchpad.test/bzr')
>>> url = "http://launchpad.test/+authorize-token?%s" % urlencode(params)
diff --git a/lib/lp/services/oauth/stories/request-token.txt b/lib/lp/services/oauth/stories/request-token.txt
index 9004db5..f72e378 100644
--- a/lib/lp/services/oauth/stories/request-token.txt
+++ b/lib/lp/services/oauth/stories/request-token.txt
@@ -3,7 +3,7 @@
Our sample consumer (whose key is 'foobar123451432') asks Launchpad for
a request token which may later be exchanged for an access token.
- >>> from urllib import urlencode
+ >>> from six.moves.urllib.parse import urlencode
>>> data = dict(
... oauth_consumer_key='foobar123451432',
... oauth_version='1.0',
diff --git a/lib/lp/services/scripts/base.py b/lib/lp/services/scripts/base.py
index 2a19eff..509df60 100644
--- a/lib/lp/services/scripts/base.py
+++ b/lib/lp/services/scripts/base.py
@@ -19,17 +19,17 @@ import logging
from optparse import OptionParser
import os.path
import sys
-from urllib2 import (
- HTTPError,
- URLError,
- urlopen,
- )
from contrib.glock import (
GlobalLock,
LockAlreadyAcquired,
)
import pytz
+from six.moves.urllib.error import (
+ HTTPError,
+ URLError,
+ )
+from six.moves.urllib.request import urlopen
import transaction
from zope.component import getUtility
diff --git a/lib/lp/services/sitesearch/__init__.py b/lib/lp/services/sitesearch/__init__.py
index b11f73d..da896ee 100644
--- a/lib/lp/services/sitesearch/__init__.py
+++ b/lib/lp/services/sitesearch/__init__.py
@@ -12,15 +12,15 @@ __all__ = [
]
import json
-import urllib
-from urlparse import (
- parse_qsl,
- urlunparse,
- )
from lazr.restful.utils import get_current_browser_request
from lazr.uri import URI
import requests
+from six.moves.urllib.parse import (
+ parse_qsl,
+ urlencode,
+ urlunparse,
+ )
from zope.interface import implementer
from lp.services.config import config
@@ -88,7 +88,7 @@ class PageMatch:
"""Escapes invalid urls."""
parts = urlparse(url)
querydata = parse_qsl(parts.query)
- querystring = urllib.urlencode(querydata)
+ querystring = urlencode(querydata)
urldata = list(parts)
urldata[-2] = querystring
return urlunparse(urldata)
@@ -242,7 +242,7 @@ class BingSearchService:
search_params['q'] = terms.encode('utf8')
search_params['offset'] = start
search_params['customConfig'] = self.custom_config_id
- query_string = urllib.urlencode(sorted(search_params.items()))
+ query_string = urlencode(sorted(search_params.items()))
return self.site + '?' + query_string
def create_search_headers(self):
diff --git a/lib/lp/services/verification/browser/logintoken.py b/lib/lp/services/verification/browser/logintoken.py
index c15321d..245d8f0 100644
--- a/lib/lp/services/verification/browser/logintoken.py
+++ b/lib/lp/services/verification/browser/logintoken.py
@@ -15,8 +15,10 @@ __all__ = [
'ValidateGPGKeyView',
]
-import urllib
-
+from six.moves.urllib.parse import (
+ urlencode,
+ urljoin,
+ )
from zope.component import getUtility
from zope.formlib.widget import CustomWidgetFactory
from zope.formlib.widgets import TextAreaWidget
@@ -97,7 +99,7 @@ class LoginTokenView(LaunchpadView):
def render(self):
if self.context.date_consumed is None:
- url = urllib.basejoin(
+ url = urljoin(
str(self.request.URL), self.PAGES[self.context.tokentype])
self.request.response.redirect(url)
else:
@@ -400,7 +402,7 @@ class ValidateEmailView(BaseTokenView, LaunchpadFormView):
# hack, but if it fails nothing will happen.
# -- Guilherme Salgado 2005-07-09
url = allvhosts.configs['mainsite'].rooturl
- query = urllib.urlencode([('field.dupe_person', dupe.name)])
+ query = urlencode([('field.dupe_person', dupe.name)])
url += '/people/+requestmerge?' + query
self.addError(structured(
'This email address is already registered for another '
diff --git a/lib/lp/services/webapp/doc/webapp-publication.txt b/lib/lp/services/webapp/doc/webapp-publication.txt
index 5028368..046ae2f 100644
--- a/lib/lp/services/webapp/doc/webapp-publication.txt
+++ b/lib/lp/services/webapp/doc/webapp-publication.txt
@@ -510,7 +510,7 @@ python 'in' operator.
>>> from lp.services.webapp.servers import (
... LaunchpadBrowserRequest)
- >>> from urllib import urlencode
+ >>> from six.moves.urllib.parse import urlencode
>>> environment = {'QUERY_STRING': urlencode({
... 'a_field': 'a_value',
... 'items_field': [1, 2, 3]}, doseq=True)}
diff --git a/lib/lp/services/webapp/errorlog.py b/lib/lp/services/webapp/errorlog.py
index cdb99c3..2092e90 100644
--- a/lib/lp/services/webapp/errorlog.py
+++ b/lib/lp/services/webapp/errorlog.py
@@ -9,7 +9,6 @@ import contextlib
from itertools import repeat
import operator
import re
-import urlparse
from lazr.restful.utils import (
get_current_browser_request,
@@ -20,6 +19,7 @@ import oops_amqp
from oops_datedir_repo import DateDirRepo
import oops_timeline
import pytz
+from six.moves.urllib.parse import urlparse
from zope.component.interfaces import ObjectEvent
from zope.error.interfaces import IErrorReportingUtility
from zope.event import notify
@@ -396,9 +396,8 @@ class ErrorReportingUtility:
# broken-url-generator in LP: ignore it.
if referer is None:
return True
- referer_parts = urlparse.urlparse(referer)
- root_parts = urlparse.urlparse(
- allvhosts.configs['mainsite'].rooturl)
+ referer_parts = urlparse(referer)
+ root_parts = urlparse(allvhosts.configs['mainsite'].rooturl)
if root_parts.netloc not in referer_parts.netloc:
return True
return False
diff --git a/lib/lp/services/webapp/login.py b/lib/lp/services/webapp/login.py
index 30fc146..43eb478 100644
--- a/lib/lp/services/webapp/login.py
+++ b/lib/lp/services/webapp/login.py
@@ -10,7 +10,6 @@ from datetime import (
datetime,
timedelta,
)
-import urllib
from openid.consumer.consumer import (
CANCEL,
@@ -27,6 +26,7 @@ from paste.httpexceptions import (
HTTPException,
)
import six
+from six.moves.urllib.parse import urlencode
import transaction
from zope.authentication.interfaces import IUnauthenticatedPrincipal
from zope.browserpage import ViewPageTemplateFile
@@ -220,7 +220,7 @@ class OpenIDLogin(LaunchpadView):
passthrough_field = self.request.form.get(passthrough_name, None)
if passthrough_field is not None:
starting_data.append((passthrough_name, passthrough_field))
- starting_url = urllib.urlencode(starting_data)
+ starting_url = urlencode(starting_data)
trust_root = allvhosts.configs['mainsite'].rooturl
return_to = urlappend(trust_root, '+openid-callback')
return_to = "%s?%s" % (return_to, starting_url)
@@ -240,7 +240,7 @@ class OpenIDLogin(LaunchpadView):
def starting_url(self):
starting_url = self.request.getURL(1)
params = list(self.form_args)
- query_string = urllib.urlencode(params, doseq=True)
+ query_string = urlencode(params, doseq=True)
if query_string:
starting_url += "?%s" % query_string
return starting_url
@@ -265,9 +265,8 @@ class OpenIDLogin(LaunchpadView):
else:
value_list = [value]
- # urllib.urlencode will just encode unicode values to ASCII.
- # For our purposes, we can be a little more liberal and allow
- # UTF-8.
+ # urlencode will just encode unicode values to ASCII. For our
+ # purposes, we can be a little more liberal and allow UTF-8.
yield (
six.ensure_binary(name),
[six.ensure_binary(value) for value in value_list])
@@ -591,7 +590,7 @@ class CookieLogoutPage:
openid_root = config.launchpad.openid_provider_root
target = '%s+logout?%s' % (
config.codehosting.secure_codebrowse_root,
- urllib.urlencode(dict(next_to='%s+logout' % (openid_root, ))))
+ urlencode(dict(next_to='%s+logout' % (openid_root, ))))
self.request.response.redirect(target)
return ''
diff --git a/lib/lp/services/webapp/openid.py b/lib/lp/services/webapp/openid.py
index a607d0f..622fa0e 100644
--- a/lib/lp/services/webapp/openid.py
+++ b/lib/lp/services/webapp/openid.py
@@ -13,12 +13,12 @@ __all__ = [
from functools import partial
import os.path
-import urllib2
from openid.fetchers import (
setDefaultFetcher,
Urllib2Fetcher,
)
+from six.moves.urllib.request import urlopen
from lp.services.config import config
@@ -29,5 +29,5 @@ def set_default_openid_fetcher():
fetcher = Urllib2Fetcher()
if config.launchpad.enable_test_openid_provider:
cafile = os.path.join(config.root, "configs/development/launchpad.crt")
- fetcher.urlopen = partial(urllib2.urlopen, cafile=cafile)
+ fetcher.urlopen = partial(urlopen, cafile=cafile)
setDefaultFetcher(fetcher)
diff --git a/lib/lp/services/webapp/publication.py b/lib/lp/services/webapp/publication.py
index 1a1c1a4..6171ccf 100644
--- a/lib/lp/services/webapp/publication.py
+++ b/lib/lp/services/webapp/publication.py
@@ -13,7 +13,6 @@ import thread
import threading
import time
import traceback
-import urllib
from lazr.restful.utils import safe_hasattr
from lazr.uri import (
@@ -21,6 +20,7 @@ from lazr.uri import (
URI,
)
from psycopg2.extensions import TransactionRollbackError
+from six.moves.urllib.parse import quote
from storm.database import STATE_DISCONNECTED
from storm.exceptions import (
DisconnectionError,
@@ -350,7 +350,7 @@ class LaunchpadBrowserPublication(
non_restricted_url = self.getNonRestrictedURL(request)
if non_restricted_url is not None:
- location += '?production=%s' % urllib.quote(non_restricted_url)
+ location += '?production=%s' % quote(non_restricted_url)
request.response.setResult('')
request.response.redirect(location, temporary_if_possible=True)
diff --git a/lib/lp/services/webapp/tests/test_login.py b/lib/lp/services/webapp/tests/test_login.py
index dd485b7..840a8b5 100644
--- a/lib/lp/services/webapp/tests/test_login.py
+++ b/lib/lp/services/webapp/tests/test_login.py
@@ -19,8 +19,6 @@ from datetime import (
)
import httplib
import unittest
-import urllib
-import urlparse
from openid.consumer.consumer import (
FAILURE,
@@ -32,6 +30,11 @@ from openid.extensions import (
)
from openid.yadis.discover import DiscoveryFailure
from six.moves.urllib.error import HTTPError
+from six.moves.urllib.parse import (
+ parse_qsl,
+ quote,
+ urlsplit,
+ )
from testtools.matchers import (
Contains,
ContainsDict,
@@ -773,7 +776,7 @@ class ForwardsCorrectly:
"""
def match(self, query_string):
- args = dict(urlparse.parse_qsl(query_string))
+ args = dict(parse_qsl(query_string))
request = LaunchpadTestRequest(form=args)
request.processInputs()
# This is a hack to make the request.getURL(1) call issued by the view
@@ -781,8 +784,8 @@ class ForwardsCorrectly:
request._app_names = ['foo']
view = StubbedOpenIDLogin(object(), request)
view()
- escaped_args = tuple(map(urllib.quote, args.items()[0]))
- expected_fragment = urllib.quote('%s=%s' % escaped_args)
+ escaped_args = tuple(map(quote, args.items()[0]))
+ expected_fragment = quote('%s=%s' % escaped_args)
return Contains(
expected_fragment).match(view.openid_request.return_to)
@@ -811,8 +814,8 @@ class TestOpenIDLogin(TestCaseWithFactory):
# Sometimes the form params are unicode because a decode('utf8')
# worked in the form machinery... and if so they cannot be trivially
# quoted but must be encoded first.
- key = urllib.quote(u'key\xf3'.encode('utf8'))
- value = urllib.quote(u'value\xf3'.encode('utf8'))
+ key = quote(u'key\xf3'.encode('utf8'))
+ value = quote(u'value\xf3'.encode('utf8'))
query_string = "%s=%s" % (key, value)
self.assertThat(query_string, ForwardsCorrectly())
@@ -875,8 +878,8 @@ class TestOpenIDLogin(TestCaseWithFactory):
macaroon_extension = extensions[1]
self.assertIsInstance(macaroon_extension, MacaroonRequest)
self.assertEqual(caveat_id, macaroon_extension.caveat_id)
- return_to_args = dict(urlparse.parse_qsl(
- urlparse.urlsplit(view.openid_request.return_to).query))
+ return_to_args = dict(parse_qsl(
+ urlsplit(view.openid_request.return_to).query))
self.assertEqual(
'field.actions.complete',
return_to_args['discharge_macaroon_action'])
diff --git a/lib/lp/services/webapp/url.py b/lib/lp/services/webapp/url.py
index 89f1ed7..2530924 100644
--- a/lib/lp/services/webapp/url.py
+++ b/lib/lp/services/webapp/url.py
@@ -6,12 +6,12 @@
__metaclass__ = type
__all__ = ['urlappend', 'urlparse', 'urlsplit']
-from urlparse import (
+from six.moves.urllib.parse import (
urljoin,
urlparse as original_urlparse,
urlsplit as original_urlsplit,
)
-import urlparse as urlparse_module
+import six.moves.urllib.parse as urlparse_module
def _enable_sftp_in_urlparse():
diff --git a/lib/lp/services/webservice/wadl.py b/lib/lp/services/webservice/wadl.py
index eb70668..0a8f526 100644
--- a/lib/lp/services/webservice/wadl.py
+++ b/lib/lp/services/webservice/wadl.py
@@ -6,9 +6,9 @@
__metaclass__ = type
import subprocess
-import urlparse
import importlib_resources
+from six.moves.urllib.parse import urljoin
from lp.services.webapp.interaction import (
ANONYMOUS,
@@ -24,7 +24,7 @@ from lp.services.webapp.vhosts import allvhosts
def _generate_web_service_root(version, mimetype):
"""Generate the webservice description for the given version and mimetype.
"""
- url = urlparse.urljoin(allvhosts.configs['api'].rooturl, version)
+ url = urljoin(allvhosts.configs['api'].rooturl, version)
# Since we want HTTPS URLs we have to munge the request URL.
url = url.replace('http://', 'https://')
request = WebServiceTestRequest(version=version, environ={
diff --git a/lib/lp/snappy/browser/snap.py b/lib/lp/snappy/browser/snap.py
index 2461b29..dba1b18 100644
--- a/lib/lp/snappy/browser/snap.py
+++ b/lib/lp/snappy/browser/snap.py
@@ -18,13 +18,12 @@ __all__ = [
'SnapView',
]
-from urllib import urlencode
-
from lazr.restful.fields import Reference
from lazr.restful.interface import (
copy_field,
use_template,
)
+from six.moves.urllib.parse import urlencode
from zope.component import getUtility
from zope.error.interfaces import IErrorReportingUtility
from zope.formlib.widget import CustomWidgetFactory
diff --git a/lib/lp/snappy/browser/tests/test_snap.py b/lib/lp/snappy/browser/tests/test_snap.py
index 5470916..0a481dc 100644
--- a/lib/lp/snappy/browser/tests/test_snap.py
+++ b/lib/lp/snappy/browser/tests/test_snap.py
@@ -13,15 +13,15 @@ from datetime import (
)
import json
import re
-from urlparse import (
- parse_qs,
- urlsplit,
- )
from fixtures import FakeLogger
from pymacaroons import Macaroon
import pytz
import responses
+from six.moves.urllib.parse import (
+ parse_qs,
+ urlsplit,
+ )
import soupmatchers
from testtools.matchers import (
AfterPreprocessing,
diff --git a/lib/lp/snappy/model/snap.py b/lib/lp/snappy/model/snap.py
index 96aecd2..229eaa4 100644
--- a/lib/lp/snappy/model/snap.py
+++ b/lib/lp/snappy/model/snap.py
@@ -15,13 +15,13 @@ from datetime import (
timedelta,
)
from operator import attrgetter
-from urlparse import urlsplit
from breezy import urlutils
from lazr.lifecycle.event import ObjectCreatedEvent
from pymacaroons import Macaroon
import pytz
import six
+from six.moves.urllib.parse import urlsplit
from storm.expr import (
And,
Desc,
diff --git a/lib/lp/snappy/model/snapstoreclient.py b/lib/lp/snappy/model/snapstoreclient.py
index e9b7e8f..7d094aa 100644
--- a/lib/lp/snappy/model/snapstoreclient.py
+++ b/lib/lp/snappy/model/snapstoreclient.py
@@ -18,13 +18,13 @@ except ImportError:
JSONDecodeError = ValueError
import string
import time
-from urlparse import urlsplit
from lazr.restful.utils import get_current_browser_request
from pymacaroons import Macaroon
import requests
from requests_toolbelt import MultipartEncoder
import six
+from six.moves.urllib.parse import urlsplit
from zope.component import getUtility
from zope.interface import implementer
from zope.security.proxy import removeSecurityProxy
diff --git a/lib/lp/snappy/tests/test_snap.py b/lib/lp/snappy/tests/test_snap.py
index b7d2d8a..809b617 100644
--- a/lib/lp/snappy/tests/test_snap.py
+++ b/lib/lp/snappy/tests/test_snap.py
@@ -15,7 +15,6 @@ from datetime import (
import json
from operator import attrgetter
from textwrap import dedent
-from urlparse import urlsplit
from fixtures import (
FakeLogger,
@@ -26,6 +25,7 @@ from nacl.public import PrivateKey
from pymacaroons import Macaroon
import pytz
import responses
+from six.moves.urllib.parse import urlsplit
from storm.exceptions import LostObjectError
from storm.locals import Store
from testtools.matchers import (
diff --git a/lib/lp/snappy/tests/test_snapbuild.py b/lib/lp/snappy/tests/test_snapbuild.py
index 67e2be1..4a6000e 100644
--- a/lib/lp/snappy/tests/test_snapbuild.py
+++ b/lib/lp/snappy/tests/test_snapbuild.py
@@ -11,11 +11,11 @@ from datetime import (
datetime,
timedelta,
)
-from urllib2 import urlopen
from fixtures import FakeLogger
from pymacaroons import Macaroon
import pytz
+from six.moves.urllib.request import urlopen
from testtools.matchers import (
ContainsDict,
Equals,
diff --git a/lib/lp/soyuz/browser/widgets/archive.py b/lib/lp/soyuz/browser/widgets/archive.py
index 0505a2e..505ccb4 100644
--- a/lib/lp/soyuz/browser/widgets/archive.py
+++ b/lib/lp/soyuz/browser/widgets/archive.py
@@ -8,7 +8,7 @@ __all__ = [
'PPANameWidget',
]
-import urlparse
+from six.moves.urllib.parse import urljoin
from lp.app.widgets.textwidgets import URIComponentWidget
from lp.services.config import config
@@ -25,4 +25,4 @@ class PPANameWidget(URIComponentWidget):
root = config.personalpackagearchive.private_base_url
else:
root = config.personalpackagearchive.base_url
- return urlparse.urljoin(root, owner.name) + '/'
+ return urljoin(root, owner.name) + '/'
diff --git a/lib/lp/soyuz/interfaces/archive.py b/lib/lp/soyuz/interfaces/archive.py
index d24e502..4d18641 100644
--- a/lib/lp/soyuz/interfaces/archive.py
+++ b/lib/lp/soyuz/interfaces/archive.py
@@ -56,7 +56,6 @@ __all__ = [
import httplib
import re
-from urlparse import urlparse
from lazr.restful.declarations import (
call_with,
@@ -81,6 +80,7 @@ from lazr.restful.fields import (
CollectionField,
Reference,
)
+from six.moves.urllib.parse import urlparse
from zope.interface import (
Attribute,
Interface,
diff --git a/lib/lp/soyuz/scripts/ppa_apache_log_parser.py b/lib/lp/soyuz/scripts/ppa_apache_log_parser.py
index 0085acb..64bfcd1 100644
--- a/lib/lp/soyuz/scripts/ppa_apache_log_parser.py
+++ b/lib/lp/soyuz/scripts/ppa_apache_log_parser.py
@@ -4,7 +4,8 @@
__all__ = ['DBUSER', 'get_ppa_file_key']
import os.path
-import urllib
+
+from six.moves.urllib.parse import unquote
from lp.archiveuploader.utils import re_isadeb
@@ -13,7 +14,7 @@ DBUSER = 'ppa-apache-log-parser'
def get_ppa_file_key(path):
- split_path = os.path.normpath(urllib.unquote(path)).split('/')
+ split_path = os.path.normpath(unquote(path)).split('/')
if len(split_path) != 9:
return None
diff --git a/lib/lp/soyuz/tests/test_livefsbuild.py b/lib/lp/soyuz/tests/test_livefsbuild.py
index 99ceccc..8b6491c 100644
--- a/lib/lp/soyuz/tests/test_livefsbuild.py
+++ b/lib/lp/soyuz/tests/test_livefsbuild.py
@@ -11,16 +11,19 @@ from datetime import (
datetime,
timedelta,
)
-from urllib2 import urlopen
from fixtures import FakeLogger
import pytz
+from six.moves.urllib.request import urlopen
from testtools.matchers import (
ContainsDict,
Equals,
MatchesDict,
MatchesStructure,
)
from zope.component import getUtility
from zope.security.proxy import removeSecurityProxy
diff --git a/lib/lp/soyuz/tests/test_packageupload.py b/lib/lp/soyuz/tests/test_packageupload.py
index cb4d038..408891f 100644
--- a/lib/lp/soyuz/tests/test_packageupload.py
+++ b/lib/lp/soyuz/tests/test_packageupload.py
@@ -9,13 +9,13 @@ from datetime import timedelta
import io
import os.path
import shutil
-from urllib2 import urlopen
from debian.deb822 import Changes
from lazr.restfulclient.errors import (
BadRequest,
Unauthorized,
)
+from six.moves.urllib.request import urlopen
from testtools.matchers import Equals
import transaction
from zope.component import (
diff --git a/lib/lp/testing/keyserver/tests/test_harness.py b/lib/lp/testing/keyserver/tests/test_harness.py
index b8df7b4..933c9f3 100644
--- a/lib/lp/testing/keyserver/tests/test_harness.py
+++ b/lib/lp/testing/keyserver/tests/test_harness.py
@@ -3,7 +3,7 @@
__metaclass__ = type
-from urllib import urlopen
+from six.moves.urllib.request import urlopen
from lp.services.config import config
from lp.testing import TestCase
diff --git a/lib/lp/testing/layers.py b/lib/lp/testing/layers.py
index 65fb63e..0a1303e 100644
--- a/lib/lp/testing/layers.py
+++ b/lib/lp/testing/layers.py
@@ -71,7 +71,6 @@ from unittest import (
TestCase,
TestResult,
)
-from urllib import urlopen
import uuid
from fixtures import (
@@ -79,10 +78,12 @@ from fixtures import (
MonkeyPatch,
)
import psycopg2
+from six.moves.urllib.error import URLError
from six.moves.urllib.parse import (
quote,
urlparse,
)
+from six.moves.urllib.request import urlopen
from storm.zope.interfaces import IZStorm
import transaction
from webob.request import environ_from_url as orig_environ_from_url
@@ -1968,14 +1969,11 @@ class LayerProcessController:
try:
connection = urlopen(root_url)
connection.read()
- except IOError as error:
+ except URLError as error:
# We are interested in a wrapped socket.error.
- # urlopen() really sucks here.
- if len(error.args) <= 1:
+ if not isinstance(error.reason, socket.error):
raise
- if not isinstance(error.args[1], socket.error):
- raise
- if error.args[1].args[0] != errno.ECONNREFUSED:
+ if error.reason.args[0] != errno.ECONNREFUSED:
raise
returncode = cls.appserver.poll()
if returncode is not None:
diff --git a/lib/lp/testing/pages.py b/lib/lp/testing/pages.py
index 073f5b8..e552b4b 100644
--- a/lib/lp/testing/pages.py
+++ b/lib/lp/testing/pages.py
@@ -16,7 +16,6 @@ import os
import pprint
import re
import unittest
-from urlparse import urljoin
from bs4.element import (
CData,
@@ -36,6 +35,7 @@ from contrib.oauth import (
)
from lazr.restful.testing.webservice import WebServiceCaller
import six
+from six.moves.urllib.parse import urljoin
from soupsieve import escape as css_escape
import transaction
from webtest import (
diff --git a/lib/lp/testing/tests/test_layers_functional.py b/lib/lp/testing/tests/test_layers_functional.py
index 5a97097..1dbfeda 100644
--- a/lib/lp/testing/tests/test_layers_functional.py
+++ b/lib/lp/testing/tests/test_layers_functional.py
@@ -15,7 +15,6 @@ from cStringIO import StringIO
import os
import signal
import smtplib
-from urllib import urlopen
import uuid
import amqp
@@ -25,6 +24,8 @@ from fixtures import (
TestWithFixtures,
)
from lazr.config import as_host_port
+from six.moves.urllib.error import HTTPError
+from six.moves.urllib.request import urlopen
from zope.component import (
ComponentLookupError,
getUtility,
@@ -354,10 +355,7 @@ class LibrarianResetTestCase(TestCase):
LibrarianLayer.testTearDown()
LibrarianLayer.testSetUp()
# Which should have nuked the old file.
- # XXX: StuartBishop 2006-06-30 Bug=51370:
- # We should get a DownloadFailed exception here.
- data = urlopen(LibrarianTestCase.url).read()
- self.assertNotEqual(data, self.sample_data)
+ self.assertRaises(HTTPError, urlopen, LibrarianTestCase.url)
class LibrarianHideTestCase(TestCase):
diff --git a/lib/lp/testopenid/stories/basics.txt b/lib/lp/testopenid/stories/basics.txt
index 3ccbaf4..56c6f4e 100644
--- a/lib/lp/testopenid/stories/basics.txt
+++ b/lib/lp/testopenid/stories/basics.txt
@@ -29,7 +29,7 @@ After determining the URL of the OpenID server, the next thing a consumer
needs to do is associate with the server and get a shared secret via a
POST request.
- >>> from urllib import urlencode
+ >>> from six.moves.urllib.parse import urlencode
>>> anon_browser.open(
... 'http://testopenid.test/+openid', data=urlencode({
... 'openid.mode': 'associate',
diff --git a/lib/lp/translations/browser/person.py b/lib/lp/translations/browser/person.py
index 9b2e992..e8f3e1d 100644
--- a/lib/lp/translations/browser/person.py
+++ b/lib/lp/translations/browser/person.py
@@ -16,9 +16,9 @@ from datetime import (
timedelta,
)
from itertools import islice
-import urllib
import pytz
+from six.moves.urllib.parse import urlencode
from zope.browserpage import ViewPageTemplateFile
from zope.component import getUtility
from zope.formlib.widget import CustomWidgetFactory
@@ -114,7 +114,7 @@ class TranslateLinksAggregator(WorkListLinksAggregator):
def compose_pofile_filter_url(pofile, person):
"""Compose URL for `Person`'s contributions to `POFile`."""
- person_name = urllib.urlencode({'person': person.name})
+ person_name = urlencode({'person': person.name})
return canonical_url(pofile) + "/+filter?%s" % person_name
diff --git a/lib/lp/translations/browser/pofile.py b/lib/lp/translations/browser/pofile.py
index a193059..e14555c 100644
--- a/lib/lp/translations/browser/pofile.py
+++ b/lib/lp/translations/browser/pofile.py
@@ -17,9 +17,9 @@ __all__ = [
import os.path
import re
-import urllib
from lazr.restful.utils import smartquote
+from six.moves.urllib.parse import urlencode
from zope.component import getUtility
from zope.publisher.browser import FileUpload
@@ -568,7 +568,7 @@ class POFileTranslateView(BaseTranslationView, POFileMetadataViewMixin):
return self.request.response.redirect(
canonical_url(self.user, view_name='+licensing',
rootsite='translations') +
- '?' + urllib.urlencode({'back_to': url}))
+ '?' + urlencode({'back_to': url}))
# The handling of errors is slightly tricky here. Because this
# form displays multiple POMsgSetViews, we need to track the
diff --git a/lib/lp/translations/browser/tests/test_persontranslationview.py b/lib/lp/translations/browser/tests/test_persontranslationview.py
index f64f8aa..33abcbc 100644
--- a/lib/lp/translations/browser/tests/test_persontranslationview.py
+++ b/lib/lp/translations/browser/tests/test_persontranslationview.py
@@ -3,8 +3,7 @@
__metaclass__ = type
-import urllib
-
+from six.moves.urllib.parse import urlencode
from zope.security.proxy import removeSecurityProxy
from lp.app.enums import ServiceUsage
@@ -199,7 +198,7 @@ class TestPersonTranslationView(TestCaseWithFactory):
pofiles_worked_on = self._makePOFiles(11, previously_worked_on=True)
# the expected results
- person_name = urllib.urlencode({'person': self.view.context.name})
+ person_name = urlencode({'person': self.view.context.name})
expected_links = [
(pofile.potemplate.translationtarget.title,
canonical_url(pofile, view_name="+filter") + "?%s" % person_name)
diff --git a/lib/lp/translations/browser/translationmessage.py b/lib/lp/translations/browser/translationmessage.py
index c2bdc04..ef75f6c 100644
--- a/lib/lp/translations/browser/translationmessage.py
+++ b/lib/lp/translations/browser/translationmessage.py
@@ -21,10 +21,12 @@ __all__ = [
import datetime
import operator
import re
-import urllib
import pytz
-from six.moves.urllib.parse import parse_qsl
+from six.moves.urllib.parse import (
+ parse_qsl,
+ urlencode,
+ )
from zope import datetime as zope_datetime
from zope.browserpage import ViewPageTemplateFile
from zope.component import getUtility
@@ -863,7 +865,7 @@ class BaseTranslationView(LaunchpadView):
else:
base_url = new_url
- new_query = urllib.urlencode(sorted(parameters.items()))
+ new_query = urlencode(sorted(parameters.items()))
if new_query:
new_url = '%s?%s' % (base_url, new_query)
diff --git a/lib/lp/translations/doc/poexport-request-productseries.txt b/lib/lp/translations/doc/poexport-request-productseries.txt
index 8de39ba..4e39210 100644
--- a/lib/lp/translations/doc/poexport-request-productseries.txt
+++ b/lib/lp/translations/doc/poexport-request-productseries.txt
@@ -84,9 +84,9 @@ The email contains a URL linking to where the exported file can be downloaded.
Let's download it and make sure the contents look ok.
- >>> import urllib2
+ >>> from six.moves.urllib.request import urlopen
>>> from lp.services.helpers import string_to_tarfile
- >>> tarball = string_to_tarfile(urllib2.urlopen(url).read())
+ >>> tarball = string_to_tarfile(urlopen(url).read())
>>> for name in sorted(tarball.getnames()):
... print(name)
evolution-2.2
diff --git a/lib/lp/translations/doc/poexport-request.txt b/lib/lp/translations/doc/poexport-request.txt
index a476426..a95d7c0 100644
--- a/lib/lp/translations/doc/poexport-request.txt
+++ b/lib/lp/translations/doc/poexport-request.txt
@@ -87,9 +87,9 @@ The email contains a URL linking to where the exported file can be downloaded.
Let's download it and make sure the contents look ok.
- >>> import urllib2
+ >>> from six.moves.urllib.request import urlopen
>>> from lp.services.helpers import string_to_tarfile
- >>> tarball = string_to_tarfile(urllib2.urlopen(url).read())
+ >>> tarball = string_to_tarfile(urlopen(url).read())
>>> for name in sorted(tarball.getnames()):
... print(name)
pmount
@@ -208,7 +208,7 @@ Check whether we generated a good .mo file.
>>> body = emails.pop().get_payload()
>>> url = extract_url(body)
- >>> is_valid_mofile(urllib2.urlopen(url).read())
+ >>> is_valid_mofile(urlopen(url).read())
True
diff --git a/lib/lp/translations/stories/importqueue/xx-translation-import-queue.txt b/lib/lp/translations/stories/importqueue/xx-translation-import-queue.txt
index 4ba2c4b..8e9f427 100644
--- a/lib/lp/translations/stories/importqueue/xx-translation-import-queue.txt
+++ b/lib/lp/translations/stories/importqueue/xx-translation-import-queue.txt
@@ -208,8 +208,8 @@ There is an option to remove entries from the queue.
No Privileges Person tries to remove entries but to no effect.
- >>> import urllib
- >>> post_data = urllib.urlencode(
+ >>> from six.moves.urllib.parse import urlencode
+ >>> post_data = urlencode(
... {
... 'field.filter_target': 'all',
... 'field.filter_status': 'all',
@@ -456,7 +456,7 @@ the erroneous parenthesis included.
Here we'll simulate such a request and show that the resulting unrecognized
filter_extension values do not generate an error. See bug 388997.
- >>> post_data = urllib.urlencode(
+ >>> post_data = urlencode(
... {
... 'field.filter_target': 'all',
... 'field.filter_status': 'all',
diff --git a/utilities/paste b/utilities/paste
index b2df8e0..3105a34 100755
--- a/utilities/paste
+++ b/utilities/paste
@@ -15,11 +15,10 @@ from optparse import OptionParser
import os
import pwd
import sys
-import urllib
-from urlparse import urljoin
import webbrowser
from fixtures import MonkeyPatch
+from six.moves.urllib.parse import urljoin
from zope.testbrowser.browser import Browser
# Should we be able to override any of these?
diff --git a/utilities/roundup-sniffer.py b/utilities/roundup-sniffer.py
index f13088f..c101ecc 100755
--- a/utilities/roundup-sniffer.py
+++ b/utilities/roundup-sniffer.py
@@ -46,8 +46,9 @@ from os.path import (
from pprint import pprint
import sys
from time import sleep
-from urllib import urlencode
-import urllib2
+
+from six.moves.urllib.parse import urlencode
+from six.moves.urllib.request import urlopen
from lp.services.beautifulsoup import BeautifulSoup
@@ -67,8 +68,7 @@ class RoundupSniffer:
"""Fetch the URL, consulting the cache first."""
filename = join(self.cache_dir, urlsafe_b64encode(url))
if not exists(filename):
- open(filename, 'wb').write(
- urllib2.urlopen(url).read())
+ open(filename, 'wb').write(urlopen(url).read())
return open(filename, 'rb')
def get_all_bugs(self):