author     Zuul <zuul@review.openstack.org>           2018-10-05 17:14:15 +0000
committer  Gerrit Code Review <review@openstack.org>  2018-10-05 17:14:15 +0000
commit     3e579256a36e66960495da2f303b5a6e37f644a6 (patch)
tree       852d460d45bd5a90147c0a5a0b5ee12859c94f20
parent     41f6560dd6d4b6cae92c1ceea2038720666621f9 (diff)
parent     2530ccff8f0a0105d1626c65616080b27682e129 (diff)
Merge "Upgrade pylint to a version that works with python3"
-rw-r--r--  lower-constraints.txt                     4
-rw-r--r--  neutron/agent/metadata/agent.py           4
-rw-r--r--  neutron/api/api_common.py                18
-rw-r--r--  neutron/pecan_wsgi/controllers/root.py    4
-rw-r--r--  neutron/tests/unit/api/v2/test_base.py   26
-rw-r--r--  neutron/tests/unit/test_wsgi.py          11
-rw-r--r--  test-requirements.txt                     3
7 files changed, 36 insertions(+), 34 deletions(-)
diff --git a/lower-constraints.txt b/lower-constraints.txt
index ea8086d585..6ff647ca97 100644
--- a/lower-constraints.txt
+++ b/lower-constraints.txt
@@ -2,7 +2,7 @@ alabaster==0.7.10
alembic==0.8.10
amqp==2.1.1
appdirs==1.3.0
-astroid==1.3.8
+astroid==1.6.5
Babel==2.3.4
bandit==1.1.0
bashate==0.5.1
@@ -101,7 +101,7 @@ pycparser==2.18
pyflakes==0.8.1
Pygments==2.2.0
pyinotify==0.9.6
-pylint==1.4.5
+pylint==1.9.2
PyMySQL==0.7.6
pyparsing==2.1.0
pyperclip==1.5.27
diff --git a/neutron/agent/metadata/agent.py b/neutron/agent/metadata/agent.py
index 27205f7c7f..a129cd9c18 100644
--- a/neutron/agent/metadata/agent.py
+++ b/neutron/agent/metadata/agent.py
@@ -25,7 +25,7 @@ from oslo_service import loopingcall
from oslo_utils import encodeutils
import requests
import six
-import six.moves.urllib.parse as urlparse
+from six.moves import urllib
import webob
from neutron._i18n import _
@@ -179,7 +179,7 @@ class MetadataProxyHandler(object):
nova_host_port = '%s:%s' % (self.conf.nova_metadata_host,
self.conf.nova_metadata_port)
- url = urlparse.urlunsplit((
+ url = urllib.parse.urlunsplit((
self.conf.nova_metadata_protocol,
nova_host_port,
req.path_info,
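
For reference, a standalone sketch of what the rewritten call builds; host, port, path, and query here are hypothetical stand-ins for self.conf and req in the hunk above:

    from six.moves import urllib

    nova_host_port = '%s:%s' % ('nova.example.org', 8775)  # hypothetical values
    url = urllib.parse.urlunsplit((
        'http',                      # nova_metadata_protocol
        nova_host_port,              # network location
        '/latest/meta-data',         # req.path_info
        'instance-id=i-000001',      # query string
        ''))                         # no fragment
    # url == 'http://nova.example.org:8775/latest/meta-data?instance-id=i-000001'
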
diff --git a/neutron/api/api_common.py b/neutron/api/api_common.py
index f7a30aca9e..64db4e9c2e 100644
--- a/neutron/api/api_common.py
+++ b/neutron/api/api_common.py
@@ -22,7 +22,7 @@ from oslo_config import cfg
import oslo_i18n
from oslo_log import log as logging
from oslo_serialization import jsonutils
-from six.moves.urllib import parse
+from six.moves import urllib
from webob import exc
from neutron._i18n import _
@@ -127,7 +127,7 @@ def get_previous_link(request, items, id_key):
params['marker'] = marker
params['page_reverse'] = True
return "%s?%s" % (prepare_url(get_path_url(request)),
- parse.urlencode(params))
+ urllib.parse.urlencode(params))
def get_next_link(request, items, id_key):
@@ -138,7 +138,7 @@ def get_next_link(request, items, id_key):
params['marker'] = marker
params.pop('page_reverse', None)
return "%s?%s" % (prepare_url(get_path_url(request)),
- parse.urlencode(params))
+ urllib.parse.urlencode(params))
def prepare_url(orig_url):
@@ -147,24 +147,24 @@ def prepare_url(orig_url):
# Copied directly from nova/api/openstack/common.py
if not prefix:
return orig_url
- url_parts = list(parse.urlsplit(orig_url))
- prefix_parts = list(parse.urlsplit(prefix))
+ url_parts = list(urllib.parse.urlsplit(orig_url))
+ prefix_parts = list(urllib.parse.urlsplit(prefix))
url_parts[0:2] = prefix_parts[0:2]
url_parts[2] = prefix_parts[2] + url_parts[2]
- return parse.urlunsplit(url_parts).rstrip('/')
+ return urllib.parse.urlunsplit(url_parts).rstrip('/')
def get_path_url(request):
"""Return correct link if X-Forwarded-Proto exists in headers."""
protocol = request.headers.get('X-Forwarded-Proto')
- parsed = parse.urlparse(request.path_url)
+ parsed = urllib.parse.urlparse(request.path_url)
if protocol and parsed.scheme != protocol:
- new_parsed = parse.ParseResult(
+ new_parsed = urllib.parse.ParseResult(
protocol, parsed.netloc,
parsed.path, parsed.params,
parsed.query, parsed.fragment)
- return parse.urlunparse(new_parsed)
+ return urllib.parse.urlunparse(new_parsed)
else:
return request.path_url
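
The get_path_url hunk is easier to read outside the diff. A self-contained re-run of its logic, with hypothetical header and URL values: the client reached the API over TLS, but the proxied request URL still says http, so the scheme is swapped.

    from six.moves import urllib

    path_url = 'http://neutron.example.org:9696/v2.0/networks'  # hypothetical
    forwarded = 'https'  # hypothetical X-Forwarded-Proto header value

    parsed = urllib.parse.urlparse(path_url)
    if forwarded and parsed.scheme != forwarded:
        # ParseResult is a namedtuple, so it can be rebuilt field by field
        # with only the scheme replaced.
        parsed = urllib.parse.ParseResult(
            forwarded, parsed.netloc, parsed.path,
            parsed.params, parsed.query, parsed.fragment)
    print(urllib.parse.urlunparse(parsed))
    # https://neutron.example.org:9696/v2.0/networks
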
diff --git a/neutron/pecan_wsgi/controllers/root.py b/neutron/pecan_wsgi/controllers/root.py
index 9bfa69c0ae..168cd751d8 100644
--- a/neutron/pecan_wsgi/controllers/root.py
+++ b/neutron/pecan_wsgi/controllers/root.py
@@ -22,7 +22,7 @@ from oslo_config import cfg
from oslo_log import log
import pecan
from pecan import request
-import six.moves.urllib.parse as urlparse
+from six.moves import urllib
from neutron.api.views import versions as versions_view
from neutron import manager
@@ -99,7 +99,7 @@ class V2Controller(object):
layout = []
for name, collection in _CORE_RESOURCES.items():
- href = urlparse.urljoin(pecan.request.path_url, collection)
+ href = urllib.parse.urljoin(pecan.request.path_url, collection)
resource = {'name': name,
'collection': collection,
'links': [{'rel': 'self',
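
urljoin keeps the base URL's scheme and authority and resolves the collection name against its path. A sketch with a hypothetical request URL (note the trailing slash; without it, urljoin would replace the last path segment instead of appending):

    from six.moves import urllib

    path_url = 'http://controller:9696/v2.0/'  # hypothetical pecan.request.path_url
    href = urllib.parse.urljoin(path_url, 'networks')
    # href == 'http://controller:9696/v2.0/networks'
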
diff --git a/neutron/tests/unit/api/v2/test_base.py b/neutron/tests/unit/api/v2/test_base.py
index dd14c991d9..5576eee732 100644
--- a/neutron/tests/unit/api/v2/test_base.py
+++ b/neutron/tests/unit/api/v2/test_base.py
@@ -30,7 +30,7 @@ from oslo_db import exception as db_exc
from oslo_policy import policy as oslo_policy
from oslo_utils import uuidutils
import six
-import six.moves.urllib.parse as urlparse
+from six.moves import urllib
import webob
from webob import exc
import webtest
@@ -592,16 +592,16 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
self.assertEqual(1, len(next_links))
self.assertEqual(1, len(previous_links))
- url = urlparse.urlparse(next_links[0]['href'])
+ url = urllib.parse.urlparse(next_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
params['marker'] = [id2]
- self.assertEqual(params, urlparse.parse_qs(url.query))
+ self.assertEqual(params, urllib.parse.parse_qs(url.query))
- url = urlparse.urlparse(previous_links[0]['href'])
+ url = urllib.parse.urlparse(previous_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
params['marker'] = [id1]
params['page_reverse'] = ['True']
- self.assertEqual(params, urlparse.parse_qs(url.query))
+ self.assertEqual(params, urllib.parse.parse_qs(url.query))
def test_list_pagination_with_last_page(self):
id = str(_uuid())
@@ -631,12 +631,12 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
previous_links.append(r)
self.assertEqual(1, len(previous_links))
- url = urlparse.urlparse(previous_links[0]['href'])
+ url = urllib.parse.urlparse(previous_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expect_params = params.copy()
expect_params['marker'] = [id]
expect_params['page_reverse'] = ['True']
- self.assertEqual(expect_params, urlparse.parse_qs(url.query))
+ self.assertEqual(expect_params, urllib.parse.parse_qs(url.query))
def test_list_pagination_with_empty_page(self):
return_value = []
@@ -657,12 +657,12 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
previous_links.append(r)
self.assertEqual(1, len(previous_links))
- url = urlparse.urlparse(previous_links[0]['href'])
+ url = urllib.parse.urlparse(previous_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expect_params = params.copy()
del expect_params['marker']
expect_params['page_reverse'] = ['True']
- self.assertEqual(expect_params, urlparse.parse_qs(url.query))
+ self.assertEqual(expect_params, urllib.parse.parse_qs(url.query))
def test_list_pagination_reverse_with_last_page(self):
id = str(_uuid())
@@ -693,13 +693,13 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
next_links.append(r)
self.assertEqual(1, len(next_links))
- url = urlparse.urlparse(next_links[0]['href'])
+ url = urllib.parse.urlparse(next_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expected_params = params.copy()
del expected_params['page_reverse']
expected_params['marker'] = [id]
self.assertEqual(expected_params,
- urlparse.parse_qs(url.query))
+ urllib.parse.parse_qs(url.query))
def test_list_pagination_reverse_with_empty_page(self):
return_value = []
@@ -720,12 +720,12 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
next_links.append(r)
self.assertEqual(1, len(next_links))
- url = urlparse.urlparse(next_links[0]['href'])
+ url = urllib.parse.urlparse(next_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expect_params = params.copy()
del expect_params['marker']
del expect_params['page_reverse']
- self.assertEqual(expect_params, urlparse.parse_qs(url.query))
+ self.assertEqual(expect_params, urllib.parse.parse_qs(url.query))
def test_create(self):
net_id = _uuid()
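
The assertions in these tests wrap expected markers in single-element lists because parse_qs always returns list-valued parameters. A sketch with a hypothetical pagination link:

    from six.moves import urllib

    href = 'http://localhost/v2.0/networks?limit=2&marker=abc&page_reverse=True'
    url = urllib.parse.urlparse(href)
    params = urllib.parse.parse_qs(url.query)
    # params == {'limit': ['2'], 'marker': ['abc'], 'page_reverse': ['True']}
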
diff --git a/neutron/tests/unit/test_wsgi.py b/neutron/tests/unit/test_wsgi.py
index e7fdb1e1d6..1feb388c39 100644
--- a/neutron/tests/unit/test_wsgi.py
+++ b/neutron/tests/unit/test_wsgi.py
@@ -20,7 +20,7 @@ import ssl
import mock
from neutron_lib import exceptions as exception
from oslo_config import cfg
-import six.moves.urllib.request as urlrequest
+from six.moves import urllib
import testtools
import webob
import webob.exc
@@ -43,12 +43,13 @@ def open_no_proxy(*args, **kwargs):
# introduced in python 2.7.9 under PEP-0476
# https://github.com/python/peps/blob/master/pep-0476.txt
if hasattr(ssl, "_create_unverified_context"):
- opener = urlrequest.build_opener(
- urlrequest.ProxyHandler({}),
- urlrequest.HTTPSHandler(context=ssl._create_unverified_context())
+ opener = urllib.request.build_opener(
+ urllib.request.ProxyHandler({}),
+ urllib.request.HTTPSHandler(
+ context=ssl._create_unverified_context())
)
else:
- opener = urlrequest.build_opener(urlrequest.ProxyHandler({}))
+ opener = urllib.request.build_opener(urllib.request.ProxyHandler({}))
return opener.open(*args, **kwargs)
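
The same construction as a usage sketch, without the version branch; the endpoint is hypothetical. ProxyHandler with an empty mapping disables proxying even when proxy environment variables are set, and the unverified SSL context skips certificate checks for the tests' self-signed local server.

    import ssl
    from six.moves import urllib

    handlers = [urllib.request.ProxyHandler({})]  # empty map: never proxy
    if hasattr(ssl, "_create_unverified_context"):
        # Skip certificate verification (PEP 476 made it the default).
        handlers.append(urllib.request.HTTPSHandler(
            context=ssl._create_unverified_context()))
    opener = urllib.request.build_opener(*handlers)
    # resp = opener.open('https://127.0.0.1:8443/')  # hypothetical endpoint
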
diff --git a/test-requirements.txt b/test-requirements.txt
index 3a022481eb..4027fec533 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -18,7 +18,8 @@ oslotest>=3.2.0 # Apache-2.0
stestr>=1.0.0 # Apache-2.0
reno>=2.5.0 # Apache-2.0
ddt>=1.0.1 # MIT
-pylint==1.4.5 # GPLv2
+astroid==1.6.5 # LGPLv2.1
+pylint==1.9.2 # GPLv2
# Needed to run DB commands in virtualenvs
PyMySQL>=0.7.6 # MIT License
bashate>=0.5.1 # Apache-2.0
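
Pinning astroid explicitly alongside pylint presumably keeps pip from resolving a newer astroid than pylint 1.9.2 was tested against. A quick sanity check of the installed pair in a test virtualenv (not part of the commit):

    import pkg_resources

    for name in ('pylint', 'astroid'):
        print(name, pkg_resources.get_distribution(name).version)
    # expected with this change: pylint 1.9.2, astroid 1.6.5
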