Merge "Upgrade pylint to a version that works with python3"
commit 3e579256a3
@@ -2,7 +2,7 @@ alabaster==0.7.10
 alembic==0.8.10
 amqp==2.1.1
 appdirs==1.3.0
-astroid==1.3.8
+astroid==1.6.5
 Babel==2.3.4
 bandit==1.1.0
 bashate==0.5.1
@@ -101,7 +101,7 @@ pycparser==2.18
 pyflakes==0.8.1
 Pygments==2.2.0
 pyinotify==0.9.6
-pylint==1.4.5
+pylint==1.9.2
 PyMySQL==0.7.6
 pyparsing==2.1.0
 pyperclip==1.5.27
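Note: astroid is the AST library pylint is built on, and the two are released in lock-step, so the constraint bump has to move both pins together (pylint 1.9.x pairs with astroid 1.6.x). A quick way to confirm an environment picked up the new pins — a throwaway sketch, not part of the change:

    import pkg_resources

    # Expect 1.9.2 / 1.6.5 once the new constraints are installed.
    for dist in ('pylint', 'astroid'):
        print(dist, pkg_resources.get_distribution(dist).version)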
@@ -25,7 +25,7 @@ from oslo_service import loopingcall
 from oslo_utils import encodeutils
 import requests
 import six
-import six.moves.urllib.parse as urlparse
+from six.moves import urllib
 import webob
 
 from neutron._i18n import _
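The import change above is the pattern repeated through the rest of this commit: six.moves is a lazily synthesized package, and the newer pylint/astroid evidently no longer accept the aliased submodule form, so every module switches to importing the urllib shim wholesale. A minimal before/after sketch (URL value assumed):

    # Old form, flagged by the newer pylint:
    # import six.moves.urllib.parse as urlparse
    # parts = urlparse.urlsplit('http://controller:8775/path')

    # New form used throughout this change:
    from six.moves import urllib
    parts = urllib.parse.urlsplit('http://controller:8775/path')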
@@ -179,7 +179,7 @@ class MetadataProxyHandler(object):
 
         nova_host_port = '%s:%s' % (self.conf.nova_metadata_host,
                                     self.conf.nova_metadata_port)
-        url = urlparse.urlunsplit((
+        url = urllib.parse.urlunsplit((
            self.conf.nova_metadata_protocol,
            nova_host_port,
            req.path_info,
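For reference, a sketch of what the rewritten call builds, with assumed literal values in place of self.conf and the request:

    from six.moves import urllib

    nova_host_port = '%s:%s' % ('nova-metadata.example', 8775)
    print(urllib.parse.urlunsplit((
        'http', nova_host_port, '/openstack/latest/meta_data.json', '', '')))
    # http://nova-metadata.example:8775/openstack/latest/meta_data.json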
@@ -22,7 +22,7 @@ from oslo_config import cfg
 import oslo_i18n
 from oslo_log import log as logging
 from oslo_serialization import jsonutils
-from six.moves.urllib import parse
+from six.moves import urllib
 from webob import exc
 
 from neutron._i18n import _
@@ -127,7 +127,7 @@ def get_previous_link(request, items, id_key):
     params['marker'] = marker
     params['page_reverse'] = True
     return "%s?%s" % (prepare_url(get_path_url(request)),
-                      parse.urlencode(params))
+                      urllib.parse.urlencode(params))
 
 
 def get_next_link(request, items, id_key):
@@ -138,7 +138,7 @@ def get_next_link(request, items, id_key):
     params['marker'] = marker
     params.pop('page_reverse', None)
     return "%s?%s" % (prepare_url(get_path_url(request)),
-                      parse.urlencode(params))
+                      urllib.parse.urlencode(params))
 
 
 def prepare_url(orig_url):
@@ -147,24 +147,24 @@ def prepare_url(orig_url):
     # Copied directly from nova/api/openstack/common.py
     if not prefix:
         return orig_url
-    url_parts = list(parse.urlsplit(orig_url))
-    prefix_parts = list(parse.urlsplit(prefix))
+    url_parts = list(urllib.parse.urlsplit(orig_url))
+    prefix_parts = list(urllib.parse.urlsplit(prefix))
     url_parts[0:2] = prefix_parts[0:2]
     url_parts[2] = prefix_parts[2] + url_parts[2]
-    return parse.urlunsplit(url_parts).rstrip('/')
+    return urllib.parse.urlunsplit(url_parts).rstrip('/')
 
 
 def get_path_url(request):
     """Return correct link if X-Forwarded-Proto exists in headers."""
     protocol = request.headers.get('X-Forwarded-Proto')
-    parsed = parse.urlparse(request.path_url)
+    parsed = urllib.parse.urlparse(request.path_url)
 
     if protocol and parsed.scheme != protocol:
-        new_parsed = parse.ParseResult(
+        new_parsed = urllib.parse.ParseResult(
             protocol, parsed.netloc,
             parsed.path, parsed.params,
             parsed.query, parsed.fragment)
-        return parse.urlunparse(new_parsed)
+        return urllib.parse.urlunparse(new_parsed)
     else:
         return request.path_url
 
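prepare_url splices a configured prefix onto a generated link: the scheme and host come from the prefix, and its path is prepended to the original path. A standalone sketch with assumed values:

    from six.moves import urllib

    prefix = 'https://api.example.com/network'
    orig_url = 'http://controller:9696/v2.0/networks'
    url_parts = list(urllib.parse.urlsplit(orig_url))
    prefix_parts = list(urllib.parse.urlsplit(prefix))
    url_parts[0:2] = prefix_parts[0:2]             # take scheme and netloc
    url_parts[2] = prefix_parts[2] + url_parts[2]  # prepend the prefix path
    print(urllib.parse.urlunsplit(url_parts).rstrip('/'))
    # https://api.example.com/network/v2.0/networks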
@@ -22,7 +22,7 @@ from oslo_config import cfg
 from oslo_log import log
 import pecan
 from pecan import request
-import six.moves.urllib.parse as urlparse
+from six.moves import urllib
 
 from neutron.api.views import versions as versions_view
 from neutron import manager
@@ -99,7 +99,7 @@ class V2Controller(object):
 
         layout = []
         for name, collection in _CORE_RESOURCES.items():
-            href = urlparse.urljoin(pecan.request.path_url, collection)
+            href = urllib.parse.urljoin(pecan.request.path_url, collection)
             resource = {'name': name,
                         'collection': collection,
                         'links': [{'rel': 'self',
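The urljoin here resolves each collection name against the request URL, and the trailing slash on the base matters. A sketch with an assumed path_url:

    from six.moves import urllib

    print(urllib.parse.urljoin('http://controller:9696/v2.0/', 'networks'))
    # http://controller:9696/v2.0/networks
    print(urllib.parse.urljoin('http://controller:9696/v2.0', 'networks'))
    # http://controller:9696/networks  (last path segment replaced)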
@@ -30,7 +30,7 @@ from oslo_db import exception as db_exc
 from oslo_policy import policy as oslo_policy
 from oslo_utils import uuidutils
 import six
-import six.moves.urllib.parse as urlparse
+from six.moves import urllib
 import webob
 from webob import exc
 import webtest
@@ -592,16 +592,16 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
         self.assertEqual(1, len(next_links))
         self.assertEqual(1, len(previous_links))
 
-        url = urlparse.urlparse(next_links[0]['href'])
+        url = urllib.parse.urlparse(next_links[0]['href'])
         self.assertEqual(url.path, _get_path('networks'))
         params['marker'] = [id2]
-        self.assertEqual(params, urlparse.parse_qs(url.query))
+        self.assertEqual(params, urllib.parse.parse_qs(url.query))
 
-        url = urlparse.urlparse(previous_links[0]['href'])
+        url = urllib.parse.urlparse(previous_links[0]['href'])
         self.assertEqual(url.path, _get_path('networks'))
         params['marker'] = [id1]
         params['page_reverse'] = ['True']
-        self.assertEqual(params, urlparse.parse_qs(url.query))
+        self.assertEqual(params, urllib.parse.parse_qs(url.query))
 
     def test_list_pagination_with_last_page(self):
         id = str(_uuid())
@@ -631,12 +631,12 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
             previous_links.append(r)
         self.assertEqual(1, len(previous_links))
 
-        url = urlparse.urlparse(previous_links[0]['href'])
+        url = urllib.parse.urlparse(previous_links[0]['href'])
         self.assertEqual(url.path, _get_path('networks'))
         expect_params = params.copy()
         expect_params['marker'] = [id]
         expect_params['page_reverse'] = ['True']
-        self.assertEqual(expect_params, urlparse.parse_qs(url.query))
+        self.assertEqual(expect_params, urllib.parse.parse_qs(url.query))
 
     def test_list_pagination_with_empty_page(self):
         return_value = []
@@ -657,12 +657,12 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
             previous_links.append(r)
         self.assertEqual(1, len(previous_links))
 
-        url = urlparse.urlparse(previous_links[0]['href'])
+        url = urllib.parse.urlparse(previous_links[0]['href'])
         self.assertEqual(url.path, _get_path('networks'))
         expect_params = params.copy()
         del expect_params['marker']
         expect_params['page_reverse'] = ['True']
-        self.assertEqual(expect_params, urlparse.parse_qs(url.query))
+        self.assertEqual(expect_params, urllib.parse.parse_qs(url.query))
 
     def test_list_pagination_reverse_with_last_page(self):
         id = str(_uuid())
@@ -693,13 +693,13 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
             next_links.append(r)
         self.assertEqual(1, len(next_links))
 
-        url = urlparse.urlparse(next_links[0]['href'])
+        url = urllib.parse.urlparse(next_links[0]['href'])
         self.assertEqual(url.path, _get_path('networks'))
         expected_params = params.copy()
         del expected_params['page_reverse']
         expected_params['marker'] = [id]
         self.assertEqual(expected_params,
-                         urlparse.parse_qs(url.query))
+                         urllib.parse.parse_qs(url.query))
 
     def test_list_pagination_reverse_with_empty_page(self):
         return_value = []
@@ -720,12 +720,12 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
             next_links.append(r)
         self.assertEqual(1, len(next_links))
 
-        url = urlparse.urlparse(next_links[0]['href'])
+        url = urllib.parse.urlparse(next_links[0]['href'])
         self.assertEqual(url.path, _get_path('networks'))
         expect_params = params.copy()
         del expect_params['marker']
         del expect_params['page_reverse']
-        self.assertEqual(expect_params, urlparse.parse_qs(url.query))
+        self.assertEqual(expect_params, urllib.parse.parse_qs(url.query))
 
     def test_create(self):
         net_id = _uuid()
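These assertions all lean on the fact that parse_qs returns every query value as a list, which is why the expected markers are wrapped in lists ([id2], ['True'], ...). A sketch with an assumed pagination link:

    from six.moves import urllib

    url = urllib.parse.urlparse(
        'http://host/v2.0/networks?marker=abc123&page_reverse=True')
    print(urllib.parse.parse_qs(url.query))
    # {'marker': ['abc123'], 'page_reverse': ['True']}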
@@ -20,7 +20,7 @@ import ssl
 import mock
 from neutron_lib import exceptions as exception
 from oslo_config import cfg
-import six.moves.urllib.request as urlrequest
+from six.moves import urllib
 import testtools
 import webob
 import webob.exc
@@ -43,12 +43,13 @@ def open_no_proxy(*args, **kwargs):
     # introduced in python 2.7.9 under PEP-0476
     # https://github.com/python/peps/blob/master/pep-0476.txt
     if hasattr(ssl, "_create_unverified_context"):
-        opener = urlrequest.build_opener(
-            urlrequest.ProxyHandler({}),
-            urlrequest.HTTPSHandler(context=ssl._create_unverified_context())
+        opener = urllib.request.build_opener(
+            urllib.request.ProxyHandler({}),
+            urllib.request.HTTPSHandler(
+                context=ssl._create_unverified_context())
         )
     else:
-        opener = urlrequest.build_opener(urlrequest.ProxyHandler({}))
+        opener = urllib.request.build_opener(urllib.request.ProxyHandler({}))
     return opener.open(*args, **kwargs)
 
 
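open_no_proxy builds an opener that ignores proxy settings and, where the interpreter has the PEP 476 hook, skips certificate verification — appropriate only for tests hitting local, self-signed endpoints. A hypothetical call (URL assumed):

    # e.g. against a test server started on a loopback port:
    resp = open_no_proxy('https://127.0.0.1:8443/')
    print(resp.getcode())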
@@ -18,7 +18,8 @@ oslotest>=3.2.0 # Apache-2.0
 stestr>=1.0.0 # Apache-2.0
 reno>=2.5.0 # Apache-2.0
 ddt>=1.0.1 # MIT
-pylint==1.4.5 # GPLv2
+astroid==1.6.5 # LGPLv2.1
+pylint==1.9.2 # GPLv2
 # Needed to run DB commands in virtualenvs
 PyMySQL>=0.7.6 # MIT License
 bashate>=0.5.1 # Apache-2.0
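Since the same pylint/astroid versions now have to appear in two files, drift between them would be easy to miss. A throwaway cross-check — file names assumed to match this repo's layout:

    import re

    def pins(path):
        # Map package name to its pinned version for the two linting pins.
        with open(path) as f:
            return dict(re.findall(r'^(astroid|pylint)==(\S+)', f.read(), re.M))

    assert pins('test-requirements.txt') == pins('lower-constraints.txt')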