Python3: use six.iteritems() instead of dict.iteritems()

This also adds a check to neutron/hacking/checks.py that should catch
this error in the future.

Blueprint: neutron-python3
Change-Id: Ie7b833ffa173772d39b85ee3ecaddace18e1274f

This commit is contained in:
parent 5eddb2d274
commit 3751f9ab34
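The pattern applied throughout the change is summarized by the sketch below (an illustrative snippet, not taken from any one Neutron module; the dictionary name is made up): dict.iteritems() exists only on Python 2, while six.iteritems() returns an efficient iterator on both Python 2 and Python 3.

    import six

    flow_dict = {'proto': 'tcp', 'priority': 1}

    # Python 2 only -- raises AttributeError on Python 3:
    #     for key, value in flow_dict.iteritems():
    #         ...

    # Portable spelling used by this patch:
    for key, value in six.iteritems(flow_dict):
        print(key, value)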
@@ -16,6 +16,7 @@ Neutron Specific Commandments
 - [N324] Prevent use of deprecated contextlib.nested.
 - [N325] Python 3: Do not use xrange.
 - [N326] Python 3: do not use basestring.
+- [N327] Python 3: do not use dict.iteritems.
 
 Creating Unit Tests
 -------------------
@@ -555,7 +555,7 @@ def _build_flow_expr_str(flow_dict, cmd):
 raise exceptions.InvalidInput(error_message=msg)
 actions = "actions=%s" % flow_dict.pop('actions')
 
-for key, value in flow_dict.iteritems():
+for key, value in six.iteritems(flow_dict):
 if key == 'proto':
 flow_expr_arr.append(value)
 else:
@@ -15,6 +15,7 @@
 import netaddr
 
 from oslo_log import log as logging
+import six
 
 from neutron.agent.l3 import namespaces
 from neutron.agent.linux import ip_lib
@@ -499,7 +500,7 @@ class RouterInfo(object):
 if ex_gw_port:
 def _gateway_ports_equal(port1, port2):
 def _get_filtered_dict(d, ignore):
-return dict((k, v) for k, v in d.iteritems()
+return dict((k, v) for k, v in six.iteritems(d)
 if k not in ignore)
 
 keys_to_ignore = set(['binding:host_id'])
@@ -74,7 +74,7 @@ class DictModel(dict):
 else:
 return item
 
-for key, value in self.iteritems():
+for key, value in six.iteritems(self):
 if isinstance(value, (list, tuple)):
 # Keep the same type but convert dicts to DictModels
 self[key] = type(value)(
@@ -17,6 +17,7 @@ import collections
 import netaddr
 from oslo_config import cfg
 from oslo_log import log as logging
+import six
 
 from neutron.agent import firewall
 from neutron.agent.linux import ipset_manager
@@ -591,7 +592,7 @@ class IptablesFirewallDriver(firewall.FirewallDriver):
 remote_sgs_to_remove = self._determine_remote_sgs_to_remove(
 filtered_ports)
 
-for ip_version, remote_sg_ids in remote_sgs_to_remove.iteritems():
+for ip_version, remote_sg_ids in six.iteritems(remote_sgs_to_remove):
 self._clear_sg_members(ip_version, remote_sg_ids)
 if self.enable_ipset:
 self._remove_ipsets_for_remote_sgs(ip_version, remote_sg_ids)
@@ -613,7 +614,7 @@ class IptablesFirewallDriver(firewall.FirewallDriver):
 remote_group_id_sets = self._get_remote_sg_ids_sets_by_ipversion(
 filtered_ports)
 for ip_version, remote_group_id_set in (
-remote_group_id_sets.iteritems()):
+six.iteritems(remote_group_id_sets)):
 sgs_to_remove_per_ipversion[ip_version].update(
 set(self.pre_sg_members) - remote_group_id_set)
 return sgs_to_remove_per_ipversion
@@ -623,8 +624,8 @@ class IptablesFirewallDriver(firewall.FirewallDriver):
 remote_group_id_sets = {constants.IPv4: set(),
 constants.IPv6: set()}
 for port in filtered_ports:
-for ip_version, sg_ids in self._get_remote_sg_ids(
-port).iteritems():
+remote_sg_ids = self._get_remote_sg_ids(port)
+for ip_version, sg_ids in six.iteritems(remote_sg_ids):
 remote_group_id_sets[ip_version].update(sg_ids)
 return remote_group_id_sets
 
@@ -28,6 +28,7 @@ from oslo_concurrency import lockutils
 from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_utils import excutils
+import six
 
 from neutron.agent.common import config
 from neutron.agent.linux import iptables_comments as ic
@@ -347,7 +348,7 @@ class IptablesManager(object):
 elif ip_version == 6:
 tables = self.ipv6
 
-for table, chains in builtin_chains[ip_version].iteritems():
+for table, chains in six.iteritems(builtin_chains[ip_version]):
 for chain in chains:
 tables[table].add_chain(chain)
 tables[table].add_rule(chain, '-j $%s' %
@@ -17,6 +17,7 @@ import urllib
 
 from oslo_config import cfg
 from oslo_log import log as logging
+import six
 from webob import exc
 
 from neutron.common import constants
@@ -36,7 +37,7 @@ def get_filters(request, attr_info, skips=[]):
 {'check': [u'a', u'b'], 'name': [u'Bob']}
 """
 res = {}
-for key, values in request.GET.dict_of_lists().iteritems():
+for key, values in six.iteritems(request.GET.dict_of_lists()):
 if key in skips:
 continue
 values = [v for v in values if v]
@@ -170,7 +170,7 @@ class ExtensionDescriptor(object):
 if not extension_attrs_map:
 return
 
-for resource, attrs in extension_attrs_map.iteritems():
+for resource, attrs in six.iteritems(extension_attrs_map):
 extended_attrs = extended_attributes.get(resource)
 if extended_attrs:
 attrs.update(extended_attrs)
@@ -200,7 +200,7 @@ class ActionExtensionController(wsgi.Controller):
 def action(self, request, id):
 input_dict = self._deserialize(request.body,
 request.get_content_type())
-for action_name, handler in self.action_handlers.iteritems():
+for action_name, handler in six.iteritems(self.action_handlers):
 if action_name in input_dict:
 return handler(input_dict, request, id)
 # no action handler found (bump to downstream application)
@@ -242,7 +242,7 @@ class ExtensionController(wsgi.Controller):
 
 def index(self, request):
 extensions = []
-for _alias, ext in self.extension_manager.extensions.iteritems():
+for _alias, ext in six.iteritems(self.extension_manager.extensions):
 extensions.append(self._translate(ext))
 return dict(extensions=extensions)
 
@@ -283,7 +283,7 @@ class ExtensionMiddleware(wsgi.Middleware):
 
 LOG.debug('Extended resource: %s',
 resource.collection)
-for action, method in resource.collection_actions.iteritems():
+for action, method in six.iteritems(resource.collection_actions):
 conditions = dict(method=[method])
 path = "/%s/%s" % (resource.collection, action)
 with mapper.submapper(controller=resource.controller,
@@ -474,11 +474,11 @@ class ExtensionManager(object):
 continue
 try:
 extended_attrs = ext.get_extended_resources(version)
-for resource, resource_attrs in extended_attrs.iteritems():
+for res, resource_attrs in six.iteritems(extended_attrs):
-if attr_map.get(resource, None):
+if attr_map.get(res, None):
-attr_map[resource].update(resource_attrs)
+attr_map[res].update(resource_attrs)
 else:
-attr_map[resource] = resource_attrs
+attr_map[res] = resource_attrs
 except AttributeError:
 LOG.exception(_LE("Error fetching extended attributes for "
 "extension '%s'"), ext.get_name())
@@ -14,6 +14,7 @@
 
 from oslo_log import log as logging
 import oslo_messaging
+import six
 
 from neutron.common import constants
 from neutron.common import rpc as n_rpc
@@ -58,7 +59,7 @@ class MeteringAgentNotifyAPI(object):
 l3_router.append(router)
 l3_routers[l3_agent.host] = l3_router
 
-for host, routers in l3_routers.iteritems():
+for host, routers in six.iteritems(l3_routers):
 cctxt = self.client.prepare(server=host)
 cctxt.cast(context, method, routers=routers)
 
@@ -17,6 +17,7 @@ from oslo_config import cfg
 from oslo_log import log as logging
 import oslo_messaging
 from oslo_serialization import jsonutils
+import six
 
 from neutron.common import constants
 from neutron.common import exceptions
@@ -162,7 +163,7 @@ class L3RpcCallback(object):
 def update_floatingip_statuses(self, context, router_id, fip_statuses):
 """Update operational status for a floating IP."""
 with context.session.begin(subtransactions=True):
-for (floatingip_id, status) in fip_statuses.iteritems():
+for (floatingip_id, status) in six.iteritems(fip_statuses):
 LOG.debug("New status for floating IP %(floatingip_id)s: "
 "%(status)s", {'floatingip_id': floatingip_id,
 'status': status})
@@ -409,7 +409,7 @@ def _validate_dict_item(key, key_validator, data):
 # TODO(salv-orlando): Structure of dict attributes should be improved
 # to avoid iterating over items
 val_func = val_params = None
-for (k, v) in key_validator.iteritems():
+for (k, v) in six.iteritems(key_validator):
 if k.startswith('type:'):
 # ask forgiveness, not permission
 try:
@@ -435,7 +435,7 @@ def _validate_dict(data, key_specs=None):
 return
 
 # Check whether all required keys are present
-required_keys = [key for key, spec in key_specs.iteritems()
+required_keys = [key for key, spec in six.iteritems(key_specs)
 if spec.get('required')]
 
 if required_keys:
@@ -445,7 +445,7 @@ def _validate_dict(data, key_specs=None):
 
 # Perform validation and conversion of all values
 # according to the specifications.
-for key, key_validator in [(k, v) for k, v in key_specs.iteritems()
+for key, key_validator in [(k, v) for k, v in six.iteritems(key_specs)
 if k in data]:
 msg = _validate_dict_item(key, key_validator, data)
 if msg:
@@ -546,7 +546,7 @@ def convert_kvp_list_to_dict(kvp_list):
 key, value = convert_kvp_str_to_list(kvp_str)
 kvp_map.setdefault(key, set())
 kvp_map[key].add(value)
-return dict((x, list(y)) for x, y in kvp_map.iteritems())
+return dict((x, list(y)) for x, y in six.iteritems(kvp_map))
 
 
 def convert_none_to_empty_list(value):
@@ -19,6 +19,7 @@ import netaddr
 from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_utils import excutils
+import six
 import webob.exc
 
 from neutron.api import api_common
@@ -109,7 +110,7 @@ class Controller(object):
 self._resource)
 
 def _get_primary_key(self, default_primary_key='id'):
-for key, value in self._attr_info.iteritems():
+for key, value in six.iteritems(self._attr_info):
 if value.get('primary_key', False):
 return key
 return default_primary_key
@@ -170,7 +171,7 @@ class Controller(object):
 def _filter_attributes(self, context, data, fields_to_strip=None):
 if not fields_to_strip:
 return data
-return dict(item for item in data.iteritems()
+return dict(item for item in six.iteritems(data)
 if (item[0] not in fields_to_strip))
 
 def _do_field_list(self, original_fields):
@@ -517,7 +518,7 @@ class Controller(object):
 # Load object to check authz
 # but pass only attributes in the original body and required
 # by the policy engine to the policy 'brain'
-field_list = [name for (name, value) in self._attr_info.iteritems()
+field_list = [name for (name, value) in six.iteritems(self._attr_info)
 if (value.get('required_by_policy') or
 value.get('primary_key') or
 'default' not in value)]
@@ -621,7 +622,7 @@ class Controller(object):
 Controller._verify_attributes(res_dict, attr_info)
 
 if is_create: # POST
-for attr, attr_vals in attr_info.iteritems():
+for attr, attr_vals in six.iteritems(attr_info):
 if attr_vals['allow_post']:
 if ('default' not in attr_vals and
 attr not in res_dict):
@@ -635,12 +636,12 @@ class Controller(object):
 msg = _("Attribute '%s' not allowed in POST") % attr
 raise webob.exc.HTTPBadRequest(msg)
 else: # PUT
-for attr, attr_vals in attr_info.iteritems():
+for attr, attr_vals in six.iteritems(attr_info):
 if attr in res_dict and not attr_vals['allow_put']:
 msg = _("Cannot update read-only attribute %s") % attr
 raise webob.exc.HTTPBadRequest(msg)
 
-for attr, attr_vals in attr_info.iteritems():
+for attr, attr_vals in six.iteritems(attr_info):
 if (attr not in res_dict or
 res_dict[attr] is attributes.ATTR_NOT_SPECIFIED):
 continue
@@ -16,6 +16,7 @@
 from oslo_config import cfg
 from oslo_log import log as logging
 import routes as routes_mapper
+import six
 import six.moves.urllib.parse as urlparse
 import webob
 import webob.dec
@@ -51,7 +52,7 @@ class Index(wsgi.Application):
 metadata = {}
 
 layout = []
-for name, collection in self.resources.iteritems():
+for name, collection in six.iteritems(self.resources):
 href = urlparse.urljoin(req.path_url, collection)
 resource = {'name': name,
 'collection': collection,
@@ -35,6 +35,7 @@ from oslo_concurrency import lockutils
 from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_utils import excutils
+import six
 
 from neutron.common import constants as q_const
 
@@ -235,7 +236,7 @@ def compare_elements(a, b):
 
 def dict2str(dic):
 return ','.join("%s=%s" % (key, val)
-for key, val in sorted(dic.iteritems()))
+for key, val in sorted(six.iteritems(dic)))
 
 
 def str2dict(string):
@@ -89,8 +89,8 @@ class CommonDbMixin(object):
 else:
 query_filter = (model.tenant_id == context.tenant_id)
 # Execute query hooks registered from mixins and plugins
-for _name, hooks in self._model_query_hooks.get(model,
-{}).iteritems():
+for _name, hooks in six.iteritems(self._model_query_hooks.get(model,
+{})):
 query_hook = hooks.get('query')
 if isinstance(query_hook, six.string_types):
 query_hook = getattr(self, query_hook, None)
@@ -132,15 +132,15 @@ class CommonDbMixin(object):
 
 def _apply_filters_to_query(self, query, model, filters):
 if filters:
-for key, value in filters.iteritems():
+for key, value in six.iteritems(filters):
 column = getattr(model, key, None)
 if column:
 if not value:
 query = query.filter(sql.false())
 return query
 query = query.filter(column.in_(value))
-for _name, hooks in self._model_query_hooks.get(model,
-{}).iteritems():
+for _nam, hooks in six.iteritems(self._model_query_hooks.get(model,
+{})):
 result_filter = hooks.get('result_filters', None)
 if isinstance(result_filter, six.string_types):
 result_filter = getattr(self, result_filter, None)
@@ -201,4 +201,4 @@ class CommonDbMixin(object):
 """
 columns = [c.name for c in model.__table__.columns]
 return dict((k, v) for (k, v) in
-data.iteritems() if k in columns)
+six.iteritems(data) if k in columns)
@@ -17,6 +17,7 @@ from oslo_config import cfg
 from oslo_db import exception as db_exc
 from oslo_log import log as logging
 import oslo_messaging
+import six
 import sqlalchemy as sa
 from sqlalchemy import func
 from sqlalchemy import or_
@@ -348,7 +349,7 @@ class L3AgentSchedulerDbMixin(l3agentscheduler.L3AgentSchedulerPluginBase,
 if active is not None:
 query = (query.filter(agents_db.Agent.admin_state_up == active))
 if filters:
-for key, value in filters.iteritems():
+for key, value in six.iteritems(filters):
 column = getattr(agents_db.Agent, key, None)
 if column:
 if not value:
@@ -19,6 +19,7 @@ from sqlalchemy import orm
 from sqlalchemy.orm import exc
 
 from oslo_utils import excutils
+import six
 
 from neutron.api.rpc.agentnotifiers import l3_rpc_agent_api
 from neutron.api.v2 import attributes
@@ -1013,7 +1014,7 @@ class L3_NAT_dbonly_mixin(l3.RouterPluginBase):
 marker_obj = self._get_marker_obj(context, 'floatingip', limit,
 marker)
 if filters is not None:
-for key, val in API_TO_DB_COLUMN_MAP.iteritems():
+for key, val in six.iteritems(API_TO_DB_COLUMN_MAP):
 if key in filters:
 filters[val] = filters.pop(key)
 
@@ -26,6 +26,7 @@ revision = '14be42f3d0a5'
 down_revision = '26b54cf9024d'
 
 from alembic import op
+import six
 import sqlalchemy as sa
 
 from neutron.common import exceptions
@@ -69,7 +70,7 @@ def check_sanity(connection):
 raise DuplicateSecurityGroupsNamedDefault(
 duplicates='; '.join('tenant %s: %s' %
 (tenant_id, ', '.join(groups))
-for tenant_id, groups in res.iteritems()))
+for tenant_id, groups in six.iteritems(res)))
 
 
 def get_duplicate_default_security_groups(connection):
@@ -15,6 +15,7 @@
 import re
 
 import pep8
+import six
 
 # Guidelines for writing new hacking checks
 #
@@ -48,7 +49,7 @@ def _regex_for_level(level, hint):
 
 log_translation_hint = re.compile(
 '|'.join('(?:%s)' % _regex_for_level(level, hint)
-for level, hint in _all_log_levels.iteritems()))
+for level, hint in six.iteritems(_all_log_levels)))
 
 oslo_namespace_imports_dot = re.compile(r"import[\s]+oslo[.][^\s]+")
 oslo_namespace_imports_from_dot = re.compile(r"from[\s]+oslo[.]")
@@ -166,6 +167,12 @@ def check_no_basestring(logical_line):
 yield(0, msg)
 
 
+def check_python3_no_iteritems(logical_line):
+if re.search(r".*\.iteritems\(\)", logical_line):
+msg = ("N327: Use six.iteritems() instead of dict.iteritems().")
+yield(0, msg)
+
+
 def factory(register):
 register(validate_log_translations)
 register(use_jsonutils)
@@ -175,3 +182,4 @@ def factory(register):
 register(check_no_contextlib_nested)
 register(check_python3_xrange)
 register(check_no_basestring)
+register(check_python3_no_iteritems)
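As a sanity check, the new N327 guard added above can be exercised directly, roughly the way Neutron's hacking checks are driven in unit tests (the standalone calls below are illustrative; in practice pep8/flake8 feeds the check one logical line at a time):

    import re


    def check_python3_no_iteritems(logical_line):
        if re.search(r".*\.iteritems\(\)", logical_line):
            msg = ("N327: Use six.iteritems() instead of dict.iteritems().")
            yield(0, msg)


    # Flagged: a bare .iteritems() call on any object.
    print(list(check_python3_no_iteritems("for k, v in d.iteritems():")))
    # -> [(0, 'N327: Use six.iteritems() instead of dict.iteritems().')]

    # Not flagged: six.iteritems(d) does not match the `.iteritems()` pattern.
    print(list(check_python3_no_iteritems("for k, v in six.iteritems(d):")))
    # -> []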
|
@ -19,6 +19,7 @@ from oslo_config import cfg
|
|||||||
from oslo_log import log as logging
|
from oslo_log import log as logging
|
||||||
import oslo_messaging
|
import oslo_messaging
|
||||||
from oslo_utils import importutils
|
from oslo_utils import importutils
|
||||||
|
import six
|
||||||
|
|
||||||
from neutron.common import utils
|
from neutron.common import utils
|
||||||
from neutron.i18n import _LE, _LI
|
from neutron.i18n import _LE, _LI
|
||||||
@ -224,5 +225,6 @@ class NeutronManager(object):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def get_service_plugins(cls):
|
def get_service_plugins(cls):
|
||||||
# Return weakrefs to minimize gc-preventing references.
|
# Return weakrefs to minimize gc-preventing references.
|
||||||
|
service_plugins = cls.get_instance().service_plugins
|
||||||
return dict((x, weakref.proxy(y))
|
return dict((x, weakref.proxy(y))
|
||||||
for x, y in cls.get_instance().service_plugins.iteritems())
|
for x, y in six.iteritems(service_plugins))
|
||||||
|
@ -25,6 +25,7 @@ eventlet.monkey_patch()
|
|||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from oslo_log import log as logging
|
from oslo_log import log as logging
|
||||||
import oslo_messaging
|
import oslo_messaging
|
||||||
|
import six
|
||||||
|
|
||||||
from neutron.agent.common import ovs_lib
|
from neutron.agent.common import ovs_lib
|
||||||
from neutron.agent.linux import ip_lib
|
from neutron.agent.linux import ip_lib
|
||||||
@ -185,7 +186,7 @@ class SdnveNeutronAgent(object):
|
|||||||
:param interface_mappings: map physical net names to interface names.
|
:param interface_mappings: map physical net names to interface names.
|
||||||
'''
|
'''
|
||||||
|
|
||||||
for physical_network, interface in interface_mappings.iteritems():
|
for physical_network, interface in six.iteritems(interface_mappings):
|
||||||
LOG.info(_LI("Mapping physical network %(physical_network)s to "
|
LOG.info(_LI("Mapping physical network %(physical_network)s to "
|
||||||
"interface %(interface)s"),
|
"interface %(interface)s"),
|
||||||
{'physical_network': physical_network,
|
{'physical_network': physical_network,
|
||||||
|
@@ -15,6 +15,7 @@
 
 from oslo_db import exception as db_exc
 from oslo_log import log
+import six
 from sqlalchemy import or_
 from sqlalchemy.orm import exc
 
@@ -268,7 +269,7 @@ def get_ports_and_sgs(context, port_ids):
 return []
 ports_to_sg_ids = get_sg_ids_grouped_by_port(context, port_ids)
 return [make_port_dict_with_security_groups(port, sec_groups)
-for port, sec_groups in ports_to_sg_ids.iteritems()]
+for port, sec_groups in six.iteritems(ports_to_sg_ids)]
 
 
 def get_sg_ids_grouped_by_port(context, port_ids):
@@ -19,6 +19,7 @@ from oslo_config import cfg
 from oslo_log import log
 from oslo_serialization import jsonutils
 import requests
+import six
 
 from neutron.plugins.ml2 import driver_api as api
 
@@ -173,7 +174,7 @@ class NCSMechanismDriver(api.MechanismDriver):
 """
 if isinstance(obj, dict):
 obj = dict((self.escape(k), self.escape_keys(v))
-for k, v in obj.iteritems())
+for k, v in six.iteritems(obj))
 if isinstance(obj, list):
 obj = [self.escape_keys(x) for x in obj]
 return obj
@@ -16,6 +16,7 @@
 from oslo_config import cfg
 from oslo_db import exception as db_exc
 from oslo_log import log
+import six
 import sqlalchemy as sa
 
 from neutron.common import exceptions as exc
@@ -97,7 +98,7 @@ class FlatTypeDriver(helpers.BaseTypeDriver):
 % physical_network)
 raise exc.InvalidInput(error_message=msg)
 
-for key, value in segment.iteritems():
+for key, value in six.iteritems(segment):
 if value and key not in [api.NETWORK_TYPE,
 api.PHYSICAL_NETWORK]:
 msg = _("%s prohibited for flat provider network") % key
@@ -14,6 +14,7 @@
 # under the License.
 
 from oslo_log import log
+import six
 
 from neutron.common import exceptions as exc
 from neutron.i18n import _LI
@@ -46,7 +47,7 @@ class LocalTypeDriver(api.TypeDriver):
 return False
 
 def validate_provider_segment(self, segment):
-for key, value in segment.iteritems():
+for key, value in six.iteritems(segment):
 if value and key != api.NETWORK_TYPE:
 msg = _("%s prohibited for local provider network") % key
 raise exc.InvalidInput(error_message=msg)
@@ -15,6 +15,7 @@
 
 from oslo_config import cfg
 from oslo_log import log
+import six
 import stevedore
 
 from neutron.api.v2 import attributes
@@ -164,7 +165,7 @@ class TypeManager(stevedore.named.NamedExtensionManager):
 network[provider.SEGMENTATION_ID] = segment[api.SEGMENTATION_ID]
 
 def initialize(self):
-for network_type, driver in self.drivers.iteritems():
+for network_type, driver in six.iteritems(self.drivers):
 LOG.info(_LI("Initializing driver for type '%s'"), network_type)
 driver.obj.initialize()
 
@@ -22,6 +22,7 @@ import netaddr
 from oslo_config import cfg
 from oslo_log import log as logging
 import oslo_messaging
+import six
 from six import moves
 
 from neutron.agent.common import ovs_lib
@@ -340,7 +341,7 @@ class OVSNeutronAgent(sg_rpc.SecurityGroupAgentRpcCallbackMixin,
 start_listening=False)
 
 def get_net_uuid(self, vif_id):
-for network_id, vlan_mapping in self.local_vlan_map.iteritems():
+for network_id, vlan_mapping in six.iteritems(self.local_vlan_map):
 if vif_id in vlan_mapping.vif_ports:
 return network_id
 
@@ -917,7 +918,7 @@ class OVSNeutronAgent(sg_rpc.SecurityGroupAgentRpcCallbackMixin,
 ip_wrapper = ip_lib.IPWrapper()
 ovs = ovs_lib.BaseOVS()
 ovs_bridges = ovs.get_bridges()
-for physical_network, bridge in bridge_mappings.iteritems():
+for physical_network, bridge in six.iteritems(bridge_mappings):
 LOG.info(_LI("Mapping physical network %(physical_network)s to "
 "bridge %(bridge)s"),
 {'physical_network': physical_network,
@@ -18,6 +18,7 @@ import os
 import re
 
 from oslo_log import log as logging
+import six
 
 from neutron.i18n import _LE, _LW
 from neutron.plugins.sriovnicagent.common import exceptions as exc
@@ -259,7 +260,7 @@ class ESwitchManager(object):
 """
 if exclude_devices is None:
 exclude_devices = {}
-for phys_net, dev_name in device_mappings.iteritems():
+for phys_net, dev_name in six.iteritems(device_mappings):
 self._create_emb_switch(phys_net, dev_name,
 exclude_devices.get(dev_name, set()))
 
@@ -25,6 +25,7 @@ import re
 from oslo_log import log as logging
 from oslo_utils import excutils
 from oslo_utils import importutils
+import six
 
 from neutron.api.v2 import attributes
 from neutron.common import constants as const
@@ -146,7 +147,7 @@ def _should_validate_sub_attributes(attribute, sub_attr):
 validate = attribute.get('validate')
 return (validate and isinstance(sub_attr, collections.Iterable) and
 any([k.startswith('type:dict') and
-v for (k, v) in validate.iteritems()]))
+v for (k, v) in six.iteritems(validate)]))
 
 
 def _build_subattr_match_rule(attr_name, attr, action, target):
@@ -13,6 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import six
 from tempest_lib.common.utils import data_utils
 
 from neutron.tests.api import base
@@ -59,7 +60,7 @@ class QuotasTest(base.BaseAdminNetworkTest):
 quota_set = self.admin_client.update_quotas(tenant_id,
 **new_quotas)
 self.addCleanup(self.admin_client.reset_quotas, tenant_id)
-for key, value in new_quotas.iteritems():
+for key, value in six.iteritems(new_quotas):
 self.assertEqual(value, quota_set[key])
 
 # Confirm our tenant is listed among tenants with non default quotas
@@ -73,7 +74,7 @@ class QuotasTest(base.BaseAdminNetworkTest):
 # Confirm from API quotas were changed as requested for tenant
 quota_set = self.admin_client.show_quotas(tenant_id)
 quota_set = quota_set['quota']
-for key, value in new_quotas.iteritems():
+for key, value in six.iteritems(new_quotas):
 self.assertEqual(value, quota_set[key])
 
 # Reset quotas to default and confirm
|
@ -16,6 +16,7 @@
|
|||||||
import netaddr
|
import netaddr
|
||||||
import random
|
import random
|
||||||
|
|
||||||
|
import six
|
||||||
from tempest_lib.common.utils import data_utils
|
from tempest_lib.common.utils import data_utils
|
||||||
from tempest_lib import exceptions as lib_exc
|
from tempest_lib import exceptions as lib_exc
|
||||||
|
|
||||||
@ -126,7 +127,7 @@ class NetworksTestDHCPv6(base.BaseNetworkTest):
|
|||||||
):
|
):
|
||||||
kwargs = {'ipv6_ra_mode': ra_mode,
|
kwargs = {'ipv6_ra_mode': ra_mode,
|
||||||
'ipv6_address_mode': add_mode}
|
'ipv6_address_mode': add_mode}
|
||||||
kwargs = {k: v for k, v in kwargs.iteritems() if v}
|
kwargs = {k: v for k, v in six.iteritems(kwargs) if v}
|
||||||
real_ip, eui_ip = self._get_ips_from_subnet(**kwargs)
|
real_ip, eui_ip = self._get_ips_from_subnet(**kwargs)
|
||||||
self._clean_network()
|
self._clean_network()
|
||||||
self.assertEqual(eui_ip, real_ip,
|
self.assertEqual(eui_ip, real_ip,
|
||||||
@ -286,7 +287,7 @@ class NetworksTestDHCPv6(base.BaseNetworkTest):
|
|||||||
):
|
):
|
||||||
kwargs = {'ipv6_ra_mode': ra_mode,
|
kwargs = {'ipv6_ra_mode': ra_mode,
|
||||||
'ipv6_address_mode': add_mode}
|
'ipv6_address_mode': add_mode}
|
||||||
kwargs = {k: v for k, v in kwargs.iteritems() if v}
|
kwargs = {k: v for k, v in six.iteritems(kwargs) if v}
|
||||||
subnet = self.create_subnet(self.network, **kwargs)
|
subnet = self.create_subnet(self.network, **kwargs)
|
||||||
port = self.create_port(self.network)
|
port = self.create_port(self.network)
|
||||||
port_ip = next(iter(port['fixed_ips']), None)['ip_address']
|
port_ip = next(iter(port['fixed_ips']), None)['ip_address']
|
||||||
@ -313,7 +314,7 @@ class NetworksTestDHCPv6(base.BaseNetworkTest):
|
|||||||
):
|
):
|
||||||
kwargs = {'ipv6_ra_mode': ra_mode,
|
kwargs = {'ipv6_ra_mode': ra_mode,
|
||||||
'ipv6_address_mode': add_mode}
|
'ipv6_address_mode': add_mode}
|
||||||
kwargs = {k: v for k, v in kwargs.iteritems() if v}
|
kwargs = {k: v for k, v in six.iteritems(kwargs) if v}
|
||||||
subnet = self.create_subnet(self.network, **kwargs)
|
subnet = self.create_subnet(self.network, **kwargs)
|
||||||
ip_range = netaddr.IPRange(subnet["allocation_pools"][0]["start"],
|
ip_range = netaddr.IPRange(subnet["allocation_pools"][0]["start"],
|
||||||
subnet["allocation_pools"][0]["end"])
|
subnet["allocation_pools"][0]["end"])
|
||||||
@@ -391,7 +392,7 @@ class NetworksTestDHCPv6(base.BaseNetworkTest):
 ):
 kwargs = {'ipv6_ra_mode': ra_mode,
 'ipv6_address_mode': add_mode}
-kwargs = {k: v for k, v in kwargs.iteritems() if v}
+kwargs = {k: v for k, v in six.iteritems(kwargs) if v}
 subnet, port = self._create_subnet_router(kwargs)
 port_ip = next(iter(port['fixed_ips']), None)['ip_address']
 self._clean_network()
@@ -12,6 +12,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import six
 from tempest_lib.common.utils import data_utils
 from tempest_lib import exceptions as lib_exc
 
@@ -142,7 +143,7 @@ class FWaaSExtensionTestJSON(base.BaseNetworkTest):
 def test_show_firewall_rule(self):
 # show a created firewall rule
 fw_rule = self.client.show_firewall_rule(self.fw_rule['id'])
-for key, value in fw_rule['firewall_rule'].iteritems():
+for key, value in six.iteritems(fw_rule['firewall_rule']):
 self.assertEqual(self.fw_rule[key], value)
 
 @test.idempotent_id('1086dd93-a4c0-4bbb-a1bd-6d4bc62c199f')
@@ -184,7 +185,7 @@ class FWaaSExtensionTestJSON(base.BaseNetworkTest):
 # show a created firewall policy
 fw_policy = self.client.show_firewall_policy(self.fw_policy['id'])
 fw_policy = fw_policy['firewall_policy']
-for key, value in fw_policy.iteritems():
+for key, value in six.iteritems(fw_policy):
 self.assertEqual(self.fw_policy[key], value)
 
 @test.idempotent_id('02082a03-3cdd-4789-986a-1327dd80bfb7')
@@ -213,7 +214,7 @@ class FWaaSExtensionTestJSON(base.BaseNetworkTest):
 firewall = self.client.show_firewall(firewall_id)
 firewall = firewall['firewall']
 
-for key, value in firewall.iteritems():
+for key, value in six.iteritems(firewall):
 if key == 'status':
 continue
 self.assertEqual(created_firewall[key], value)
@@ -13,6 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import six
 from tempest_lib.common.utils import data_utils
 from tempest_lib import decorators
 
@@ -72,7 +73,7 @@ class LoadBalancerTestJSON(base.BaseNetworkTest):
 body = create_obj(**kwargs)
 obj = body[obj_name]
 self.addCleanup(delete_obj, obj['id'])
-for key, value in obj.iteritems():
+for key, value in six.iteritems(obj):
 # It is not relevant to filter by all arguments. That is why
 # there is a list of attr to except
 if key not in attr_exceptions:
@@ -169,7 +170,7 @@ class LoadBalancerTestJSON(base.BaseNetworkTest):
 # Verifies the details of a vip
 body = self.client.show_vip(self.vip['id'])
 vip = body['vip']
-for key, value in vip.iteritems():
+for key, value in six.iteritems(vip):
 # 'status' should not be confirmed in api tests
 if key != 'status':
 self.assertEqual(self.vip[key], value)
@@ -187,7 +188,7 @@ class LoadBalancerTestJSON(base.BaseNetworkTest):
 # Verifies the details of a pool
 body = self.client.show_pool(pool['id'])
 shown_pool = body['pool']
-for key, value in pool.iteritems():
+for key, value in six.iteritems(pool):
 # 'status' should not be confirmed in api tests
 if key != 'status':
 self.assertEqual(value, shown_pool[key])
@@ -251,7 +252,7 @@ class LoadBalancerTestJSON(base.BaseNetworkTest):
 # Verifies the details of a member
 body = self.client.show_member(self.member['id'])
 member = body['member']
-for key, value in member.iteritems():
+for key, value in six.iteritems(member):
 # 'status' should not be confirmed in api tests
 if key != 'status':
 self.assertEqual(self.member[key], value)
@@ -330,7 +331,7 @@ class LoadBalancerTestJSON(base.BaseNetworkTest):
 # Verifies the details of a health_monitor
 body = self.client.show_health_monitor(self.health_monitor['id'])
 health_monitor = body['health_monitor']
-for key, value in health_monitor.iteritems():
+for key, value in six.iteritems(health_monitor):
 # 'status' should not be confirmed in api tests
 if key != 'status':
 self.assertEqual(self.health_monitor[key], value)
@@ -15,6 +15,7 @@
 import itertools
 
 import netaddr
+import six
 from tempest_lib.common.utils import data_utils
 from tempest_lib import exceptions as lib_exc
 
@@ -163,7 +164,7 @@ class NetworksTestJSON(base.BaseNetworkTest):
 **kwargs)
 compare_args_full = dict(gateway_ip=gateway, cidr=cidr,
 mask_bits=mask_bits, **kwargs)
-compare_args = dict((k, v) for k, v in compare_args_full.iteritems()
+compare_args = dict((k, v) for k, v in six.iteritems(compare_args_full)
 if v is not None)
 
 if 'dns_nameservers' in set(subnet).intersection(compare_args):
@@ -14,6 +14,7 @@
 # under the License.
 
 import netaddr
+import six
 from tempest_lib.common.utils import data_utils
 
 from neutron.tests.api import base_routers as base
@@ -173,7 +174,7 @@ class RoutersTest(base.BaseRouterTest):
 self.assertIsNone(actual_ext_gw_info)
 return
 # Verify only keys passed in exp_ext_gw_info
-for k, v in exp_ext_gw_info.iteritems():
+for k, v in six.iteritems(exp_ext_gw_info):
 self.assertEqual(v, actual_ext_gw_info[k])
 
 def _verify_gateway_port(self, router_id):
@@ -13,6 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import six
 from tempest_lib.common.utils import data_utils
 from tempest_lib import exceptions as lib_exc
 
@@ -79,7 +80,7 @@ class VPNaaSTestJSON(base.BaseAdminNetworkTest):
 
 def _assertExpected(self, expected, actual):
 # Check if not expected keys/values exists in actual response body
-for key, value in expected.iteritems():
+for key, value in six.iteritems(expected):
 self.assertIn(key, actual)
 self.assertEqual(value, actual[key])
 
@@ -250,7 +251,7 @@ class VPNaaSTestJSON(base.BaseAdminNetworkTest):
 # Confirm that update was successful by verifying using 'show'
 body = self.client.show_ikepolicy(ikepolicy['id'])
 ike_policy = body['ikepolicy']
-for key, value in new_ike.iteritems():
+for key, value in six.iteritems(new_ike):
 self.assertIn(key, ike_policy)
 self.assertEqual(value, ike_policy[key])
 
@@ -32,6 +32,7 @@ from oslo_concurrency.fixture import lockutils
 from oslo_config import cfg
 from oslo_messaging import conffixture as messaging_conffixture
 from oslo_utils import strutils
+import six
 import testtools
 
 from neutron.agent.linux import external_process
@@ -182,7 +183,7 @@ class DietTestCase(testtools.TestCase):
 self.assertEqual(expect_val, actual_val)
 
 def sort_dict_lists(self, dic):
-for key, value in dic.iteritems():
+for key, value in six.iteritems(dic):
 if isinstance(value, list):
 dic[key] = sorted(value)
 elif isinstance(value, dict):
@@ -361,7 +362,7 @@ class BaseTestCase(DietTestCase):
 test by the fixtures cleanup process.
 """
 group = kw.pop('group', None)
-for k, v in kw.iteritems():
+for k, v in six.iteritems(kw):
 CONF.set_override(k, v, group)
 
 def setup_coreplugin(self, core_plugin=None):
@@ -34,7 +34,7 @@ class ConfigDict(base.AttributeDict):
 
 :param other: dictionary to be directly modified.
 """
-for key, value in other.iteritems():
+for key, value in six.iteritems(other):
 if isinstance(value, dict):
 if not isinstance(value, base.AttributeDict):
 other[key] = base.AttributeDict(value)
@@ -21,6 +21,7 @@ import mock
 import netaddr
 from oslo_config import cfg
 from oslo_log import log as logging
+import six
 import testtools
 import webob
 import webob.dec
@ -326,7 +327,7 @@ class L3AgentTestCase(L3AgentTestFramework):
# Get the last state reported for each router
actual_router_states = {}
for call in calls:
-for router_id, state in call.iteritems():
+for router_id, state in six.iteritems(call):
actual_router_states[router_id] = state

return actual_router_states == expected
@ -14,6 +14,7 @@

import re

+import six
from testtools import helpers

@ -121,7 +122,7 @@ class AreAllWellFormatted(object):
"""

def match(self, actual):
-for key, value in actual.iteritems():
+for key, value in six.iteritems(actual):
if key in ('content-length', 'x-account-bytes-used',
'x-account-container-count', 'x-account-object-count',
'x-container-bytes-used', 'x-container-object-count')\
@ -17,6 +17,7 @@ import copy
import functools

import jsonschema
+import six

from oslo_log import log as logging

@ -122,7 +123,7 @@ class BasicGeneratorSet(object):

if schema_type == 'object':
properties = schema["properties"]
-for attribute, definition in properties.iteritems():
+for attribute, definition in six.iteritems(properties):
current_path = copy.copy(path)
if path is not None:
current_path.append(attribute)
@ -14,6 +14,7 @@
# under the License.

from oslo_log import log as logging
+import six

import neutron.tests.tempest.common.generator.base_generator as base

@ -51,7 +52,7 @@ class ValidTestGenerator(base.BasicGeneratorSet):
@base.simple_generator
def generate_valid_object(self, schema):
obj = {}
-for k, v in schema["properties"].iteritems():
+for k, v in six.iteritems(schema["properties"]):
obj[k] = self.generate_valid(v)
return obj

@ -14,6 +14,7 @@
# under the License.

import fixtures
+import six

from neutron.api.v2 import attributes

@ -40,8 +41,8 @@ class AttributeMapMemento(fixtures.Fixture):
# deeper than a shallow copy.
super(AttributeMapMemento, self).setUp()
self.contents_backup = {}
-for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.iteritems():
-self.contents_backup[resource] = attrs.copy()
+for res, attrs in six.iteritems(attributes.RESOURCE_ATTRIBUTE_MAP):
+self.contents_backup[res] = attrs.copy()
self.addCleanup(self.restore)

def restore(self):
@ -17,6 +17,7 @@ import copy

import mock
from oslo_config import cfg
+import six

from neutron.agent.common import config as a_cfg
from neutron.agent.linux import ipset_manager
@ -1443,7 +1444,7 @@ class IptablesFirewallEnhancedIpsetTestCase(BaseIptablesFirewallTestCase):
remote_groups = remote_groups or {_IPv4: [FAKE_SGID],
_IPv6: [FAKE_SGID]}
rules = []
-for ip_version, remote_group_list in remote_groups.iteritems():
+for ip_version, remote_group_list in six.iteritems(remote_groups):
for remote_group in remote_group_list:
rules.append(self._fake_sg_rule_for_ethertype(ip_version,
remote_group))
@ -20,6 +20,7 @@ from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
import routes
+import six
import webob
import webob.exc as webexc
import webtest
@ -743,8 +744,8 @@ class ExtensionExtendedAttributeTestCase(base.BaseTestCase):
self._tenant_id = "8c70909f-b081-452d-872b-df48e6c355d1"
# Save the global RESOURCE_ATTRIBUTE_MAP
self.saved_attr_map = {}
-for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.iteritems():
-self.saved_attr_map[resource] = attrs.copy()
+for res, attrs in six.iteritems(attributes.RESOURCE_ATTRIBUTE_MAP):
+self.saved_attr_map[res] = attrs.copy()
# Add the resources to the global attribute map
# This is done here as the setup process won't
# initialize the main API router which extends
@ -17,6 +17,7 @@ import os

import mock
from oslo_config import cfg
+import six
from six import moves
import six.moves.urllib.parse as urlparse
import webob
@ -547,7 +548,7 @@ class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
output_dict = res['networks'][0]
input_dict['shared'] = False
self.assertEqual(len(input_dict), len(output_dict))
-for k, v in input_dict.iteritems():
+for k, v in six.iteritems(input_dict):
self.assertEqual(v, output_dict[k])
else:
# expect no results
@ -22,6 +22,7 @@ import netaddr
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_utils import importutils
+import six
from sqlalchemy import orm
from testtools import matchers
import webob.exc
@ -107,7 +108,7 @@ class NeutronDbPluginV2TestCase(testlib_api.WebTestCase):
cfg.CONF.set_override(
'service_plugins',
[test_lib.test_config.get(key, default)
-for key, default in (service_plugins or {}).iteritems()]
+for key, default in six.iteritems(service_plugins or {})]
)

cfg.CONF.set_override('base_mac', "12:34:56:78:90:ab")
@ -17,6 +17,7 @@ import contextlib

import mock
import oslo_db.exception as exc
+import six
import testtools
import webob.exc

@ -160,7 +161,7 @@ class SecurityGroupsTestCase(test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
"""Asserts that the sg rule has expected key/value pairs passed
in as expected_kvs dictionary
"""
-for k, v in expected_kvs.iteritems():
+for k, v in six.iteritems(expected_kvs):
self.assertEqual(security_group_rule[k], v)

@ -441,7 +442,7 @@ class TestSecurityGroups(SecurityGroupDBTestCase):
test_addr = {'192.168.1.1/24': 'IPv6',
'2001:db8:1234::/48': 'IPv4',
'192.168.2.1/24': 'BadEthertype'}
-for remote_ip_prefix, ethertype in test_addr.iteritems():
+for remote_ip_prefix, ethertype in six.iteritems(test_addr):
with self.security_group(name, description) as sg:
sg_id = sg['security_group']['id']
rule = self._build_security_group_rule(
@ -1501,7 +1502,7 @@ class TestConvertIPPrefixToCIDR(base.BaseTestCase):

def test_convert_ip_prefix_no_netmask_to_cidr(self):
addr = {'10.1.2.3': '32', 'fe80::2677:3ff:fe7d:4c': '128'}
-for k, v in addr.iteritems():
+for k, v in six.iteritems(addr):
self.assertEqual(ext_sg.convert_ip_prefix_to_cidr(k),
'%s/%s' % (k, v))

@ -13,6 +13,7 @@
# under the License.

from oslo_config import cfg
+import six
from webob import exc as web_exc

from neutron.api.v2 import attributes
@ -55,8 +56,8 @@ class VlanTransparentExtensionTestCase(test_db_base_plugin_v2.TestNetworksV2):

# Save the global RESOURCE_ATTRIBUTE_MAP
self.saved_attr_map = {}
-for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.iteritems():
-self.saved_attr_map[resource] = attrs.copy()
+for res, attrs in six.iteritems(attributes.RESOURCE_ATTRIBUTE_MAP):
+self.saved_attr_map[res] = attrs.copy()

# Update the plugin and extensions path
self.setup_coreplugin(plugin)
@ -149,3 +149,8 @@ class HackingTestCase(base.BaseTestCase):
def test_no_basestring(self):
self.assertEqual(1,
len(list(checks.check_no_basestring("isinstance(x, basestring)"))))
+
+def test_check_python3_iteritems(self):
+f = checks.check_python3_no_iteritems
+self.assertLineFails(f, "d.iteritems()")
+self.assertLinePasses(f, "six.iteritems(d)")
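The test above exercises checks.check_python3_no_iteritems, the new N327 hacking rule. A hacking check of this kind is a flake8-style generator that inspects each logical line and yields an (offset, message) pair when the forbidden pattern appears. The snippet below is only a sketch of that shape under those assumptions, not the exact code added by this commit; the regular expression and message wording are illustrative.

import re

def check_python3_no_iteritems(logical_line):
    # Sketch: flag any "<obj>.iteritems()" call, which does not exist on
    # Python 3; six.iteritems(<obj>) should be used instead (N327).
    if re.search(r"\.iteritems\(\)", logical_line):
        yield (0, "N327: Use six.iteritems() instead of "
                  "dict.iteritems().")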
@ -33,6 +33,7 @@ from oslo_log import loggers
from oslo_serialization import jsonutils
from oslo_utils import excutils
import routes.middleware
+import six
import webob.dec
import webob.exc

@ -666,7 +667,7 @@ class Debug(Middleware):
resp = req.get_response(self.application)

print(("*" * 40) + " RESPONSE HEADERS")
-for (key, value) in resp.headers.iteritems():
+for (key, value) in six.iteritems(resp.headers):
print(key, "=", value)
print()
