Performance: leverage dict comprehensions (PEP-0274)
PEP-0274 introduced dict comprehensions to replace the dict constructor
called with a sequence of length-2 sequences. The benefits, quoted from [1]:

    The dictionary constructor approach has two distinct disadvantages
    from the proposed syntax though. First, it isn't as legible as a
    dict comprehension. Second, it forces the programmer to create an
    in-core list object first, which could be expensive.

Since Manila does not support Python 2.6, we can leverage this. A deep
dive into PEP-0274 is available at [2], and basic performance tests
at [3].

Note: this commit does not handle dict constructors called with kwargs.
It also adds a hacking rule (M336).

[1] http://legacy.python.org/dev/peps/pep-0274/
[2] http://doughellmann.com/2012/11/12/the-performance-impact-of-using-dict-instead-of-in-cpython-2-7-2.html
[3] http://paste.openstack.org/show/480757/

Change-Id: I87d26a46ef0d494f92afb1d3bebda2797a12413c
Closes-Bug: #1524771
parent: 6ac61c1a69
commit: 51069d5390
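
As background for the change (not part of the commit itself), a minimal sketch of the two forms in question with an informal timeit comparison in the spirit of [3]; the sample data and helper names are made up for illustration:

    import timeit

    items = list(range(10000))

    def with_constructor():
        # Old form flagged by M336: dict() over an in-core list of 2-tuples.
        return dict([(i, i * 2) for i in items])

    def with_comprehension():
        # New form: dict comprehension, no intermediate list is built.
        return {i: i * 2 for i in items}

    assert with_constructor() == with_comprehension()
    print("constructor:   %.3fs" % timeit.timeit(with_constructor, number=500))
    print("comprehension: %.3fs" % timeit.timeit(with_comprehension, number=500))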
@@ -20,6 +20,8 @@ Manila Specific Commandments
 - [M330] LOG.warning messages require translations _LW()!
 - [M331] Log messages require translations!
 - [M333] 'oslo_' should be used instead of 'oslo.'
+- [M336] Must use a dict comprehension instead of a dict constructor
+  with a sequence of key-value pairs.
 
 
 LOG Translations
@@ -80,7 +80,7 @@ class Limit(object):
         60 * 60 * 24: "DAY",
     }
 
-    UNIT_MAP = dict([(v, k) for k, v in UNITS.items()])
+    UNIT_MAP = {v: k for k, v in UNITS.items()}
 
     def __init__(self, verb, uri, regex, value, unit):
         """Initialize a new `Limit`.
@@ -154,9 +154,9 @@ class ShareSnapshotsController(wsgi.Controller, wsgi.AdminActionsMixin):
             'display_description',
         )
 
-        update_dict = dict([(key, snapshot_data[key])
-                            for key in valid_update_keys
-                            if key in snapshot_data])
+        update_dict = {key: snapshot_data[key]
+                       for key in valid_update_keys
+                       if key in snapshot_data}
 
         try:
             snapshot = self.share_api.get_snapshot(context, id)
@@ -197,9 +197,9 @@ class ShareMixin(object):
             'is_public',
         )
 
-        update_dict = dict([(key, share_data[key])
-                            for key in valid_update_keys
-                            if key in share_data])
+        update_dict = {key: share_data[key]
+                       for key in valid_update_keys
+                       if key in share_data}
 
         try:
             share = self.share_api.get(context, id)
@@ -63,7 +63,7 @@ class QuotaSetsMixin(object):
 
         if usages:
             return values
-        return dict((k, v['limit']) for k, v in values.items())
+        return {k: v['limit'] for k, v in values.items()}
 
     @wsgi.Controller.authorize("show")
     def _show(self, req, id):
@@ -50,7 +50,7 @@ class ViewBuilder(common.ViewBuilder):
         context = request.environ['manila.context']
         metadata = share.get('share_metadata')
         if metadata:
-            metadata = dict((item['key'], item['value']) for item in metadata)
+            metadata = {item['key']: item['value'] for item in metadata}
         else:
             metadata = {}
 
@@ -770,7 +770,7 @@ def _get_user_quota_usages(context, session, project_id, user_id):
                    models.QuotaUsage.user_id is None)).\
         with_lockmode('update').\
         all()
-    return dict((row.resource, row) for row in rows)
+    return {row.resource: row for row in rows}
 
 
 def _get_project_quota_usages(context, session, project_id):
@@ -980,8 +980,8 @@ def quota_reserve(context, resources, project_quotas, user_quotas, deltas,
                 usages = project_usages
             else:
                 usages = user_usages
-            usages = dict((k, dict(in_use=v['in_use'], reserved=v['reserved']))
-                          for k, v in usages.items())
+            usages = {k: dict(in_use=v['in_use'], reserved=v['reserved'])
+                      for k, v in usages.items()}
             raise exception.OverQuota(overs=sorted(overs), quotas=user_quotas,
                                       usages=usages)
 
@@ -2465,7 +2465,7 @@ def driver_private_data_get(context, host, entity_id, key=None,
     query = _driver_private_data_query(session, context, host, entity_id, key)
 
     if key is None or isinstance(key, list):
-        return dict([(item.key, item.value) for item in query.all()])
+        return {item.key: item.value for item in query.all()}
     else:
         result = query.first()
         return result["value"] if result is not None else default
@@ -2606,8 +2606,8 @@ to a single dict:
     'extra_specs' : {'k1': 'v1'}
     """
    inst_type_dict = dict(inst_type_query)
-    extra_specs = dict([(x['key'], x['value'])
-                        for x in inst_type_query['extra_specs']])
+    extra_specs = {x['key']: x['value']
+                   for x in inst_type_query['extra_specs']}
     inst_type_dict['extra_specs'] = extra_specs
     return inst_type_dict
 
@@ -698,8 +698,8 @@ class ShareServer(BASE, ManilaBase):
 
     @property
     def backend_details(self):
-        return dict((model['key'], model['value'])
-                    for model in self._backend_details)
+        return {model['key']: model['value']
+                for model in self._backend_details}
 
     _extra_keys = ['backend_details']
 
@@ -53,6 +53,7 @@ underscore_import_check = re.compile(r"(.)*import _(.)*")
 # We need this for cases where they have created their own _ function.
 custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
 oslo_namespace_imports = re.compile(r"from[\s]*oslo[.](.*)")
+dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")
 
 
 class BaseASTChecker(ast.NodeVisitor):
@@ -232,6 +233,14 @@ def check_oslo_namespace_imports(logical_line, physical_line, filename):
         yield(0, msg)
 
 
+def dict_constructor_with_list_copy(logical_line):
+    msg = ("M336: Must use a dict comprehension instead of a dict constructor"
+           " with a sequence of key-value pairs."
+           )
+    if dict_constructor_with_list_copy_re.match(logical_line):
+        yield (0, msg)
+
+
 def factory(register):
     register(validate_log_translations)
     register(check_explicit_underscore_import)
@@ -239,3 +248,4 @@ def factory(register):
     register(CheckForStrExc)
     register(CheckForTransAdd)
     register(check_oslo_namespace_imports)
+    register(dict_constructor_with_list_copy)
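
A quick way to see what the new M336 check flags and what it ignores (not part of the diff; a sketch reusing the regex added above, with made-up sample lines):

    import re

    # The pattern added above: "dict(" optionally followed by "[", then "(" or "[".
    dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")

    samples = [
        "d = dict([(k, v) for k, v in items])",  # flagged: list of 2-tuples
        "d = dict((k, v) for k, v in items)",    # flagged: generator of 2-tuples
        "d = dict(snapshot=snapshot, size=1)",   # not flagged: kwargs form
    ]

    for line in samples:
        print(bool(dict_constructor_with_list_copy_re.match(line)), line)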
@@ -218,7 +218,7 @@ class API(object):
 
     def list_extensions(self):
        extensions_list = self.client.list_extensions().get('extensions')
-        return dict((ext['name'], ext) for ext in extensions_list)
+        return {ext['name']: ext for ext in extensions_list}
 
     def _has_port_binding_extension(self):
         if not self.extensions:
@@ -310,8 +310,8 @@ class DbQuotaDriver(object):
         else:
             sync_filt = lambda x: not hasattr(x, 'sync')
         desired = set(keys)
-        sub_resources = dict((k, v) for k, v in resources.items()
-                             if k in desired and sync_filt(v))
+        sub_resources = {k: v for k, v in resources.items()
+                         if k in desired and sync_filt(v)}
 
         # Make sure we accounted for all of them...
         if len(keys) != len(sub_resources):
@@ -330,7 +330,7 @@ class DbQuotaDriver(object):
                                           context.quota_class,
                                           usages=False)
 
-        return dict((k, v['limit']) for k, v in quotas.items())
+        return {k: v['limit'] for k, v in quotas.items()}
 
     def limit_check(self, context, resources, values, project_id=None,
                     user_id=None):
@@ -1173,8 +1173,8 @@ class ShareManager(manager.SchedulerDependentManager):
         """Get info about relationships between pools and share_servers."""
         share_servers = self.db.share_server_get_all_by_host(context,
                                                              self.host)
-        return dict((server['id'], self.driver.get_share_server_pools(server))
-                    for server in share_servers)
+        return {server['id']: self.driver.get_share_server_pools(server)
+                for server in share_servers}
 
     @add_hooks
     @utils.require_driver_initialized
@@ -344,8 +344,8 @@ class TestCase(base_test.BaseTestCase):
     def _dict_from_object(self, obj, ignored_keys):
         if ignored_keys is None:
             ignored_keys = []
-        return dict([(k, v) for k, v in obj.iteritems()
-                     if k not in ignored_keys])
+        return {k: v for k, v in obj.iteritems()
+                if k not in ignored_keys}
 
     def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None):
         obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)
@@ -135,7 +135,7 @@ def create_arg_list(key_names):
 
 
 def create_arg_dict(key_names):
-    return dict((key, fake_storage_data[key]) for key in key_names)
+    return {key: fake_storage_data[key] for key in key_names}
 
 
 @ddt.ddt
@@ -200,3 +200,31 @@ class HackingTestCase(test.TestCase):
         """
         errors = []
         self._assert_has_errors(code, checker, expected_errors=errors)
+
+    def test_dict_constructor_with_list_copy(self):
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            " dict([(i, connect_info[i])"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            " attrs = dict([(k, _from_json(v))"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            " type_names = dict((value, key) for key, value in"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            " dict((value, key) for key, value in"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "foo(param=dict((k, v) for k, v in bar.items()))"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            " dict([[i,i] for i in range(3)])"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            " dd = dict([i,i] for i in range(3))"))))
+
+        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
+            " create_kwargs = dict(snapshot=snapshot,"))))
+
+        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
+            " self._render_dict(xml, data_el, data.__dict__)"))))
@@ -952,8 +952,8 @@ class DbQuotaDriverTestCase(test.TestCase):
                                     quota_class=None, defaults=True,
                                     usages=True):
             self.calls.append('get_project_quotas')
-            return dict((k, dict(limit=v.default))
-                        for k, v in resources.items())
+            return {k: dict(limit=v.default)
+                    for k, v in resources.items()}
 
         self.mock_object(self.driver, 'get_project_quotas',
                          fake_get_project_quotas)