Leverage dict comprehensions per PEP-0274
PEP-0274 introduced dict comprehensions to replace the dict constructor with a sequence of key-value pairs[1]. The benefits are: First, it makes the code look neater. Second, it gains a micro-optimization. Cinder dropped Python 2.6 support in Kilo, so we can leverage this now. Note: This commit doesn't handle dict constructors with kwargs. This commit also adds a hacking rule. [1]http://legacy.python.org/dev/peps/pep-0274/ Change-Id: Ie65796438160b1aa1f47c8519fb9943e81bff3e9
This commit is contained in:
parent
c2db466995
commit
5800f25f7e
|
@ -17,6 +17,7 @@ Cinder Specific Commandments
|
|||
- [N329] LOG.exception and LOG.error messages require translations `_LE()`.
|
||||
- [N330] LOG.warning messages require translations `_LW()`.
|
||||
- [N333] Ensure that oslo namespaces are used for namespaced libraries.
|
||||
- [N336] Must use a dict comprehension instead of a dict constructor with a sequence of key-value pairs.
|
||||
- [C301] timeutils.utcnow() from oslo_utils should be used instead of datetime.now().
|
||||
- [C302] six.text_type should be used instead of unicode
|
||||
- [C303] Ensure that there are no 'print()' statements in code that is being committed.
|
||||
|
@ -25,7 +26,6 @@ Cinder Specific Commandments
|
|||
- [C306] timeutils.strtime() must not be used (deprecated).
|
||||
- [C307] LOG.warn is deprecated. Enforce use of LOG.warning.
|
||||
|
||||
|
||||
General
|
||||
-------
|
||||
- Use 'raise' instead of 'raise e' to preserve original traceback or exception being reraised::
|
||||
|
|
|
@ -76,7 +76,7 @@ class QuotaSetsController(wsgi.Controller):
|
|||
if usages:
|
||||
return values
|
||||
else:
|
||||
return dict((k, v['limit']) for k, v in values.items())
|
||||
return {k: v['limit'] for k, v in values.items()}
|
||||
|
||||
@wsgi.serializers(xml=QuotaTemplate)
|
||||
def show(self, req, id):
|
||||
|
|
|
@ -84,7 +84,7 @@ class LimitsController(wsgi.Controller):
|
|||
context = req.environ['cinder.context']
|
||||
quotas = QUOTAS.get_project_quotas(context, context.project_id,
|
||||
usages=False)
|
||||
abs_limits = dict((k, v['limit']) for k, v in quotas.items())
|
||||
abs_limits = {k: v['limit'] for k, v in quotas.items()}
|
||||
rate_limits = req.environ.get("cinder.limits", [])
|
||||
|
||||
builder = self._get_view_builder(req)
|
||||
|
@ -108,7 +108,7 @@ class Limit(object):
|
|||
60 * 60 * 24: "DAY",
|
||||
}
|
||||
|
||||
UNIT_MAP = dict([(v, k) for k, v in UNITS.items()])
|
||||
UNIT_MAP = {v: k for k, v in UNITS.items()}
|
||||
|
||||
def __init__(self, verb, uri, regex, value, unit):
|
||||
"""Initialize a new `Limit`.
|
||||
|
|
|
@ -120,7 +120,7 @@ def _translate_volume_summary_view(context, vol, image_id=None):
|
|||
|
||||
if vol.get('volume_metadata'):
|
||||
metadata = vol.get('volume_metadata')
|
||||
d['metadata'] = dict((item['key'], item['value']) for item in metadata)
|
||||
d['metadata'] = {item['key']: item['value'] for item in metadata}
|
||||
# avoid circular ref when vol is a Volume instance
|
||||
elif vol.get('metadata') and isinstance(vol.get('metadata'), dict):
|
||||
d['metadata'] = vol['metadata']
|
||||
|
|
|
@ -84,7 +84,7 @@ class LimitsController(wsgi.Controller):
|
|||
context = req.environ['cinder.context']
|
||||
quotas = QUOTAS.get_project_quotas(context, context.project_id,
|
||||
usages=False)
|
||||
abs_limits = dict((k, v['limit']) for k, v in quotas.items())
|
||||
abs_limits = {k: v['limit'] for k, v in quotas.items()}
|
||||
rate_limits = req.environ.get("cinder.limits", [])
|
||||
|
||||
builder = self._get_view_builder(req)
|
||||
|
@ -108,7 +108,7 @@ class Limit(object):
|
|||
60 * 60 * 24: "DAY",
|
||||
}
|
||||
|
||||
UNIT_MAP = dict([(v, k) for k, v in UNITS.items()])
|
||||
UNIT_MAP = {v: k for k, v in UNITS.items()}
|
||||
|
||||
def __init__(self, verb, uri, regex, value, unit):
|
||||
"""Initialize a new `Limit`.
|
||||
|
|
|
@ -105,7 +105,7 @@ class ViewBuilder(common.ViewBuilder):
|
|||
"""Retrieve the metadata of the volume object."""
|
||||
if volume.get('volume_metadata'):
|
||||
metadata = volume.get('volume_metadata')
|
||||
return dict((item['key'], item['value']) for item in metadata)
|
||||
return {item['key']: item['value'] for item in metadata}
|
||||
# avoid circular ref when vol is a Volume instance
|
||||
elif volume.get('metadata') and isinstance(volume.get('metadata'),
|
||||
dict):
|
||||
|
|
|
@ -470,8 +470,8 @@ def _dict_with_extra_specs(inst_type_query):
|
|||
'extra_specs' : {'k1': 'v1'}
|
||||
"""
|
||||
inst_type_dict = dict(inst_type_query)
|
||||
extra_specs = dict([(x['key'], x['value'])
|
||||
for x in inst_type_query['extra_specs']])
|
||||
extra_specs = {x['key']: x['value']
|
||||
for x in inst_type_query['extra_specs']}
|
||||
inst_type_dict['extra_specs'] = extra_specs
|
||||
return inst_type_dict
|
||||
|
||||
|
@ -742,7 +742,7 @@ def _get_quota_usages(context, session, project_id):
|
|||
filter_by(project_id=project_id).\
|
||||
with_lockmode('update').\
|
||||
all()
|
||||
return dict((row.resource, row) for row in rows)
|
||||
return {row.resource: row for row in rows}
|
||||
|
||||
|
||||
@require_context
|
||||
|
@ -877,8 +877,8 @@ def quota_reserve(context, resources, quotas, deltas, expire,
|
|||
LOG.warning(_LW("Change will make usage less than 0 for the following "
|
||||
"resources: %s"), unders)
|
||||
if overs:
|
||||
usages = dict((k, dict(in_use=v['in_use'], reserved=v['reserved']))
|
||||
for k, v in usages.items())
|
||||
usages = {k: dict(in_use=v['in_use'], reserved=v['reserved'])
|
||||
for k, v in usages.items()}
|
||||
raise exception.OverQuota(overs=sorted(overs), quotas=quotas,
|
||||
usages=usages)
|
||||
|
||||
|
|
|
@ -43,6 +43,7 @@ underscore_import_check = re.compile(r"(.)*i18n\s+import\s+_(.)*")
|
|||
custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
|
||||
no_audit_log = re.compile(r"(.)*LOG\.audit(.)*")
|
||||
no_print_statements = re.compile(r"\s*print\s*\(.+\).*")
|
||||
dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")
|
||||
|
||||
# NOTE(jsbryant): When other oslo libraries switch over non-namespaced
|
||||
# imports, we will need to add them to the regex below.
|
||||
|
@ -303,6 +304,13 @@ def no_log_warn(logical_line):
|
|||
yield (0, msg)
|
||||
|
||||
|
||||
def dict_constructor_with_list_copy(logical_line):
|
||||
msg = ("N336: Must use a dict comprehension instead of a dict constructor "
|
||||
"with a sequence of key-value pairs.")
|
||||
if dict_constructor_with_list_copy_re.match(logical_line):
|
||||
yield (0, msg)
|
||||
|
||||
|
||||
def factory(register):
|
||||
register(no_vi_headers)
|
||||
register(no_translate_debug_logs)
|
||||
|
@ -319,3 +327,4 @@ def factory(register):
|
|||
register(check_no_log_audit)
|
||||
register(check_no_contextlib_nested)
|
||||
register(no_log_warn)
|
||||
register(dict_constructor_with_list_copy)
|
||||
|
|
|
@ -244,8 +244,8 @@ class DbQuotaDriver(object):
|
|||
else:
|
||||
sync_filt = lambda x: not hasattr(x, 'sync')
|
||||
desired = set(keys)
|
||||
sub_resources = dict((k, v) for k, v in resources.items()
|
||||
if k in desired and sync_filt(v))
|
||||
sub_resources = {k: v for k, v in resources.items()
|
||||
if k in desired and sync_filt(v)}
|
||||
|
||||
# Make sure we accounted for all of them...
|
||||
if len(keys) != len(sub_resources):
|
||||
|
@ -257,7 +257,7 @@ class DbQuotaDriver(object):
|
|||
project_id,
|
||||
context.quota_class, usages=False)
|
||||
|
||||
return dict((k, v['limit']) for k, v in quotas.items())
|
||||
return {k: v['limit'] for k, v in quotas.items()}
|
||||
|
||||
def limit_check(self, context, resources, values, project_id=None):
|
||||
"""Check simple quota limits.
|
||||
|
|
|
@ -55,8 +55,7 @@ class BaseLimitTestSuite(test.TestCase):
|
|||
self.absolute_limits = {}
|
||||
|
||||
def stub_get_project_quotas(context, project_id, usages=True):
|
||||
return dict((k, dict(limit=v))
|
||||
for k, v in self.absolute_limits.items())
|
||||
return {k: dict(limit=v) for k, v in self.absolute_limits.items()}
|
||||
|
||||
self.stubs.Set(cinder.quota.QUOTAS, "get_project_quotas",
|
||||
stub_get_project_quotas)
|
||||
|
|
|
@ -55,8 +55,7 @@ class BaseLimitTestSuite(test.TestCase):
|
|||
self.absolute_limits = {}
|
||||
|
||||
def stub_get_project_quotas(context, project_id, usages=True):
|
||||
return dict((k, dict(limit=v))
|
||||
for k, v in self.absolute_limits.items())
|
||||
return {k: dict(limit=v) for k, v in self.absolute_limits.items()}
|
||||
|
||||
self.stubs.Set(cinder.quota.QUOTAS, "get_project_quotas",
|
||||
stub_get_project_quotas)
|
||||
|
|
|
@ -63,8 +63,8 @@ class ModelsObjectComparatorMixin(object):
|
|||
def _dict_from_object(self, obj, ignored_keys):
|
||||
if ignored_keys is None:
|
||||
ignored_keys = []
|
||||
return dict([(k, v) for k, v in obj.iteritems()
|
||||
if k not in ignored_keys])
|
||||
return {k: v for k, v in obj.iteritems()
|
||||
if k not in ignored_keys}
|
||||
|
||||
def _assertEqualObjects(self, obj1, obj2, ignored_keys=None):
|
||||
obj1 = self._dict_from_object(obj1, ignored_keys)
|
||||
|
@ -537,8 +537,8 @@ class DBAPIVolumeTestCase(BaseTest):
|
|||
# metadata is a dict, compare the 'key' and 'value' of each
|
||||
if key == 'volume_metadata':
|
||||
self.assertEqual(len(val1), len(val2))
|
||||
val1_dict = dict((x.key, x.value) for x in val1)
|
||||
val2_dict = dict((x.key, x.value) for x in val2)
|
||||
val1_dict = {x.key: x.value for x in val1}
|
||||
val2_dict = {x.key: x.value for x in val2}
|
||||
self.assertDictMatch(val1_dict, val2_dict)
|
||||
else:
|
||||
self.assertEqual(val1, val2)
|
||||
|
@ -1104,7 +1104,7 @@ class DBAPIEncryptionTestCase(BaseTest):
|
|||
step = str(step)
|
||||
return val + step
|
||||
|
||||
return [dict([(k, compose(v, i)) for k, v in values.items()])
|
||||
return [{k: compose(v, i) for k, v in values.items()}
|
||||
for i in range(1, 4)]
|
||||
|
||||
def test_volume_type_encryption_create(self):
|
||||
|
@ -1504,7 +1504,7 @@ class DBAPIBackupTestCase(BaseTest):
|
|||
step = str(step)
|
||||
return val + step
|
||||
|
||||
return [dict([(k, compose(v, i)) for k, v in base_values.items()])
|
||||
return [{k: compose(v, i) for k, v in base_values.items()}
|
||||
for i in range(1, 4)]
|
||||
|
||||
def test_backup_create(self):
|
||||
|
|
|
@ -650,21 +650,16 @@ class GPFSDriverTestCase(test.TestCase):
|
|||
@mock.patch('cinder.volume.drivers.ibm.gpfs.GPFSDriver.'
|
||||
'_gpfs_change_attributes')
|
||||
def test_set_volume_attributes(self, mock_change_attributes, mock_mkfs):
|
||||
metadata = [dict([('key', 'data_pool_name'), ('value', 'test')]),
|
||||
dict([('key', 'replicas'), ('value', 'test')]),
|
||||
dict([('key', 'dio'), ('value', 'test')]),
|
||||
dict([('key', 'write_affinity_depth'), ('value', 'test')]),
|
||||
dict([('key', 'block_group_factor'), ('value', 'test')]),
|
||||
dict([('key', 'write_affinity_failure_group'),
|
||||
('value', 'test')]),
|
||||
dict([('key', 'test'),
|
||||
('value', 'test')]),
|
||||
dict([('key', 'fstype'),
|
||||
('value', 'test')]),
|
||||
dict([('key', 'fslabel'),
|
||||
('value', 'test')]),
|
||||
dict([('key', 'test'),
|
||||
('value', 'test')])]
|
||||
metadata = [{'key': 'data_pool_name', 'value': 'test'},
|
||||
{'key': 'replicas', 'value': 'test'},
|
||||
{'key': 'dio', 'value': 'test'},
|
||||
{'key': 'write_affinity_depth', 'value': 'test'},
|
||||
{'key': 'block_group_factor', 'value': 'test'},
|
||||
{'key': 'write_affinity_failure_group', 'value': 'test'},
|
||||
{'key': 'test', 'value': 'test'},
|
||||
{'key': 'fstype', 'value': 'test'},
|
||||
{'key': 'fslabel', 'value': 'test'},
|
||||
{'key': 'test', 'value': 'test'}]
|
||||
|
||||
self.driver._set_volume_attributes('', '', metadata)
|
||||
|
||||
|
|
|
@ -325,3 +325,34 @@ class HackingTestCase(test.TestCase):
|
|||
self.assertEqual(1, len(list(checks.check_no_print_statements(
|
||||
"print ('My print with space')",
|
||||
"cinder/volume/anotherFile.py", False))))
|
||||
|
||||
def test_dict_constructor_with_list_copy(self):
|
||||
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
|
||||
" dict([(i, connect_info[i])"))))
|
||||
|
||||
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
|
||||
" attrs = dict([(k, _from_json(v))"))))
|
||||
|
||||
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
|
||||
" type_names = dict((value, key) for key, value in"))))
|
||||
|
||||
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
|
||||
" dict((value, key) for key, value in"))))
|
||||
|
||||
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
|
||||
"foo(param=dict((k, v) for k, v in bar.items()))"))))
|
||||
|
||||
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
|
||||
" dict([[i,i] for i in range(3)])"))))
|
||||
|
||||
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
|
||||
" dd = dict([i,i] for i in range(3))"))))
|
||||
|
||||
self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
|
||||
" dict()"))))
|
||||
|
||||
self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
|
||||
" create_kwargs = dict(snapshot=snapshot,"))))
|
||||
|
||||
self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
|
||||
" self._render_dict(xml, data_el, data.__dict__)"))))
|
||||
|
|
|
@ -1045,8 +1045,7 @@ class DbQuotaDriverTestCase(test.TestCase):
|
|||
quota_class=None, defaults=True,
|
||||
usages=True):
|
||||
self.calls.append('get_project_quotas')
|
||||
return dict((k, dict(limit=v.default))
|
||||
for k, v in resources.items())
|
||||
return {k: dict(limit=v.default) for k, v in resources.items()}
|
||||
|
||||
self.stubs.Set(self.driver, 'get_project_quotas',
|
||||
fake_get_project_quotas)
|
||||
|
|
|
@ -3108,8 +3108,8 @@ class VolumeTestCase(BaseVolumeTestCase):
|
|||
# ensure that volume's glance metadata is copied
|
||||
# to snapshot's glance metadata
|
||||
self.assertEqual(len(vol_glance_meta), len(snap_glance_meta))
|
||||
vol_glance_dict = dict((x.key, x.value) for x in vol_glance_meta)
|
||||
snap_glance_dict = dict((x.key, x.value) for x in snap_glance_meta)
|
||||
vol_glance_dict = {x.key: x.value for x in vol_glance_meta}
|
||||
snap_glance_dict = {x.key: x.value for x in snap_glance_meta}
|
||||
self.assertDictMatch(vol_glance_dict, snap_glance_dict)
|
||||
|
||||
# ensure that snapshot's status is changed to 'available'
|
||||
|
|
|
@ -148,7 +148,7 @@ class VolumeGlanceMetadataTestCase(test.TestCase):
|
|||
db.volume_glance_metadata_copy_to_volume(self.ctxt, vol2['id'],
|
||||
snapshot['id'])
|
||||
metadata = db.volume_glance_metadata_get(self.ctxt, vol2['id'])
|
||||
metadata = dict([(m['key'], m['value']) for m in metadata])
|
||||
metadata = {m['key']: m['value'] for m in metadata}
|
||||
self.assertEqual(metadata, {'m1': 'v1'})
|
||||
|
||||
def test_volume_snapshot_glance_metadata_get_nonexistent(self):
|
||||
|
|
|
@ -1106,9 +1106,7 @@ class API(base.Base):
|
|||
db_data = self.db.volume_glance_metadata_get(context, volume['id'])
|
||||
LOG.info(_LI("Get volume image-metadata completed successfully."),
|
||||
resource=volume)
|
||||
return dict(
|
||||
(meta_entry.key, meta_entry.value) for meta_entry in db_data
|
||||
)
|
||||
return {meta_entry.key: meta_entry.value for meta_entry in db_data}
|
||||
|
||||
def _check_volume_availability(self, volume, force):
|
||||
"""Check if the volume can be used."""
|
||||
|
@ -1141,11 +1139,10 @@ class API(base.Base):
|
|||
custom_property_set = (set(volume_image_metadata).difference
|
||||
(set(glance_core_properties)))
|
||||
if custom_property_set:
|
||||
metadata.update(dict(properties=dict((custom_property,
|
||||
volume_image_metadata
|
||||
[custom_property])
|
||||
for custom_property
|
||||
in custom_property_set)))
|
||||
properties = {custom_property:
|
||||
volume_image_metadata[custom_property]
|
||||
for custom_property in custom_property_set}
|
||||
metadata.update(dict(properties=properties))
|
||||
except exception.GlanceMetadataNotFound:
|
||||
# If volume is not created from image, No glance metadata
|
||||
# would be available for that volume in
|
||||
|
|
|
@ -164,8 +164,8 @@ class FlashSystemDriver(san.SanDriver):
|
|||
|
||||
host_name = connector['host']
|
||||
if isinstance(host_name, six.text_type):
|
||||
unicode_host_name_filter = dict((ord(six.text_type(char)), u'-')
|
||||
for char in invalid_ch_in_host)
|
||||
unicode_host_name_filter = {ord(six.text_type(char)): u'-'
|
||||
for char in invalid_ch_in_host}
|
||||
host_name = host_name.translate(unicode_host_name_filter)
|
||||
elif isinstance(host_name, str):
|
||||
string_host_name_filter = string.maketrans(
|
||||
|
@ -459,7 +459,7 @@ class FlashSystemDriver(san.SanDriver):
|
|||
(_('_get_hdr_dic: attribute headers and values do not match.\n '
|
||||
'Headers: %(header)s\n Values: %(row)s.')
|
||||
% {'header': six.text_type(header), 'row': six.text_type(row)}))
|
||||
dic = dict((a, v) for a, v in map(None, attributes, values))
|
||||
dic = {a: v for a, v in map(None, attributes, values)}
|
||||
return dic
|
||||
|
||||
def _get_conn_fc_wwpns(self):
|
||||
|
|
Loading…
Reference in New Issue