Remove usage of dict.iteritems()

Replace dict.iteritems() with dict.items()
Make Congress Python 3 compatible

Change-Id: Id59c296a655cb9570bbd0d8915014230888b7088
Partially-Implements: blueprint support-python3
VeenaSL 2015-07-22 20:04:46 +05:30
parent d272627e52
commit e9373f1c81
11 changed files with 31 additions and 31 deletions
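
For context, a minimal sketch of why every iteritems() call has to change (illustration only; the dictionary d below is made up and not taken from the Congress code):

    d = {'alpha': 1, 'beta': 2}

    # Python 2 only: d.iteritems() returns a lazy iterator over the pairs.
    # Python 3 removes the method entirely, so the same call raises AttributeError.

    # Works unchanged on Python 2 and Python 3:
    for key, value in d.items():
        pair = '%s=%s' % (key, value)  # process the pair as before

On Python 2, items() builds a list of (key, value) tuples rather than a lazy iterator, while Python 3 returns a lightweight dict view; for dictionaries of the sizes handled in this change the extra copy is negligible. six.iteritems(d) would be the alternative where lazy iteration on Python 2 still matters.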

@@ -178,7 +178,7 @@ class Debug(Middleware):
         resp = req.get_response(self.application)
         print(('*' * 40) + ' RESPONSE HEADERS')
-        for (key, value) in resp.headers.iteritems():
+        for (key, value) in resp.headers.items():
             print(key, '=', value)
         print()

@@ -489,7 +489,7 @@ class PulpLpLang(LpLang):
             prob += c
         # invert values
-        return prob, {value: key for key, value in values.iteritems()}
+        return prob, {value: key for key, value in values.items()}
     def pulpify(self, expr, variables, values):
         """Return PuLP version of expr.

@@ -77,7 +77,7 @@ class BiUnifier(object):
         # each value is a Value
         self.contents = {}
         if dictionary is not None:
-            for var, value in dictionary.iteritems():
+            for var, value in dictionary.items():
                 self.add(var, value, None)
     def add(self, var, value, unifier):
@@ -144,7 +144,7 @@ class BiUnifier(object):
         s = repr(self)
         s += "={"
         s += ",".join(["{}:{}".format(str(var), str(val))
-                       for var, val in self.contents.iteritems()])
+                       for var, val in self.contents.items()])
         s += "}"
         return s
@@ -152,7 +152,7 @@ class BiUnifier(object):
         s = repr(self)
         s += "={"
         s += ",".join(["{}:{}".format(var, val.recur_str())
-                       for var, val in self.contents.iteritems()])
+                       for var, val in self.contents.items()])
         s += "}"
         return s
@@ -164,7 +164,7 @@ def binding_str(binding):
     """Handles string conversion of either dictionary or Unifier."""
     if isinstance(binding, dict):
         s = ",".join(["{}: {}".format(str(var), str(val))
-                      for var, val in binding.iteritems()])
+                      for var, val in binding.items()])
         return '{' + s + '}'
     else:
         return str(binding)

@@ -249,7 +249,7 @@ class Graph(object):
         node_obj = self.nodes[node]
         begin = node_obj.begin
         end = node_obj.end
-        return set([n for n, dfs_obj in self.nodes.iteritems()
+        return set([n for n, dfs_obj in self.nodes.items()
                     if begin <= dfs_obj.begin and dfs_obj.end <= end])
     def next_counter(self):

@@ -159,7 +159,7 @@ class MuranoDriver(datasource_driver.DataSourceDriver,
             self.state[self.STATES].add((env.id, env.status))
             parent_types = self._get_parent_types(env_type)
             self._add_parent_types(env.id, parent_types)
-            for key, value in env.to_dict().iteritems():
+            for key, value in env.to_dict().items():
                 if key in self.UNUSED_ENV_PROPERTIES:
                     continue
                 self._add_properties(env.id, key, value)
@@ -196,7 +196,7 @@ class MuranoDriver(datasource_driver.DataSourceDriver,
             s_id = s_dict['?']['id']
             s_type = s_dict['?']['type']
             self.state[self.OBJECTS].add((s_id, env_id, s_type))
-            for key, value in s_dict.iteritems():
+            for key, value in s_dict.items():
                 if key in ['instance', '?']:
                     continue
                 self._add_properties(s_id, key, value)
@@ -215,7 +215,7 @@ class MuranoDriver(datasource_driver.DataSourceDriver,
             si_type = si_dict['?']['type']
             self.state[self.OBJECTS].add((si_id, s_id, si_type))
-            for key, value in si_dict.iteritems():
+            for key, value in si_dict.items():
                 if key in ['?']:
                     continue
                 self._add_properties(si_id, key, value)
@@ -243,7 +243,7 @@ class MuranoDriver(datasource_driver.DataSourceDriver,
         o_actions = obj_dict['?']['_actions']
         if not o_actions:
             return
-        for action_id, action_value in o_actions.iteritems():
+        for action_id, action_value in o_actions.items():
             action_name = action_value.get('name', '')
             enabled = action_value.get('enabled', False)
             action = (obj_id, action_id, action_name, enabled)
@@ -285,7 +285,7 @@ class MuranoDriver(datasource_driver.DataSourceDriver,
             parent_types = self._get_parent_types(net_type)
             self._add_parent_types(net_id, parent_types)
-            for key, value in default_networks['environment'].iteritems():
+            for key, value in default_networks['environment'].items():
                 if key in ['?']:
                     continue
                 self._add_properties(net_id, key, value)
@@ -293,7 +293,7 @@ class MuranoDriver(datasource_driver.DataSourceDriver,
            if not net_id:
                continue
            self._add_relationships(env_id, 'defaultNetworks', net_id)
-           for key, value in default_networks.iteritems():
+           for key, value in default_networks.items():
               if key in ['environment']:
                   # data from environment already populated
                   continue
@@ -323,7 +323,7 @@ class MuranoDriver(datasource_driver.DataSourceDriver,
                 pkg_type = 'io.murano.Application'
             self.state[self.OBJECTS].add((pkg.id, pkg.owner_id, pkg_type))
-            for key, value in pkg.to_dict().iteritems():
+            for key, value in pkg.to_dict().items():
                 if key in self.UNUSED_PKG_PROPERTIES:
                     continue
                 self._add_properties(pkg.id, key, value)
@@ -340,7 +340,7 @@ class MuranoDriver(datasource_driver.DataSourceDriver,
         if value is None or value == '':
             return
         if isinstance(value, dict):
-            for k, v in value.iteritems():
+            for k, v in value.items():
                 new_key = key + "." + k
                 self._add_properties(obj_id, new_key, v)
         elif isinstance(value, list):

@@ -68,7 +68,7 @@ class CongressException(Exception):
                 # kwargs doesn't match a variable in the message
                 # log the issue and the kwargs
                 LOG.exception(_('Exception in string format operation'))
-                for name, value in kwargs.iteritems():
+                for name, value in kwargs.items():
                     LOG.error("%s: %s", name, value)  # noqa
                 if CONF.fatal_exception_format_errors:

@@ -218,7 +218,7 @@ class DataSourceManager(object):
         # check that all the required options are passed in
         required_options = set(
-            [k for k, v in loaded_driver['config'].iteritems()
+            [k for k, v in loaded_driver['config'].items()
              if v == constants.REQUIRED])
         missing_options = required_options - specified_options
         if missing_options:

@@ -185,7 +185,7 @@ class TriggerRegistry(object):
         """Build string representation of self.index; useful for debugging."""
         s = '{'
         s += ";".join(["%s -> %s" % (key, ",".join(str(x) for x in value))
-                       for key, value in self.index.iteritems()])
+                       for key, value in self.index.items()])
         s += '}'
         return s
@@ -434,7 +434,7 @@ class Runtime (object):
         # rerun the trigger queries to check for changes
         table_data_new = self._compute_table_contents(table_triggers)
         # run triggers if tables changed
-        for table, triggers in table_triggers.iteritems():
+        for table, triggers in table_triggers.items():
             if table_data_old[table] != table_data_new[table]:
                 for trigger in triggers:
                     trigger.callback(table,
@@ -657,7 +657,7 @@ class Runtime (object):
             return (True, [])
         # check that the updates would not cause an error
         by_theory = self.group_events_by_target(events)
-        for th, th_events in by_theory.iteritems():
+        for th, th_events in by_theory.items():
             th_obj = self.get_target(th)
             errors.extend(th_obj.update_would_cause_errors(th_events))
         # update dependency graph (and undo it if errors)
@@ -685,12 +685,12 @@ class Runtime (object):
         table_data_old = self._compute_table_contents(table_triggers)
         # actually apply the updates
         changes = []
-        for th, th_events in by_theory.iteritems():
+        for th, th_events in by_theory.items():
             changes.extend(self.get_target(th).update(events))
         # rerun the trigger queries to check for changes
         table_data_new = self._compute_table_contents(table_triggers)
         # run triggers if tables changed
-        for table, triggers in table_triggers.iteritems():
+        for table, triggers in table_triggers.items():
             if table_data_old[table] != table_data_new[table]:
                 for trigger in triggers:
                     trigger.callback(table,
@@ -1378,7 +1378,7 @@ class DseRuntime (Runtime, deepsix.deepSix):
         if 'named' in action_args:
             named_args = ", ".join(
                 "%s=%s" % (key, val)
-                for key, val in action_args['named'].iteritems())
+                for key, val in action_args['named'].items())
             delimit = ''
             if pos_args and named_args:
                 delimit = ', '

@@ -459,7 +459,7 @@ class ComputePlacementEngine(PolicyEngineDriver):
             definitions[head].add(conjunction)
         equalities = [self.lplang.makeEqual(h, self.lplang.makeOr(*bodies))
-                      for h, bodies in definitions.iteritems()]
+                      for h, bodies in definitions.items()]
         return equalities, definitions.keys()
     def _extract_lp_variable_equalities(self, rule, rewrite_theory):

@@ -80,7 +80,7 @@ class TestDataSourceManager(base.SqlTestCase):
         # let driver generate this for us.
         del req['id']
         result = self.datasource_mgr.add_datasource(req)
-        for key, value in req.iteritems():
+        for key, value in req.items():
             self.assertEqual(value, result[key])
         # TODO(thinrichs): test that ensure the DB, the policy engine,
         # and the datasource manager are all in sync
@@ -96,7 +96,7 @@ class TestDataSourceManager(base.SqlTestCase):
         del req['id']
         result = self.datasource_mgr.add_datasource(req)
         result = self.datasource_mgr.get_datasource(result['id'])
-        for key, value in req.iteritems():
+        for key, value in req.items():
             self.assertEqual(value, result[key])
     def test_get_datasources(self):
@@ -115,11 +115,11 @@ class TestDataSourceManager(base.SqlTestCase):
         result = self.datasource_mgr.get_datasources()
         req['name'] = 'datasource1'
-        for key, value in req.iteritems():
+        for key, value in req.items():
             self.assertEqual(value, result[0][key])
         req['name'] = 'datasource2'
-        for key, value in req.iteritems():
+        for key, value in req.items():
             self.assertEqual(value, result[1][key])
     def test_get_datasources_hide_secret(self):
@@ -141,11 +141,11 @@ class TestDataSourceManager(base.SqlTestCase):
         result = self.datasource_mgr.get_datasources(filter_secret=True)
         req['name'] = 'datasource1'
-        for key, value in req.iteritems():
+        for key, value in req.items():
             self.assertEqual(value, result[0][key])
         req['name'] = 'datasource2'
-        for key, value in req.iteritems():
+        for key, value in req.items():
             self.assertEqual(value, result[1][key])
     def test_create_datasource_duplicate_name(self):

@@ -55,7 +55,7 @@ class RoleBasedPolicyFixture(fixtures.Fixture):
         policy = json.load(open(CONF.oslo_policy.policy_file))
         # Convert all actions to require specified role
-        for action, rule in policy.iteritems():
+        for action, rule in policy.items():
             policy[action] = 'role:%s' % self.role
         self.policy_dir = self.useFixture(fixtures.TempDir())