Python 3: generalize the usage of the six module

* Replace itertools.ifilter() with six.moves.filter()
* Replace itertools.imap() with six.moves.map()
* Replace map(_compare, statistics)
  with [_compare(statistic) for statistic in statistics]
* Replace obj.iterkeys() with six.iterkeys(obj)
* Replace obj.iteritems() with six.iteritems(obj)
* Replace xrange() with six.moves.xrange(), or with range() for small
  ranges
* Replace the repr module with six.moves.reprlib
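
A minimal sketch (illustrative only, not code from this change) of the idioms
listed above; names such as "counters" are invented for the example, and it
assumes six is installed so the same code runs on Python 2 and Python 3:

    from __future__ import print_function

    import six
    from six import moves
    from six.moves import reprlib

    counters = {'cpu': 4, 'memory': 2}

    # obj.iterkeys()/obj.iteritems() -> six.iterkeys(obj)/six.iteritems(obj)
    names = set(six.iterkeys(counters))
    for name, value in six.iteritems(counters):
        print(name, value)

    # itertools.ifilter()/itertools.imap() -> six.moves.filter()/six.moves.map()
    # (both stay lazy on Python 2 and Python 3)
    doubled = list(moves.map(lambda v: v * 2, counters.values()))
    evens = list(moves.filter(lambda v: v % 2 == 0, doubled))

    # xrange() -> six.moves.xrange(); plain range() is fine for small ranges
    samples = [v for v in moves.xrange(10)]

    # where a real list is needed, a list comprehension replaces map(),
    # because map() returns an iterator on Python 3
    flags = [v > 5 for v in samples]

    # the Python 2 repr module -> six.moves.reprlib
    print(reprlib.repr(samples))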

Change-Id: Iaaa328cc15355182bde444a1aeaa4385691c8f90
Victor Stinner 2015-05-15 18:16:21 +02:00
parent 253a6288e8
commit de9c4891e7
11 changed files with 24 additions and 20 deletions

View File

@@ -26,6 +26,7 @@ import random
from oslo_config import cfg
from oslo_context import context
import six
+from six import moves
from six.moves.urllib import parse as urlparse
from stevedore import extension
@@ -214,7 +215,7 @@ class AgentManager(os_service.Service):
extensions = (self._extensions('poll', namespace).extensions
for namespace in namespaces)
if pollster_list:
-extensions = (itertools.ifilter(_match, exts)
+extensions = (moves.filter(_match, exts)
for exts in extensions)
self.extensions = list(itertools.chain(*list(extensions)))

View File

@@ -16,7 +16,7 @@
# under the License.
-import itertools
+from six import moves
from ceilometer.alarm import evaluator
from ceilometer.i18n import _
@@ -106,8 +106,7 @@ class CombinationEvaluator(evaluator.Evaluator):
return
states = zip(alarm.rule['alarm_ids'],
-itertools.imap(self._get_alarm_state,
-alarm.rule['alarm_ids']))
+moves.map(self._get_alarm_state, alarm.rule['alarm_ids']))
if self._sufficient_states(alarm, states):
self._transition(alarm, states)

View File

@@ -201,4 +201,4 @@ class ThresholdEvaluator(evaluator.Evaluator):
self._transition(alarm,
statistics,
-map(_compare, statistics))
+[_compare(statistic) for statistic in statistics])

View File

@@ -18,6 +18,7 @@
from oslo_config import cfg
from oslo_utils import units
from oslo_vmware import api
+import six
from ceilometer.compute.virt import inspector as virt_inspector
from ceilometer.compute.virt.vmware import vsphere_operations
@@ -129,7 +130,7 @@ class VsphereInspector(virt_inspector.Inspector):
vnic_id_to_stats_map = self._ops.query_vm_device_stats(
vm_moid, net_counter_id, duration)
vnic_stats[net_counter] = vnic_id_to_stats_map
-vnic_ids.update(vnic_id_to_stats_map.iterkeys())
+vnic_ids.update(six.iterkeys(vnic_id_to_stats_map))
# Stats provided from vSphere are in KB/s, converting it to B/s.
for vnic_id in vnic_ids:
@@ -180,7 +181,7 @@ class VsphereInspector(virt_inspector.Inspector):
disk_id_to_stat_map = self._ops.query_vm_device_stats(
vm_moid, disk_counter_id, duration)
disk_stats[disk_counter] = disk_id_to_stat_map
-disk_ids.update(disk_id_to_stat_map.iterkeys())
+disk_ids.update(six.iterkeys(disk_id_to_stat_map))
for disk_id in disk_ids:

View File

@@ -21,6 +21,8 @@
from pysnmp.entity.rfc3413.oneliner import cmdgen
+import six
from ceilometer.hardware.inspector import base
@@ -347,7 +349,7 @@ class SNMPInspector(base.Inspector):
@classmethod
def construct_metadata(cls, oid_cache, meta_defs, suffix=''):
metadata = {}
-for key, oid_def in meta_defs.iteritems():
+for key, oid_def in six.iteritems(meta_defs):
metadata[key] = cls.get_oid_value(oid_cache, oid_def, suffix)
return metadata
@@ -423,7 +425,7 @@ class SNMPInspector(base.Inspector):
# populate the oid into cache
self._query_oids(host, [self._interface_ip_oid], cache, True)
ip_addr = ''
-for k, v in oid_cache.iteritems():
+for k, v in six.iteritems(oid_cache):
if k.startswith(self._interface_ip_oid) and v == int(suffix[1:]):
ip_addr = k.replace(self._interface_ip_oid + ".", "")
metadata.update(ip=ip_addr)

View File

@@ -184,7 +184,7 @@ def improve_keys(data, metaquery=False):
return data
if metaquery:
-for key in data.iterkeys():
+for key in six.iterkeys(data):
if '.$' in key:
key_list = []
for k in quote_key(key):

View File

@@ -403,12 +403,12 @@ class TestEvaluate(base.TestEvaluatorBase):
avgs = [self._get_stat('avg',
threshold + (v if v < 10 else -v),
count=20 if v < 10 else 1)
-for v in xrange(1, 11)]
+for v in moves.xrange(1, 11)]
threshold = self.alarms[1].rule['threshold']
maxs = [self._get_stat('max',
threshold - (v if v < 7 else -v),
count=20 if v < 7 else 1)
-for v in xrange(8)]
+for v in moves.xrange(8)]
self.api_client.statistics.list.side_effect = [avgs, maxs]
self._evaluate_all_alarms()
self._assert_all_alarms('alarm' if exclude_outliers else 'ok')
@@ -445,12 +445,12 @@ class TestEvaluate(base.TestEvaluatorBase):
avgs = [self._get_stat('avg',
threshold - (v if v < 9 else -v),
count=20 if v < 9 else 1)
-for v in xrange(10)]
+for v in moves.xrange(10)]
threshold = self.alarms[1].rule['threshold']
maxs = [self._get_stat('max',
threshold + (v if v < 8 else -v),
count=20 if v < 8 else 1)
-for v in xrange(1, 9)]
+for v in moves.xrange(1, 9)]
self.api_client.statistics.list.side_effect = [avgs, maxs]
self._evaluate_all_alarms()
self._assert_all_alarms('ok' if exclude_outliers else 'alarm')

View File

@@ -1460,7 +1460,7 @@ class TestSelectableAggregates(v2.FunctionalTest,
# add a large number of datapoints that won't impact on cardinality
# if the computation logic is tolerant of different DB behavior on
# larger numbers of samples per-period
-for i in xrange(200):
+for i in range(200):
s = sample.Sample(
'instance',
sample.TYPE_GAUGE,

View File

@@ -57,7 +57,7 @@ class FakeRequest(object):
if 'wsgi.input' not in environ:
environ['wsgi.input'] = six.moves.cStringIO('')
-for header, value in headers.iteritems():
+for header, value in six.iteritems(headers):
environ['HTTP_%s' % header.upper()] = value
self.environ = environ

View File

@@ -19,7 +19,7 @@
"""
import datetime
-import repr
+from six.moves import reprlib
import mock
from oslo_utils import timeutils
@@ -62,7 +62,7 @@ class EventTypeTest(tests_db.TestBase):
self.assertNotEqual(et1.id, et2.id)
self.assertNotEqual(et1.desc, et2.desc)
# Test the method __repr__ returns a string
-self.assertTrue(repr.repr(et2))
+self.assertTrue(reprlib.repr(et2))
@tests_db.run_with('sqlite', 'mysql', 'pgsql')
@@ -100,7 +100,7 @@ class EventTest(tests_db.TestBase):
def test_event_repr(self):
ev = sql_models.Event('msg_id', None, False, {})
ev.id = 100
-self.assertTrue(repr.repr(ev))
+self.assertTrue(reprlib.repr(ev))
@tests_db.run_with('sqlite', 'mysql', 'pgsql')

View File

@@ -27,6 +27,7 @@ import uuid
import make_test_data
from oslo_context import context
+from six import moves
from ceilometer import messaging
from ceilometer import service
@@ -53,7 +54,7 @@ def generate_data(rpc_client, make_data_args, samples_count,
make_data_args.resource_id = None
resources_list = [str(uuid.uuid4())
-for _ in xrange(resources_count)]
+for _ in moves.xrange(resources_count)]
resource_samples = {resource: 0 for resource in resources_list}
batch = []
count = 0