cap cpu_util

Deriving cpu_util from cputime is not exact, as it relies on the
host's timing and the pollster's completely independent timing. This
can cause precision issues with nanosecond timing, resulting in
calculations above 100%. This change sets a cap so that cpu_util can
report at most 100% CPU utilisation.

Change-Id: I80c099d8618833794ef19e9497cfad4db7912851
Closes-Bug: #1527620
(cherry picked from commit 41d940e369)
This commit is contained in:
gord chung 2017-06-20 22:16:26 +00:00 committed by Julien Danjou
parent 2f69d50aca
commit cf895bac40
3 changed files with 61 additions and 1 deletions

View File

@ -1092,6 +1092,63 @@ class BasePipelineTestCase(base.BaseTestCase):
pipe.flush()
self.assertEqual(0, len(publisher.samples))
def test_rate_of_change_max(self):
    """Verify the 'max' target option caps cpu_util at 100%.

    Two cumulative 'cpu' samples are published 10 seconds apart; the
    second one carries extra nanoseconds (jitter) so the raw rate
    computes to slightly more than 100%. The transformer must clamp
    the published gauge to exactly 100.
    """
    scale_expr = "100.0 / (10**9 * (resource_metadata.cpu_number or 1))"
    transformer_cfg = [{
        'name': 'rate_of_change',
        'parameters': {
            'source': {},
            'target': {'name': 'cpu_util',
                       'unit': '%',
                       'type': sample.TYPE_GAUGE,
                       'scale': scale_expr,
                       'max': 100},
        },
    }]
    self._set_pipeline_cfg('transformers', transformer_cfg)
    self._set_pipeline_cfg('counters', ['cpu'])
    manager = pipeline.PipelineManager(
        self.cfg2file(self.pipeline_cfg), self.transformer_manager)
    pipe = manager.pipelines[0]

    start = timeutils.utcnow()
    end = start + datetime.timedelta(seconds=10)
    # Extra nanoseconds that push the computed utilisation past 100%.
    jitter = 12345

    def _cpu_sample(ts, volume):
        # Build a cumulative 'cpu' sample for a fixed 4-vCPU resource.
        return sample.Sample(
            name='cpu',
            type=sample.TYPE_CUMULATIVE,
            volume=volume,
            unit='ns',
            user_id='test_user',
            project_id='test_proj',
            resource_id='test_resource',
            timestamp=ts.isoformat(),
            resource_metadata={'cpu_number': 4},
        )

    pipe.publish_data([
        _cpu_sample(start, 125000000000),
        _cpu_sample(end, 165000000000 + jitter),
    ])
    publisher = pipe.publishers[0]
    self.assertEqual(1, len(publisher.samples))
    # Without the cap this would be just over 100; the cap clamps it.
    self.assertEqual(100, publisher.samples[0].volume)
@mock.patch('ceilometer.transformer.conversions.LOG')
def test_rate_of_change_out_of_order(self, the_log):
s = "100.0 / (10**9 * (resource_metadata.cpu_number or 1))"

View File

@ -128,6 +128,7 @@ class ScalingTransformer(BaseConversionTransformer):
super(ScalingTransformer, self).__init__(source=source, target=target,
**kwargs)
self.scale = self.target.get('scale')
self.max = self.target.get('max')
LOG.debug('scaling conversion transformer with source:'
' %(source)s target: %(target)s:', {'source': self.source,
'target': self.target})
@ -145,11 +146,12 @@ class ScalingTransformer(BaseConversionTransformer):
def _convert(self, s, growth=1):
"""Transform the appropriate sample fields."""
volume = self._scale(s) * growth
return sample.Sample(
name=self._map(s, 'name'),
unit=self._map(s, 'unit'),
type=self.target.get('type', s.type),
volume=self._scale(s) * growth,
volume=min(volume, self.max) if self.max else volume,
user_id=s.user_id,
project_id=s.project_id,
resource_id=s.resource_id,

View File

@ -48,6 +48,7 @@ sinks:
name: "cpu_util"
unit: "%"
type: "gauge"
max: 100
scale: "100.0 / (10**9 * (resource_metadata.cpu_number or 1))"
publishers:
- notifier://