Merge "Add Datasource Abstraction"

commit cca3e75ac1
Zuul 2017-12-26 03:02:36 +00:00, committed by Gerrit Code Review
10 changed files with 719 additions and 12 deletions

watcher/datasource/base.py (new file, 112 lines)

@@ -0,0 +1,112 @@
# -*- encoding: utf-8 -*-
# Copyright 2017 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class DataSourceBase(object):
METRIC_MAP = dict(
ceilometer=dict(host_cpu_usage='compute.node.cpu.percent',
instance_cpu_usage='cpu_util',
instance_l3_cache_usage='cpu_l3_cache',
host_outlet_temp=(
'hardware.ipmi.node.outlet_temperature'),
host_airflow='hardware.ipmi.node.airflow',
host_inlet_temp='hardware.ipmi.node.temperature',
host_power='hardware.ipmi.node.power',
instance_ram_usage='memory.resident',
instance_ram_allocated='memory',
instance_root_disk_size='disk.root.size',
host_memory_usage='hardware.memory.used', ),
gnocchi=dict(host_cpu_usage='compute.node.cpu.percent',
instance_cpu_usage='cpu_util',
instance_l3_cache_usage=None,
host_outlet_temp='hardware.ipmi.node.outlet_temperature',
host_airflow='hardware.ipmi.node.airflow',
host_inlet_temp='hardware.ipmi.node.temperature',
host_power='hardware.ipmi.node.power',
instance_ram_usage='memory.resident',
instance_ram_allocated='memory',
instance_root_disk_size='disk.root.size',
host_memory_usage='hardware.memory.used'
),
monasca=dict(host_cpu_usage='cpu.percent',
instance_cpu_usage='vm.cpu.utilization_perc',
instance_l3_cache_usage=None,
host_outlet_temp=None,
host_airflow=None,
host_inlet_temp=None,
host_power=None,
instance_ram_usage=None,
instance_ram_allocated=None,
instance_root_disk_size=None,
host_memory_usage=None
),
)
@abc.abstractmethod
def get_host_cpu_usage(self, resource_id, period, aggregate,
granularity=None):
pass
@abc.abstractmethod
def get_instance_cpu_usage(self, resource_id, period, aggregate,
granularity=None):
pass
@abc.abstractmethod
def get_host_memory_usage(self, resource_id, period, aggregate,
granularity=None):
pass
@abc.abstractmethod
def get_instance_memory_usage(self, resource_id, period, aggregate,
granularity=None):
pass
@abc.abstractmethod
def get_instance_l3_cache_usage(self, resource_id, period, aggregate,
granularity=None):
pass
@abc.abstractmethod
def get_instance_ram_allocated(self, resource_id, period, aggregate,
granularity=None):
pass
@abc.abstractmethod
def get_instance_root_disk_allocated(self, resource_id, period, aggregate,
granularity=None):
pass
@abc.abstractmethod
def get_host_outlet_temperature(self, resource_id, period, aggregate,
granularity=None):
pass
@abc.abstractmethod
def get_host_inlet_temperature(self, resource_id, period, aggregate,
granularity=None):
pass
@abc.abstractmethod
def get_host_airflow(self, resource_id, period, aggregate,
granularity=None):
pass
@abc.abstractmethod
def get_host_power(self, resource_id, period, aggregate, granularity=None):
pass
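For reference, a minimal lookup sketch (not part of this change): the METRIC_MAP keys are the generic metric names used by the abstract getters above, and a value of None marks a metric the backend does not expose.
from watcher.datasource import base

metric_map = base.DataSourceBase.METRIC_MAP
metric_map['gnocchi']['host_cpu_usage']    # 'compute.node.cpu.percent'
metric_map['monasca']['host_outlet_temp']  # None: not provided by Monasca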

watcher/datasource/ceilometer.py

@@ -24,9 +24,14 @@ from oslo_utils import timeutils
from watcher._i18n import _
from watcher.common import clients
from watcher.common import exception
from watcher.datasource import base
class CeilometerHelper(object):
class CeilometerHelper(base.DataSourceBase):
NAME = 'ceilometer'
METRIC_MAP = base.DataSourceBase.METRIC_MAP['ceilometer']
def __init__(self, osc=None):
""":param osc: an OpenStackClients instance"""
self.osc = osc if osc else clients.OpenStackClients()
@@ -146,6 +151,8 @@ class CeilometerHelper(object):
"""
end_time = datetime.datetime.utcnow()
if aggregate == 'mean':
aggregate = 'avg'
start_time = end_time - datetime.timedelta(seconds=int(period))
query = self.build_query(
resource_id=resource_id, start_time=start_time, end_time=end_time)
@@ -182,3 +189,69 @@ class CeilometerHelper(object):
return samples[-1]._info['counter_volume']
else:
return False
def get_host_cpu_usage(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('host_cpu_usage')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
def get_instance_cpu_usage(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('instance_cpu_usage')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
def get_host_memory_usage(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('host_memory_usage')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
def get_instance_memory_usage(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('instance_ram_usage')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
def get_instance_l3_cache_usage(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('instance_l3_cache_usage')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
def get_instance_ram_allocated(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('instance_ram_allocated')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
def get_instance_root_disk_allocated(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('instance_root_disk_size')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
def get_host_outlet_temperature(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('host_outlet_temp')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
def get_host_inlet_temperature(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('host_inlet_temp')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
def get_host_airflow(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('host_airflow')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
def get_host_power(self, resource_id, period, aggregate,
granularity=None):
meter_name = self.METRIC_MAP.get('host_power')
return self.statistic_aggregation(resource_id, meter_name, period,
aggregate=aggregate)
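A minimal usage sketch for the helper above (not part of this change; it assumes a reachable Ceilometer endpoint and reuses the 'compute1' resource id from the tests further below). Each getter resolves its meter name through METRIC_MAP and delegates to statistic_aggregation; the granularity argument is accepted only for interface compatibility.
from watcher.datasource import ceilometer as ceil

helper = ceil.CeilometerHelper()
# Mean host CPU usage over the last 600 seconds.
host_cpu = helper.get_host_cpu_usage('compute1', 600, 'mean')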

watcher/datasource/gnocchi.py

@@ -17,6 +17,7 @@
# limitations under the License.
from datetime import datetime
from datetime import timedelta
import time
from oslo_config import cfg
@@ -25,12 +26,16 @@ from oslo_log import log
from watcher.common import clients
from watcher.common import exception
from watcher.common import utils as common_utils
from watcher.datasource import base
CONF = cfg.CONF
LOG = log.getLogger(__name__)
class GnocchiHelper(object):
class GnocchiHelper(base.DataSourceBase):
NAME = 'gnocchi'
METRIC_MAP = base.DataSourceBase.METRIC_MAP['gnocchi']
def __init__(self, osc=None):
""":param osc: an OpenStackClients instance"""
@@ -46,13 +51,13 @@ class GnocchiHelper(object):
time.sleep(CONF.gnocchi_client.query_timeout)
raise
def statistic_aggregation(self,
resource_id,
metric,
granularity,
start_time=None,
stop_time=None,
aggregation='mean'):
def _statistic_aggregation(self,
resource_id,
metric,
granularity,
start_time=None,
stop_time=None,
aggregation='mean'):
"""Representing a statistic aggregate by operators
:param metric: metric name of which we want the statistics
@@ -102,3 +107,79 @@ class GnocchiHelper(object):
# return value of latest measure
# measure has structure [time, granularity, value]
return statistics[-1][2]
def statistic_aggregation(self, resource_id, metric, period, granularity,
aggregation='mean'):
stop_time = datetime.utcnow()
start_time = stop_time - timedelta(seconds=(int(period)))
return self._statistic_aggregation(
resource_id=resource_id,
metric=metric,
granularity=granularity,
start_time=start_time,
stop_time=stop_time,
aggregation=aggregation)
def get_host_cpu_usage(self, resource_id, period, aggregate,
granularity=300):
meter_name = self.METRIC_MAP.get('host_cpu_usage')
return self.statistic_aggregation(resource_id, meter_name, period,
granularity, aggregation=aggregate)
def get_instance_cpu_usage(self, resource_id, period, aggregate,
granularity=300):
meter_name = self.METRIC_MAP.get('instance_cpu_usage')
return self.statistic_aggregation(resource_id, meter_name, period,
granularity, aggregation=aggregate)
def get_host_memory_usage(self, resource_id, period, aggregate,
granularity=300):
meter_name = self.METRIC_MAP.get('host_memory_usage')
return self.statistic_aggregation(resource_id, meter_name, period,
granularity, aggregation=aggregate)
def get_instance_memory_usage(self, resource_id, period, aggregate,
granularity=300):
meter_name = self.METRIC_MAP.get('instance_ram_usage')
return self.statistic_aggregation(resource_id, meter_name, period,
granularity, aggregation=aggregate)
def get_instance_l3_cache_usage(self, resource_id, period, aggregate,
granularity=300):
raise NotImplementedError
def get_instance_ram_allocated(self, resource_id, period, aggregate,
granularity=300):
meter_name = self.METRIC_MAP.get('instance_ram_allocated')
return self.statistic_aggregation(resource_id, meter_name, period,
granularity, aggregation=aggregate)
def get_instance_root_disk_allocated(self, resource_id, period, aggregate,
granularity=300):
meter_name = self.METRIC_MAP.get('instance_root_disk_size')
return self.statistic_aggregation(resource_id, meter_name, period,
granularity, aggregation=aggregate)
def get_host_outlet_temperature(self, resource_id, period, aggregate,
granularity=300):
meter_name = self.METRIC_MAP.get('host_outlet_temp')
return self.statistic_aggregation(resource_id, meter_name, period,
granularity, aggregation=aggregate)
def get_host_inlet_temperature(self, resource_id, period, aggregate,
granularity=300):
meter_name = self.METRIC_MAP.get('host_inlet_temp')
return self.statistic_aggregation(resource_id, meter_name, period,
granularity, aggregation=aggregate)
def get_host_airflow(self, resource_id, period, aggregate,
granularity=300):
meter_name = self.METRIC_MAP.get('host_airflow')
return self.statistic_aggregation(resource_id, meter_name, period,
granularity, aggregation=aggregate)
def get_host_power(self, resource_id, period, aggregate,
granularity=300):
meter_name = self.METRIC_MAP.get('host_power')
return self.statistic_aggregation(resource_id, meter_name, period,
granularity, aggregation=aggregate)
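The corresponding sketch for the Gnocchi helper (again an illustration only, assuming a reachable Gnocchi endpoint): unlike the Ceilometer getters, these forward a granularity, defaulting to 300 seconds, and statistic_aggregation converts the relative period into an absolute start/stop window.
from watcher.datasource import gnocchi as gnoc

helper = gnoc.GnocchiHelper()
# Mean host CPU usage over the last 600 seconds at 300-second granularity.
host_cpu = helper.get_host_cpu_usage('compute1', 600, 'mean', granularity=300)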

watcher/datasource/manager.py (new file)

@@ -0,0 +1,78 @@
# -*- encoding: utf-8 -*-
# Copyright 2017 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log
from watcher.common import exception
from watcher.datasource import base
from watcher.datasource import ceilometer as ceil
from watcher.datasource import gnocchi as gnoc
from watcher.datasource import monasca as mon
LOG = log.getLogger(__name__)
class DataSourceManager(object):
def __init__(self, config=None, osc=None):
self.osc = osc
self.config = config
self._ceilometer = None
self._monasca = None
self._gnocchi = None
self.metric_map = base.DataSourceBase.METRIC_MAP
self.datasources = self.config.datasource
@property
def ceilometer(self):
if self._ceilometer is None:
self._ceilometer = ceil.CeilometerHelper(osc=self.osc)
return self._ceilometer
@ceilometer.setter
def ceilometer(self, ceilometer):
self._ceilometer = ceilometer
@property
def monasca(self):
if self._monasca is None:
self._monasca = mon.MonascaHelper(osc=self.osc)
return self._monasca
@monasca.setter
def monasca(self, monasca):
self._monasca = monasca
@property
def gnocchi(self):
if self._gnocchi is None:
self._gnocchi = gnoc.GnocchiHelper(osc=self.osc)
return self._gnocchi
@gnocchi.setter
def gnocchi(self, gnocchi):
self._gnocchi = gnocchi
def get_backend(self, metrics):
for datasource in self.datasources:
no_metric = False
for metric in metrics:
if (metric not in self.metric_map[datasource] or
self.metric_map[datasource].get(metric) is None):
no_metric = True
break
if not no_metric:
return getattr(self, datasource)
raise exception.NoSuchMetric()
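A minimal selection sketch (not part of this change; the config object is assumed to expose a datasource list, as in the unit test further below): get_backend returns the first configured helper whose METRIC_MAP entry covers every requested metric and raises NoSuchMetric otherwise.
from watcher.datasource import manager as ds_manager

# 'config' and 'osc' are placeholders for a strategy config carrying
# config.datasource (e.g. ['gnocchi', 'ceilometer', 'monasca']) and an
# OpenStackClients instance.
manager = ds_manager.DataSourceManager(config=config, osc=osc)
backend = manager.get_backend(['host_cpu_usage', 'instance_cpu_usage'])
host_cpu = backend.get_host_cpu_usage('compute1', 600, 'mean')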

watcher/datasource/monasca.py

@@ -21,9 +21,13 @@ import datetime
from monascaclient import exc
from watcher.common import clients
from watcher.datasource import base
class MonascaHelper(object):
class MonascaHelper(base.DataSourceBase):
NAME = 'monasca'
METRIC_MAP = base.DataSourceBase.METRIC_MAP['monasca']
def __init__(self, osc=None):
""":param osc: an OpenStackClients instance"""
@@ -106,6 +110,9 @@ class MonascaHelper(object):
start_time, end_time, period
)
if aggregate == 'mean':
aggregate = 'avg'
raw_kwargs = dict(
name=meter_name,
start_time=start_timestamp,
@@ -122,3 +129,77 @@ class MonascaHelper(object):
f=self.monasca.metrics.list_statistics, **kwargs)
return statistics
def get_host_cpu_usage(self, resource_id, period, aggregate,
granularity=None):
metric_name = self.METRIC_MAP.get('host_cpu_usage')
node_uuid = resource_id.split('_')[0]
statistics = self.statistic_aggregation(
meter_name=metric_name,
dimensions=dict(hostname=node_uuid),
period=period,
aggregate=aggregate
)
cpu_usage = None
for stat in statistics:
avg_col_idx = stat['columns'].index('avg')
values = [r[avg_col_idx] for r in stat['statistics']]
value = float(sum(values)) / len(values)
cpu_usage = value
return cpu_usage
def get_instance_cpu_usage(self, resource_id, period, aggregate,
granularity=None):
metric_name = self.METRIC_MAP.get('instance_cpu_usage')
statistics = self.statistic_aggregation(
meter_name=metric_name,
dimensions=dict(hostname=resource_id),
period=period,
aggregate=aggregate
)
cpu_usage = None
for stat in statistics:
avg_col_idx = stat['columns'].index('avg')
values = [r[avg_col_idx] for r in stat['statistics']]
value = float(sum(values)) / len(values)
cpu_usage = value
return cpu_usage
def get_host_memory_usage(self, resource_id, period, aggregate,
granularity=None):
raise NotImplementedError
def get_instance_memory_usage(self, resource_id, period, aggregate,
granularity=None):
raise NotImplementedError
def get_instance_l3_cache_usage(self, resource_id, period, aggregate,
granularity=None):
raise NotImplementedError
def get_instance_ram_allocated(self, resource_id, period, aggregate,
granularity=None):
raise NotImplementedError
def get_instance_root_disk_allocated(self, resource_id, period, aggregate,
granularity=None):
raise NotImplementedError
def get_host_outlet_temperature(self, resource_id, period, aggregate,
granularity=None):
raise NotImplementedError
def get_host_inlet_temperature(self, resource_id, period, aggregate,
granularity=None):
raise NotImplementedError
def get_host_airflow(self, resource_id, period, aggregate,
granularity=None):
raise NotImplementedError
def get_host_power(self, resource_id, period, aggregate,
granularity=None):
raise NotImplementedError
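Note on the host getters above: the compute-node resource id is split on '_' and only the first component is passed as the Monasca hostname dimension, and the returned value is the mean of the per-interval averages (the test further below averages 0.0, 0.9 and 0.9 to 0.6). A minimal sketch, assuming a reachable Monasca endpoint:
from watcher.datasource import monasca as mon

helper = mon.MonascaHelper()
# 'compute1_compute1' mirrors the resource id used in the tests; only
# 'compute1' (the part before '_') reaches Monasca as the hostname dimension.
host_cpu = helper.get_host_cpu_usage('compute1_compute1', 600, 'mean')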

watcher/decision_engine/strategy/strategies/base.py

@@ -46,6 +46,7 @@ from watcher.common import context
from watcher.common import exception
from watcher.common.loader import loadable
from watcher.common import utils
from watcher.datasource import manager as ds_manager
from watcher.decision_engine.loading import default as loading
from watcher.decision_engine.model.collector import manager
from watcher.decision_engine.solution import default
@@ -60,6 +61,8 @@ class BaseStrategy(loadable.Loadable):
Solution for a given Goal.
"""
DATASOURCE_METRICS = []
def __init__(self, config, osc=None):
"""Constructor: the signature should be identical within the subclasses
@@ -84,6 +87,7 @@ class BaseStrategy(loadable.Loadable):
self._storage_model = None
self._input_parameters = utils.Struct()
self._audit_scope = None
self._datasource_backend = None
@classmethod
@abc.abstractmethod
@@ -224,6 +228,15 @@ class BaseStrategy(loadable.Loadable):
"""
return {}
@property
def datasource_backend(self):
if not self._datasource_backend:
self._datasource_backend = ds_manager.DataSourceManager(
config=self.config,
osc=self.osc
).get_backend(self.DATASOURCE_METRICS)
return self._datasource_backend
@property
def input_parameters(self):
return self._input_parameters
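A hedged sketch of how a strategy is expected to use the new property (the class name, metric list and helper method are illustrative, and the other abstract methods of BaseStrategy are omitted): declaring DATASOURCE_METRICS lets datasource_backend pick the first configured datasource able to serve all of them.
class DummyCPUStrategy(BaseStrategy):
    # Generic metric names (keys of DataSourceBase.METRIC_MAP) required
    # by this strategy; consumed by the datasource_backend property.
    DATASOURCE_METRICS = ['host_cpu_usage', 'instance_cpu_usage']

    def _host_cpu(self, node_id):
        # Delegates to whichever helper get_backend() selected
        # (gnocchi, ceilometer or monasca, in configured order).
        return self.datasource_backend.get_host_cpu_usage(node_id, 600, 'mean')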

watcher/tests/datasource/test_ceilometer_helper.py

@@ -93,3 +93,108 @@ class TestCeilometerHelper(base.BaseTestCase):
cm = ceilometer_helper.CeilometerHelper()
val = cm.statistic_list(meter_name="cpu_util")
self.assertEqual(expected_value, val)
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_host_cpu_usage(self, mock_aggregation, mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_host_cpu_usage('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_cpu_usage'], 600,
aggregate='mean')
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_instance_cpu_usage(self, mock_aggregation, mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_instance_cpu_usage('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['instance_cpu_usage'], 600,
aggregate='mean')
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_host_memory_usage(self, mock_aggregation, mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_host_memory_usage('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_memory_usage'], 600,
aggregate='mean')
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_instance_memory_usage(self, mock_aggregation,
mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_instance_memory_usage('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['instance_ram_usage'], 600,
aggregate='mean')
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_instance_l3_cache_usage(self, mock_aggregation,
mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_instance_l3_cache_usage('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['instance_l3_cache_usage'], 600,
aggregate='mean')
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_instance_ram_allocated(self, mock_aggregation,
mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_instance_ram_allocated('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['instance_ram_allocated'], 600,
aggregate='mean')
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_instance_root_disk_allocated(self, mock_aggregation,
mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_instance_root_disk_allocated('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['instance_root_disk_size'], 600,
aggregate='mean')
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_host_outlet_temperature(self, mock_aggregation,
mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_host_outlet_temperature('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_outlet_temp'], 600,
aggregate='mean')
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_host_inlet_temperature(self, mock_aggregation,
mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_host_inlet_temperature('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_inlet_temp'], 600,
aggregate='mean')
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_host_airflow(self, mock_aggregation, mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_host_airflow('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_airflow'], 600,
aggregate='mean')
@mock.patch.object(ceilometer_helper.CeilometerHelper,
'statistic_aggregation')
def test_get_host_power(self, mock_aggregation, mock_ceilometer):
helper = ceilometer_helper.CeilometerHelper()
helper.get_host_power('compute1', 600, 'mean')
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_power'], 600,
aggregate='mean')

watcher/tests/datasource/test_gnocchi_helper.py

@@ -39,7 +39,7 @@ class TestGnocchiHelper(base.BaseTestCase):
mock_gnocchi.return_value = gnocchi
helper = gnocchi_helper.GnocchiHelper()
result = helper.statistic_aggregation(
result = helper._statistic_aggregation(
resource_id='16a86790-327a-45f9-bc82-45839f062fdc',
metric='cpu_util',
granularity=360,
@@ -59,10 +59,96 @@ class TestGnocchiHelper(base.BaseTestCase):
helper = gnocchi_helper.GnocchiHelper()
self.assertRaises(
exception.InvalidParameter, helper.statistic_aggregation,
exception.InvalidParameter, helper._statistic_aggregation,
resource_id='16a86790-327a-45f9-bc82-45839f062fdc',
metric='cpu_util',
granularity=360,
start_time="2017-02-02T09:00:00.000000",
stop_time=timeutils.parse_isotime("2017-02-02T10:00:00.000000"),
aggregation='mean')
@mock.patch.object(gnocchi_helper.GnocchiHelper, 'statistic_aggregation')
def test_get_host_cpu_usage(self, mock_aggregation, mock_gnocchi):
helper = gnocchi_helper.GnocchiHelper()
helper.get_host_cpu_usage('compute1', 600, 'mean', granularity=300)
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_cpu_usage'], 600, 300,
aggregation='mean')
@mock.patch.object(gnocchi_helper.GnocchiHelper, 'statistic_aggregation')
def test_get_instance_cpu_usage(self, mock_aggregation, mock_gnocchi):
helper = gnocchi_helper.GnocchiHelper()
helper.get_instance_cpu_usage('compute1', 600, 'mean', granularity=300)
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['instance_cpu_usage'], 600, 300,
aggregation='mean')
@mock.patch.object(gnocchi_helper.GnocchiHelper, 'statistic_aggregation')
def test_get_host_memory_usage(self, mock_aggregation, mock_gnocchi):
helper = gnocchi_helper.GnocchiHelper()
helper.get_host_memory_usage('compute1', 600, 'mean', granularity=300)
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_memory_usage'], 600, 300,
aggregation='mean')
@mock.patch.object(gnocchi_helper.GnocchiHelper, 'statistic_aggregation')
def test_get_instance_memory_usage(self, mock_aggregation, mock_gnocchi):
helper = gnocchi_helper.GnocchiHelper()
helper.get_instance_memory_usage('compute1', 600, 'mean',
granularity=300)
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['instance_ram_usage'], 600, 300,
aggregation='mean')
@mock.patch.object(gnocchi_helper.GnocchiHelper, 'statistic_aggregation')
def test_get_instance_ram_allocated(self, mock_aggregation, mock_gnocchi):
helper = gnocchi_helper.GnocchiHelper()
helper.get_instance_ram_allocated('compute1', 600, 'mean',
granularity=300)
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['instance_ram_allocated'], 600, 300,
aggregation='mean')
@mock.patch.object(gnocchi_helper.GnocchiHelper, 'statistic_aggregation')
def test_get_instance_root_disk_allocated(self, mock_aggregation,
mock_gnocchi):
helper = gnocchi_helper.GnocchiHelper()
helper.get_instance_root_disk_allocated('compute1', 600, 'mean',
granularity=300)
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['instance_root_disk_size'], 600,
300, aggregation='mean')
@mock.patch.object(gnocchi_helper.GnocchiHelper, 'statistic_aggregation')
def test_get_host_outlet_temperature(self, mock_aggregation, mock_gnocchi):
helper = gnocchi_helper.GnocchiHelper()
helper.get_host_outlet_temperature('compute1', 600, 'mean',
granularity=300)
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_outlet_temp'], 600, 300,
aggregation='mean')
@mock.patch.object(gnocchi_helper.GnocchiHelper, 'statistic_aggregation')
def test_get_host_inlet_temperature(self, mock_aggregation, mock_gnocchi):
helper = gnocchi_helper.GnocchiHelper()
helper.get_host_inlet_temperature('compute1', 600, 'mean',
granularity=300)
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_inlet_temp'], 600, 300,
aggregation='mean')
@mock.patch.object(gnocchi_helper.GnocchiHelper, 'statistic_aggregation')
def test_get_host_airflow(self, mock_aggregation, mock_gnocchi):
helper = gnocchi_helper.GnocchiHelper()
helper.get_host_airflow('compute1', 600, 'mean', granularity=300)
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_airflow'], 600, 300,
aggregation='mean')
@mock.patch.object(gnocchi_helper.GnocchiHelper, 'statistic_aggregation')
def test_get_host_power(self, mock_aggregation, mock_gnocchi):
helper = gnocchi_helper.GnocchiHelper()
helper.get_host_power('compute1', 600, 'mean', granularity=300)
mock_aggregation.assert_called_once_with(
'compute1', helper.METRIC_MAP['host_power'], 600, 300,
aggregation='mean')

watcher/tests/datasource/test_manager.py (new file)

@@ -0,0 +1,43 @@
# -*- encoding: utf-8 -*-
# Copyright (c) 2017 Servionica
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from watcher.common import exception
from watcher.datasource import gnocchi as gnoc
from watcher.datasource import manager as ds_manager
from watcher.tests import base
class TestDataSourceManager(base.BaseTestCase):
@mock.patch.object(gnoc, 'GnocchiHelper')
def test_get_backend(self, mock_gnoc):
manager = ds_manager.DataSourceManager(
config=mock.MagicMock(
datasource=['gnocchi', 'ceilometer', 'monasca']),
osc=mock.MagicMock())
backend = manager.get_backend(['host_cpu_usage',
'instance_cpu_usage'])
self.assertEqual(backend, manager.gnocchi)
def test_get_backend_wrong_metric(self):
manager = ds_manager.DataSourceManager(
config=mock.MagicMock(
datasource=['gnocchi', 'ceilometer', 'monasca']),
osc=mock.MagicMock())
self.assertRaises(exception.NoSuchMetric, manager.get_backend,
['host_cpu', 'instance_cpu_usage'])

watcher/tests/datasource/test_monasca_helper.py

@@ -98,3 +98,38 @@ class TestMonascaHelper(base.BaseTestCase):
helper = monasca_helper.MonascaHelper()
val = helper.statistics_list(meter_name="cpu.percent", dimensions={})
self.assertEqual(expected_result, val)
@mock.patch.object(monasca_helper.MonascaHelper, 'statistic_aggregation')
def test_get_host_cpu_usage(self, mock_aggregation, mock_monasca):
node = "compute1_compute1"
mock_aggregation.return_value = [{
'columns': ['timestamp', 'avg'],
'dimensions': {
'hostname': 'rdev-indeedsrv001',
'service': 'monasca'},
'id': '0',
'name': 'cpu.percent',
'statistics': [
['2016-07-29T12:45:00Z', 0.0],
['2016-07-29T12:50:00Z', 0.9],
['2016-07-29T12:55:00Z', 0.9]]}]
helper = monasca_helper.MonascaHelper()
cpu_usage = helper.get_host_cpu_usage(node, 600, 'mean')
self.assertEqual(0.6, cpu_usage)
@mock.patch.object(monasca_helper.MonascaHelper, 'statistic_aggregation')
def test_get_instance_cpu_usage(self, mock_aggregation, mock_monasca):
mock_aggregation.return_value = [{
'columns': ['timestamp', 'avg'],
'dimensions': {
'name': 'vm1',
'service': 'monasca'},
'id': '0',
'name': 'cpu.percent',
'statistics': [
['2016-07-29T12:45:00Z', 0.0],
['2016-07-29T12:50:00Z', 0.9],
['2016-07-29T12:55:00Z', 0.9]]}]
helper = monasca_helper.MonascaHelper()
cpu_usage = helper.get_instance_cpu_usage('vm1', 600, 'mean')
self.assertEqual(0.6, cpu_usage)