Merge "Adding query metric mappings, dimensions, futurist, paging"

This commit is contained in:
Jenkins 2017-03-02 17:45:16 +00:00 committed by Gerrit Code Review
commit cd8b248a70
24 changed files with 3296 additions and 371 deletions

View File

@ -0,0 +1,20 @@
# Copyright 2014 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class NotImplementedError(NotImplementedError):
    """Not-implemented error that carries an HTTP status code.

    WSME reads the ``code`` attribute to decide which HTTP status to
    return for the error.
    """

    # FIXME(jd) Exposing the HTTP code here is a workaround for WSME; the
    # API layer should wrap our methods and translate this exception into
    # a proper HTTP error instead.
    code = 501

View File

@ -0,0 +1,186 @@
#
# Copyright 2016 Hewlett Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Static mapping for Ceilometer static info like unit and type information
"""
import os
import pkg_resources
import yaml
from oslo_config import cfg
from oslo_log import log
from ceilometer.i18n import _LE, _LW
from ceilometer import sample
LOG = log.getLogger(__name__)
# Deployer-facing option (under the [monasca] group) pointing at the YAML
# file that maps ceilometer meters to their static unit/type information.
OPTS = [
    cfg.StrOpt('ceilometer_static_info_mapping',
               default='ceilometer_static_info_mapping.yaml',
               # Fixed typo in the help text ("units an type informaiton").
               help='Configuration mapping file to map ceilometer meters to '
                    'their units and type information'),
]
cfg.CONF.register_opts(OPTS, group='monasca')
class CeilometerStaticMappingDefinitionException(Exception):
    """Raised when a static meter-info mapping entry is malformed."""

    def __init__(self, message, definition_cfg):
        super(CeilometerStaticMappingDefinitionException,
              self).__init__(message)
        # Keep both pieces so __str__ can show the offending definition.
        self.message = message
        self.definition_cfg = definition_cfg

    def __str__(self):
        return '{0} {1}: {2}'.format(self.__class__.__name__,
                                     self.definition_cfg,
                                     self.message)
class CeilometerStaticMappingDefinition(object):
    """Validated wrapper around one static meter-info mapping entry."""

    # Keys every mapping entry must provide with a truthy value.
    REQUIRED_FIELDS = ['name', 'type', 'unit']

    def __init__(self, definition_cfg):
        self.cfg = definition_cfg
        # Reject entries that omit (or blank out) any required key.
        missing = [f for f in self.REQUIRED_FIELDS if not self.cfg.get(f)]
        if missing:
            raise CeilometerStaticMappingDefinitionException(
                _LE("Required fields %s not specified") % missing, self.cfg)
        # Unless 'type' is resolved later via the 'lookup' list, it must
        # be one of the known ceilometer sample types.
        if ('type' not in self.cfg.get('lookup', []) and
                self.cfg['type'] not in sample.TYPES):
            raise CeilometerStaticMappingDefinitionException(
                _LE("Invalid type %s specified") % self.cfg['type'], self.cfg)
def get_config_file():
    """Locate the static info mapping file.

    Tries, in order: the configured path as-is, oslo.config's search
    directories, and finally the packaged default mapping file.
    """
    config_file = cfg.CONF.monasca.ceilometer_static_info_mapping
    if os.path.exists(config_file):
        return config_file
    found = cfg.CONF.find_file(config_file)
    if found:
        return found
    return pkg_resources.resource_filename(
        __name__, "data/ceilometer_static_info_mapping.yaml")
def setup_ceilometer_static_mapping_config():
    """Setup the meters definitions from yaml config file.

    Returns the parsed YAML mapping as a dict, or an empty dict when no
    configuration file can be located.  Re-raises yaml.YAMLError (after
    logging a detailed message) when the file exists but is invalid YAML.
    """
    config_file = get_config_file()
    if config_file is not None:
        LOG.debug("Static Ceilometer mapping file to map static info: %s",
                  config_file)
        with open(config_file) as cf:
            config = cf.read()
        try:
            ceilometer_static_mapping_config = yaml.safe_load(config)
        except yaml.YAMLError as err:
            # problem_mark, when present, pinpoints the syntax error
            # (0-based, hence the +1 for human-readable line/column).
            if hasattr(err, 'problem_mark'):
                mark = err.problem_mark
                errmsg = (_LE("Invalid YAML syntax in static Ceilometer "
                              "Mapping Definitions file %(file)s at line: "
                              "%(line)s, column: %(column)s.")
                          % dict(file=config_file,
                                 line=mark.line + 1,
                                 column=mark.column + 1))
            else:
                errmsg = (_LE("YAML error reading static Ceilometer Mapping "
                              "Definitions file %(file)s") %
                          dict(file=config_file))
            LOG.error(errmsg)
            raise
    else:
        LOG.debug("No static Ceilometer Definitions configuration file "
                  "found! using default config.")
        ceilometer_static_mapping_config = {}
    LOG.debug("Ceilometer Monasca Definitions: %s",
              ceilometer_static_mapping_config)
    return ceilometer_static_mapping_config
def load_definitions(config_def):
    """Build static-mapping definitions from the parsed YAML config.

    Duplicate meter names are skipped with a warning; entries that fail
    validation are logged and ignored rather than aborting the load.

    :param config_def: dict parsed from the mapping YAML file (may be empty)
    :returns: collection of CeilometerStaticMappingDefinition objects
    """
    if not config_def:
        return []
    ceilometer_static_mapping_defs = {}
    # Use .get() so a config file missing the expected top-level key is
    # treated as empty instead of raising KeyError.
    for meter_info_static_map in reversed(
            config_def.get('meter_info_static_map', [])):
        if meter_info_static_map.get('name') in ceilometer_static_mapping_defs:
            # skip duplicate meters
            LOG.warning(_LW("Skipping duplicate Ceilometer Monasca Mapping"
                            " Definition %s") % meter_info_static_map)
            continue
        try:
            md = CeilometerStaticMappingDefinition(meter_info_static_map)
            ceilometer_static_mapping_defs[meter_info_static_map['name']] = md
        except CeilometerStaticMappingDefinitionException as me:
            errmsg = (_LE("Error loading Ceilometer Static Mapping "
                          "Definition : %(err)s") % dict(err=me.message))
            LOG.error(errmsg)
    return ceilometer_static_mapping_defs.values()
class ProcessMappedCeilometerStaticInfo(object):
    """Singleton providing static info for ceilometer meters.

    Responsible for the static information (unit/type) of ceilometer
    meters enabled via the pipeline.yaml configuration.

    get_list_supported_meters: map of supported meter names to definitions.
    get_ceilometer_meter_static_definition: entire definition for the
    provided meter name.
    get_meter_static_info_key_val: specific value for the provided meter
    name and a particular key from its definition.
    """
    _inited = False
    _instance = None

    def __new__(cls, *args, **kwargs):
        """Singleton to avoid duplicated initialization."""
        if not cls._instance:
            # Do not forward *args/**kwargs: object.__new__ raises
            # TypeError on Python 3 when given extra arguments.
            cls._instance = super(ProcessMappedCeilometerStaticInfo,
                                  cls).__new__(cls)
        return cls._instance

    def __init__(self):
        # Only initialize once for the singleton instance.
        if not (self._instance and self._inited):
            self._inited = True
            self._load_mappings()

    def _load_mappings(self):
        # (Re)load definitions from the YAML config and index them by
        # meter name for O(1) lookups.
        self.__definitions = load_definitions(
            setup_ceilometer_static_mapping_config())
        self.__mapped_meter_info_map = dict()
        for d in self.__definitions:
            self.__mapped_meter_info_map[d.cfg['name']] = d

    def get_list_supported_meters(self):
        """Return the mapping of supported meter names to definitions."""
        return self.__mapped_meter_info_map

    def get_ceilometer_meter_static_definition(self, meter_name):
        """Return the full definition for *meter_name* (None if unknown)."""
        return self.__mapped_meter_info_map.get(meter_name)

    def get_meter_static_info_key_val(self, meter_name, key):
        """Return the value of *key* from *meter_name*'s definition.

        NOTE: raises AttributeError when *meter_name* is unknown.
        """
        return self.__mapped_meter_info_map.get(meter_name).cfg[key]

    def reinitialize(self):
        """Force a reload of the mapping configuration."""
        self._load_mappings()

View File

@ -0,0 +1,305 @@
#
# Copyright 2016 Hewlett Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Monasca metric to Ceilometer Meter Mapper
"""
import functools
import os
import pkg_resources
import six
import yaml
from jsonpath_rw_ext import parser
from oslo_config import cfg
from oslo_log import log
from ceilometer.i18n import _LE, _LW
from ceilometer import pipeline
from ceilometer import sample
LOG = log.getLogger(__name__)
OPTS = [
cfg.StrOpt('ceilometer_monasca_metrics_mapping',
default='ceilosca_mapping.yaml',
help='Configuration mapping file to map monasca metrics to '
'ceilometer meters'),
]
cfg.CONF.register_opts(OPTS, group='monasca')
class CeiloscaMappingDefinitionException(Exception):
    """Raised when a ceilosca mapping definition is malformed."""

    def __init__(self, message, definition_cfg):
        super(CeiloscaMappingDefinitionException, self).__init__(message)
        # Keep both pieces so __str__ can show the offending definition.
        self.message = message
        self.definition_cfg = definition_cfg

    def __str__(self):
        return '{0} {1}: {2}'.format(self.__class__.__name__,
                                     self.definition_cfg,
                                     self.message)
class CeiloscaMappingDefinition(object):
    """A single monasca-metric to ceilometer-meter mapping definition.

    Wraps one entry of the mapping YAML file.  Configured values starting
    with '$' are treated as JSONPath expressions and pre-compiled into
    getter callables; literal values are returned as-is by parse_fields.
    """

    # Parser shared by all definitions (compiling parsers is expensive).
    # NOTE: 'ExtentedJsonPathParser' is the upstream class's actual
    # (misspelled) name in jsonpath_rw_ext.
    JSONPATH_RW_PARSER = parser.ExtentedJsonPathParser()

    # Every mapping entry must provide all of these keys.
    REQUIRED_FIELDS = ['name', 'monasca_metric_name', 'type', 'unit', 'source',
                       'resource_metadata', 'resource_id', 'project_id',
                       'user_id', 'region']

    def __init__(self, definition_cfg):
        """Validate *definition_cfg* and pre-compile its field getters.

        :param definition_cfg: dict holding one mapping entry
        :raises CeiloscaMappingDefinitionException: on a missing required
            field, an invalid sample type, or a malformed JSONPath.
        """
        self.cfg = definition_cfg
        missing = [field for field in self.REQUIRED_FIELDS
                   if not self.cfg.get(field)]
        if missing:
            raise CeiloscaMappingDefinitionException(
                _LE("Required fields %s not specified") % missing, self.cfg)
        # Normalize to a list so a single metric name and a list of names
        # are handled uniformly downstream.
        self._monasca_metric_name = self.cfg.get('monasca_metric_name')
        if isinstance(self._monasca_metric_name, six.string_types):
            self._monasca_metric_name = [self._monasca_metric_name]
        # 'type' must be a known sample type unless it is resolved at
        # runtime via the 'lookup' list.
        if ('type' not in self.cfg.get('lookup', []) and
                self.cfg['type'] not in sample.TYPES):
            raise CeiloscaMappingDefinitionException(
                _LE("Invalid type %s specified") % self.cfg['type'], self.cfg)
        # Build per-field getters:
        #  - ints and plain strings (no leading '$') are stored as literals
        #  - a 'resource_metadata' dict gets one JSONPath getter per key
        #  - any other value is compiled as a single JSONPath expression
        self._field_getter = {}
        for name, field in self.cfg.items():
            if name in ["monasca_metric_name", "lookup"] or not field:
                continue
            elif isinstance(field, six.integer_types):
                self._field_getter[name] = field
            elif isinstance(field, six.string_types) and not \
                    field.startswith('$'):
                self._field_getter[name] = field
            elif isinstance(field, dict) and name == 'resource_metadata':
                meta = {}
                for key, val in field.items():
                    parts = self.parse_jsonpath(val)
                    meta[key] = functools.partial(self._parse_jsonpath_field,
                                                  parts)
                self._field_getter['resource_metadata'] = meta
            else:
                parts = self.parse_jsonpath(field)
                self._field_getter[name] = functools.partial(
                    self._parse_jsonpath_field, parts)

    def parse_jsonpath(self, field):
        """Compile *field* as JSONPath, wrapping any parse failure."""
        try:
            parts = self.JSONPATH_RW_PARSER.parse(field)
        except Exception as e:
            raise CeiloscaMappingDefinitionException(_LE(
                "Parse error in JSONPath specification "
                "'%(jsonpath)s': %(err)s")
                % dict(jsonpath=field, err=e), self.cfg)
        return parts

    def parse_fields(self, field, message, all_values=False):
        """Extract *field*'s value(s) from a monasca *message*.

        Returns None for an unknown/empty field, a per-key dict for
        'resource_metadata', the stored literal for constant fields, or
        the JSONPath match (first match only unless *all_values* is True).
        """
        getter = self._field_getter.get(field)
        if not getter:
            return
        elif isinstance(getter, dict):
            dict_val = {}
            for key, val in getter.items():
                dict_val[key] = val(message, all_values)
            return dict_val
        elif callable(getter):
            return getter(message, all_values)
        else:
            return getter

    @staticmethod
    def _parse_jsonpath_field(parts, message, all_values):
        # Collect non-None JSONPath matches; implicitly returns None when
        # nothing matched.
        values = [match.value for match in parts.find(message)
                  if match.value is not None]
        if values:
            if not all_values:
                return values[0]
            return values
def get_config_file():
    """Locate the ceilosca metrics mapping file.

    Tries, in order: the configured path as-is, oslo.config's search
    directories, and finally the packaged default mapping file.
    """
    config_file = cfg.CONF.monasca.ceilometer_monasca_metrics_mapping
    if os.path.exists(config_file):
        return config_file
    found = cfg.CONF.find_file(config_file)
    if found:
        return found
    return pkg_resources.resource_filename(
        __name__, "data/ceilosca_mapping.yaml")
def setup_ceilosca_mapping_config():
    """Setup the meters definitions from yaml config file.

    Returns the parsed YAML mapping as a dict, or an empty dict when no
    configuration file can be located.  Re-raises yaml.YAMLError (after
    logging a detailed message) when the file exists but is invalid YAML.
    """
    config_file = get_config_file()
    if config_file is not None:
        LOG.debug("Ceilometer Monasca Mapping Definitions file: %s",
                  config_file)
        with open(config_file) as cf:
            config = cf.read()
        try:
            ceilosca_mapping_config = yaml.safe_load(config)
        except yaml.YAMLError as err:
            # problem_mark, when present, pinpoints the syntax error
            # (0-based, hence the +1 for human-readable line/column).
            if hasattr(err, 'problem_mark'):
                mark = err.problem_mark
                errmsg = (_LE("Invalid YAML syntax in Ceilometer Monasca "
                              "Mapping Definitions file %(file)s at line: "
                              "%(line)s, column: %(column)s.")
                          % dict(file=config_file,
                                 line=mark.line + 1,
                                 column=mark.column + 1))
            else:
                errmsg = (_LE("YAML error reading Ceilometer Monasca Mapping "
                              "Definitions file %(file)s") %
                          dict(file=config_file))
            LOG.error(errmsg)
            raise
    else:
        LOG.debug("No Ceilometer Monasca Definitions configuration file "
                  "found! using default config.")
        ceilosca_mapping_config = {}
    LOG.debug("Ceilometer Monasca Definitions: %s",
              ceilosca_mapping_config)
    return ceilosca_mapping_config
def load_definitions(config_def):
    """Build ceilosca mapping definitions from the parsed YAML config.

    Duplicate meter names are skipped with a warning; entries that fail
    validation are logged and ignored rather than aborting the load.

    :param config_def: dict parsed from the mapping YAML file (may be empty)
    :returns: collection of CeiloscaMappingDefinition objects
    """
    if not config_def:
        return []
    ceilosca_mapping_defs = {}
    # Use .get() so a config file missing the expected top-level key is
    # treated as empty instead of raising KeyError.
    for meter_metric_map in reversed(config_def.get('meter_metric_map', [])):
        if meter_metric_map.get('name') in ceilosca_mapping_defs:
            # skip duplicate meters
            LOG.warning(_LW("Skipping duplicate Ceilometer Monasca Mapping"
                            " Definition %s") % meter_metric_map)
            continue
        try:
            md = CeiloscaMappingDefinition(meter_metric_map)
            ceilosca_mapping_defs[meter_metric_map['name']] = md
        except CeiloscaMappingDefinitionException as me:
            errmsg = (_LE("Error loading Ceilometer Monasca Mapping "
                          "Definition : %(err)s") % dict(err=me.message))
            LOG.error(errmsg)
    return ceilosca_mapping_defs.values()
class ProcessMappedCeiloscaMetric(object):
    """Singleton managing monasca metrics mapped to ceilometer meters.

    Responsible for the mapped meters and their definitions.

    get_monasca_metric_name: mapped monasca metric name for a ceilometer
    meter name.
    get_list_monasca_metrics: map of mapped metrics to their definitions.
    get_ceilosca_mapped_metric_definition: definition for a provided
    monasca metric name.
    get_ceilosca_mapped_definition_key_val: value of a provided key from
    the mapping definition.
    """
    _inited = False
    _instance = None

    def __new__(cls, *args, **kwargs):
        """Singleton to avoid duplicated initialization."""
        if not cls._instance:
            # Do not forward *args/**kwargs: object.__new__ raises
            # TypeError on Python 3 when given extra arguments.
            cls._instance = super(ProcessMappedCeiloscaMetric,
                                  cls).__new__(cls)
        return cls._instance

    def __init__(self):
        # Only initialize once for the singleton instance.
        if not (self._instance and self._inited):
            self._inited = True
            self._load_mappings()

    def _load_mappings(self):
        # (Re)load the YAML mapping and build both lookup directions:
        # monasca metric name -> definition, and ceilometer meter name ->
        # monasca metric name.
        self.__definitions = load_definitions(
            setup_ceilosca_mapping_config())
        self.__mapped_metric_map = dict()
        self.__mon_metric_to_cm_meter_map = dict()
        for d in self.__definitions:
            self.__mapped_metric_map[d.cfg['monasca_metric_name']] = d
            self.__mon_metric_to_cm_meter_map[d.cfg['name']] = (
                d.cfg['monasca_metric_name'])

    def get_monasca_metric_name(self, ceilometer_meter_name):
        """Return the monasca metric name mapped to a ceilometer meter."""
        return self.__mon_metric_to_cm_meter_map.get(ceilometer_meter_name)

    def get_list_monasca_metrics(self):
        """Return the map of monasca metric names to their definitions."""
        return self.__mapped_metric_map

    def get_ceilosca_mapped_metric_definition(self, monasca_metric_name):
        """Return the definition for *monasca_metric_name* (or None)."""
        return self.__mapped_metric_map.get(monasca_metric_name)

    def get_ceilosca_mapped_definition_key_val(self, monasca_metric_name,
                                               key):
        """Return the value of *key* from a metric's mapping definition.

        NOTE: raises AttributeError when *monasca_metric_name* is unknown.
        """
        return self.__mapped_metric_map.get(monasca_metric_name).cfg[key]

    def reinitialize(self):
        """Force a reload of the mapping configuration."""
        self._load_mappings()
class PipelineReader(object):
    """Singleton exposing the ceilometer meters enabled by pipeline.yaml.

    get_pipeline_meters: return the set of meter names configured in the
    (non-event) pipeline sources.
    """
    _inited = False
    _instance = None

    def __new__(cls, *args, **kwargs):
        """Singleton to avoid duplicated initialization."""
        if not cls._instance:
            # Do not forward *args/**kwargs: object.__new__ raises
            # TypeError on Python 3 when given extra arguments.
            cls._instance = super(PipelineReader, cls).__new__(cls)
        return cls._instance

    def __init__(self):
        # Only initialize once for the singleton instance.
        if not (self._instance and self._inited):
            self._inited = True
            self._load_meters()

    def _load_meters(self):
        # Collect meter names from every non-event pipeline source; a set
        # already de-duplicates, so no membership check is needed.
        self.__pipeline_manager = pipeline.setup_pipeline()
        self.__meters_from_pipeline = set()
        for pipe in self.__pipeline_manager.pipelines:
            if not isinstance(pipe, pipeline.EventPipeline):
                self.__meters_from_pipeline.update(pipe.source.meters)

    def get_pipeline_meters(self):
        """Return the set of meters enabled in the pipeline config."""
        return self.__meters_from_pipeline

    def reinitialize(self):
        """Re-read the pipeline configuration."""
        self._load_meters()

View File

@ -0,0 +1,147 @@
#reference: http://docs.openstack.org/admin-guide/telemetry-measurements.html
---
meter_info_static_map:
- name: "disk.ephemeral.size"
type: "gauge"
unit: "GB"
- name: "disk.root.size"
type: "gauge"
unit: "GB"
- name: "image"
type: "gauge"
unit: "image"
- name: "image.delete"
type: "delta"
unit: "image"
- name: "image.size"
type: "gauge"
unit: "B"
- name: "image.update"
type: "gauge"
unit: "image"
- name: "image.upload"
type: "delta"
unit: "image"
- name: "instance"
type: "gauge"
unit: "instance"
- name: "ip.floating"
type: "gauge"
unit: "ip"
- name: "ip.floating.create"
type: "delta"
unit: "ip"
- name: "ip.floating.update"
type: "delta"
unit: "ip"
- name: "memory"
type: "gauge"
unit: "MB"
- name: "network"
type: "gauge"
unit: "network"
- name: "network.create"
type: "delta"
unit: "network"
- name: "network.delete"
type: "delta"
unit: "network"
- name: "network.update"
type: "delta"
unit: "network"
- name: "port"
type: "gauge"
unit: "port"
- name: "port.create"
type: "delta"
unit: "port"
- name: "port.delete"
type: "delta"
unit: "port"
- name: "port.update"
type: "delta"
unit: "port"
- name: "router"
type: "gauge"
unit: "router"
- name: "router.create"
type: "delta"
unit: "router"
- name: "router.delete"
type: "delta"
unit: "router"
- name: "router.update"
type: "delta"
unit: "router"
- name: "storage.objects"
type: "gauge"
unit: "object"
- name: "storage.objects.containers"
type: "gauge"
unit: "container"
- name: "storage.objects.size"
type: "gauge"
unit: "B"
- name: "subnet"
type: "gauge"
unit: "subnet"
- name: "subnet.create"
type: "delta"
unit: "subnet"
- name: "subnet.delete"
type: "delta"
unit: "subnet"
- name: "subnet.update"
type: "delta"
unit: "subnet"
- name: "vcpus"
type: "gauge"
unit: "vcpu"
- name: "volume"
type: "gauge"
unit: "volume"
- name: "volume.delete.end"
type: "delta"
unit: "volume"
- name: "volume.size"
type: "gauge"
unit: "GB"
- name: "volume.update.end"
type: "delta"
unit: "volume"

View File

@ -0,0 +1,24 @@
---
meter_metric_map:
- name: "network.outgoing.rate"
monasca_metric_name: "vm.net.out_rate"
resource_id: $.dimensions.resource_id
project_id: $.dimensions.project_id
user_id: $.dimensions.user_id
region: "NA"
type: "gauge"
unit: "b/s"
source: "NA"
resource_metadata: $.measurement[0][2]
- name: "network.incoming.rate"
monasca_metric_name: "vm.net.in_rate"
resource_id: $.dimensions.resource_id
project_id: $.dimensions.project_id
user_id: $.dimensions.user_id
region: "NA"
type: "gauge"
unit: "b/s"
source: "NA"
resource_metadata: $.measurement[0][2]

View File

@ -12,27 +12,45 @@
# License for the specific language governing permissions and limitations
# under the License.
from ceilometer.i18n import _
import copy
from monascaclient import client
from monascaclient import exc
from monascaclient import ksclient
from oslo_config import cfg
from oslo_log import log
import retrying
from ceilometer.i18n import _, _LW
from ceilometer import keystone_client
monclient_opts = [
cfg.StrOpt('clientapi_version',
default='2_0',
help='Version of Monasca client to use while publishing.'),
cfg.BoolOpt('enable_api_pagination',
default=False,
help='Enable paging through monasca api resultset.'),
]
cfg.CONF.register_opts(monclient_opts, group='monasca')
cfg.CONF.import_group('service_credentials', 'ceilometer.keystone_client')
keystone_client.register_keystoneauth_opts(cfg.CONF)
cfg.CONF.import_group('service_credentials', 'ceilometer.service')
LOG = log.getLogger(__name__)
class MonascaException(Exception):
def __init__(self, message=''):
msg = 'An exception is raised from Monasca: ' + message
super(MonascaException, self).__init__(msg)
class MonascaServiceException(Exception):
pass
def __init__(self, message=''):
msg = 'Monasca service is unavailable: ' + message
super(MonascaServiceException, self).__init__(msg)
class MonascaInvalidServiceCredentialsException(Exception):
@ -42,73 +60,200 @@ class MonascaInvalidServiceCredentialsException(Exception):
class MonascaInvalidParametersException(Exception):
code = 400
def __init__(self, message=''):
msg = 'Request cannot be handled by Monasca: ' + message
super(MonascaInvalidParametersException, self).__init__(msg)
class Client(object):
"""A client which gets information via python-monascaclient."""
_ksclient = None
def __init__(self, parsed_url):
self._retry_interval = cfg.CONF.database.retry_interval * 1000
self._max_retries = cfg.CONF.database.max_retries or 1
        # enable monasca api pagination
self._enable_api_pagination = cfg.CONF.monasca.enable_api_pagination
# NOTE(zqfan): There are many concurrency requests while using
# Ceilosca, to save system resource, we don't retry too many times.
if self._max_retries < 0 or self._max_retries > 10:
LOG.warning(_LW('Reduce max retries from %s to 10'),
self._max_retries)
self._max_retries = 10
conf = cfg.CONF.service_credentials
if not conf.username or not conf.password or \
not conf.auth_url:
# because our ansible script are in another repo, the old setting
# of auth_type is password-ceilometer-legacy which doesn't register
# os_xxx options, so here we need to provide a compatible way to
# avoid circle dependency
if conf.auth_type == 'password-ceilometer-legacy':
username = conf.os_username
password = conf.os_password
auth_url = conf.os_auth_url
project_id = conf.os_tenant_id
project_name = conf.os_tenant_name
else:
username = conf.username
password = conf.password
auth_url = conf.auth_url
project_id = conf.project_id
project_name = conf.project_name
if not username or not password or not auth_url:
err_msg = _("No user name or password or auth_url "
"found in service_credentials")
LOG.error(err_msg)
raise MonascaInvalidServiceCredentialsException(err_msg)
kwargs = {
'username': conf.username,
'password': conf.password,
'auth_url': conf.auth_url + "/v3",
'project_id': conf.project_id,
'project_name': conf.project_name,
'username': username,
'password': password,
'auth_url': auth_url.replace("v2.0", "v3"),
'project_id': project_id,
'project_name': project_name,
'region_name': conf.region_name,
'read_timeout': cfg.CONF.http_timeout,
'write_timeout': cfg.CONF.http_timeout,
}
self._kwargs = kwargs
self._endpoint = "http:" + parsed_url.path
self._endpoint = parsed_url.netloc + parsed_url.path
LOG.info(_("monasca_client: using %s as monasca end point") %
self._endpoint)
self._refresh_client()
def _refresh_client(self):
_ksclient = ksclient.KSClient(**self._kwargs)
self._kwargs['token'] = _ksclient.token
if not Client._ksclient:
Client._ksclient = ksclient.KSClient(**self._kwargs)
self._kwargs['token'] = Client._ksclient.token
self._mon_client = client.Client(cfg.CONF.monasca.clientapi_version,
self._endpoint, **self._kwargs)
@staticmethod
def _retry_on_exception(e):
return not isinstance(e, MonascaInvalidParametersException)
def call_func(self, func, **kwargs):
try:
return func(**kwargs)
except (exc.HTTPInternalServerError,
exc.HTTPServiceUnavailable,
exc.HTTPBadGateway,
exc.CommunicationError) as e:
LOG.exception(e)
raise MonascaServiceException(e.message)
except exc.HTTPUnProcessable as e:
LOG.exception(e)
raise MonascaInvalidParametersException(e.message)
except Exception as e:
LOG.exception(e)
raise
@retrying.retry(wait_fixed=self._retry_interval,
stop_max_attempt_number=self._max_retries,
retry_on_exception=self._retry_on_exception)
def _inner():
try:
return func(**kwargs)
except (exc.HTTPInternalServerError,
exc.HTTPServiceUnavailable,
exc.HTTPBadGateway,
exc.CommunicationError) as e:
LOG.exception(e)
msg = '%s: %s' % (e.__class__.__name__, e)
raise MonascaServiceException(msg)
except exc.HTTPException as e:
LOG.exception(e)
msg = '%s: %s' % (e.__class__.__name__, e)
status_code = e.code
# exc.HTTPException has string code 'N/A'
if not isinstance(status_code, int):
status_code = 500
if 400 <= status_code < 500:
raise MonascaInvalidParametersException(msg)
else:
raise MonascaException(msg)
except Exception as e:
LOG.exception(e)
msg = '%s: %s' % (e.__class__.__name__, e)
raise MonascaException(msg)
return _inner()
def metrics_create(self, **kwargs):
return self.call_func(self._mon_client.metrics.create,
**kwargs)
def metrics_list(self, **kwargs):
return self.call_func(self._mon_client.metrics.list,
**kwargs)
"""Using monasca pagination to get all metrics.
We yield endless metrics till caller doesn't want more or
no more is left.
"""
search_args = copy.deepcopy(kwargs)
metrics = self.call_func(self._mon_client.metrics.list, **search_args)
# check of api pagination is enabled
if self._enable_api_pagination:
# page through monasca results
while metrics:
for metric in metrics:
yield metric
                # offset for metrics is the last metric's id
search_args['offset'] = metric['id']
metrics = self.call_func(self._mon_client.metrics.list,
**search_args)
else:
for metric in metrics:
yield metric
def metric_names_list(self, **kwargs):
return self.call_func(self._mon_client.metrics.list_names,
**kwargs)
def measurements_list(self, **kwargs):
return self.call_func(self._mon_client.metrics.list_measurements,
**kwargs)
"""Using monasca pagination to get all measurements.
We yield endless measurements till caller doesn't want more or
no more is left.
"""
search_args = copy.deepcopy(kwargs)
measurements = self.call_func(
self._mon_client.metrics.list_measurements,
**search_args)
# check of api pagination is enabled
if self._enable_api_pagination:
while measurements:
for measurement in measurements:
yield measurement
# offset for measurements is measurement id composited with
# the last measurement's timestamp
search_args['offset'] = '%s_%s' % (
measurement['id'], measurement['measurements'][-1][0])
measurements = self.call_func(
self._mon_client.metrics.list_measurements,
**search_args)
else:
for measurement in measurements:
yield measurement
def statistics_list(self, **kwargs):
return self.call_func(self._mon_client.metrics.list_statistics,
**kwargs)
"""Using monasca pagination to get all statistics.
We yield endless statistics till caller doesn't want more or
no more is left.
"""
search_args = copy.deepcopy(kwargs)
statistics = self.call_func(self._mon_client.metrics.list_statistics,
**search_args)
# check of api pagination is enabled
if self._enable_api_pagination:
while statistics:
for statistic in statistics:
yield statistic
# with groupby, the offset is unpredictable to me, we don't
# support pagination for it now.
if kwargs.get('group_by'):
break
# offset for statistics is statistic id composited with
# the last statistic's timestamp
search_args['offset'] = '%s_%s' % (
statistic['id'], statistic['statistics'][-1][0])
statistics = self.call_func(
self._mon_client.metrics.list_statistics,
**search_args)
# unlike metrics.list and metrics.list_measurements
# return whole new data, metrics.list_statistics
# next page will use last page's final statistic
# data as the first one, so we need to pop it here.
# I think Monasca should treat this as a bug and fix it.
if statistics:
statistics[0]['statistics'].pop(0)
if len(statistics[0]['statistics']) == 0:
statistics.pop(0)
else:
for statistic in statistics:
yield statistic

View File

@ -27,9 +27,21 @@ OPTS = [
default='/etc/ceilometer/monasca_field_definitions.yaml',
help='Monasca static and dynamic field mappings'),
]
cfg.CONF.register_opts(OPTS, group='monasca')
MULTI_REGION_OPTS = [
cfg.StrOpt('control_plane',
default='None',
help='The name of control plane'),
cfg.StrOpt('cluster',
default='None',
help='The name of cluster'),
cfg.StrOpt('cloud_name',
default='None',
help='The name of cloud')
]
cfg.CONF.register_opts(MULTI_REGION_OPTS)
LOG = log.getLogger(__name__)
@ -75,11 +87,26 @@ class MonascaDataFilter(object):
resource_metadata=s['resource_metadata'],
source=s.get('source')).as_dict()
def get_value_for_nested_dictionary(self, lst, dct):
val = dct
for element in lst:
if isinstance(val, dict) and element in val:
val = val.get(element)
else:
return
return val
def process_sample_for_monasca(self, sample_obj):
if not self._mapping:
raise NoMappingsFound("Unable to process the sample")
dimensions = {}
dimensions['datasource'] = 'ceilometer'
# control_plane, cluster and cloud_name can be None, but we use
# literal 'None' for such case
dimensions['control_plane'] = cfg.CONF.control_plane or 'None'
dimensions['cluster'] = cfg.CONF.cluster or 'None'
dimensions['cloud_name'] = cfg.CONF.cloud_name or 'None'
if isinstance(sample_obj, sample_util.Sample):
sample = sample_obj.as_dict()
elif isinstance(sample_obj, dict):
@ -88,26 +115,48 @@ class MonascaDataFilter(object):
else:
sample = sample_obj
sample_meta = sample.get('resource_metadata', None)
for dim in self._mapping['dimensions']:
val = sample.get(dim, None)
if val:
if val is not None:
dimensions[dim] = val
else:
dimensions[dim] = 'None'
sample_meta = sample.get('resource_metadata', None)
value_meta = {}
meter_name = sample.get('name') or sample.get('counter_name')
if sample_meta:
for meta_key in self._mapping['metadata']['common']:
val = sample_meta.get(meta_key, None)
if val:
value_meta[meta_key] = str(val)
if val is not None:
value_meta[meta_key] = val
else:
if len(meta_key.split('.')) > 1:
val = self.get_value_for_nested_dictionary(
meta_key.split('.'), sample_meta)
if val is not None:
value_meta[meta_key] = val
else:
value_meta[meta_key] = 'None'
else:
value_meta[meta_key] = 'None'
if meter_name in self._mapping['metadata'].keys():
for meta_key in self._mapping['metadata'][meter_name]:
val = sample_meta.get(meta_key, None)
if val:
value_meta[meta_key] = str(val)
if val is not None:
value_meta[meta_key] = val
else:
if len(meta_key.split('.')) > 1:
val = self.get_value_for_nested_dictionary(
meta_key.split('.'), sample_meta)
if val is not None:
value_meta[meta_key] = val
else:
value_meta[meta_key] = 'None'
else:
value_meta[meta_key] = 'None'
meter_value = sample.get('volume') or sample.get('counter_volume')
if meter_value is None:

View File

@ -13,18 +13,20 @@
# License for the specific language governing permissions and limitations
# under the License.
from futurist import periodics
import os
import threading
import time
from oslo_config import cfg
from oslo_log import log
from oslo_service import loopingcall
import ceilometer
from ceilometer.i18n import _
from ceilometer import monasca_client as mon_client
from ceilometer import publisher
from ceilometer.publisher import monasca_data_filter
from ceilometer.publisher.monasca_data_filter import MonascaDataFilter
from monascaclient import exc
@ -89,21 +91,24 @@ class MonascaPublisher(publisher.PublisherBase):
self.time_of_last_batch_run = time.time()
self.mon_client = mon_client.Client(parsed_url)
self.mon_filter = monasca_data_filter.MonascaDataFilter()
self.mon_filter = MonascaDataFilter()
batch_timer = loopingcall.FixedIntervalLoopingCall(self.flush_batch)
batch_timer.start(interval=cfg.CONF.monasca.batch_polling_interval)
# add flush_batch function to periodic callables
periodic_callables = [
# The function to run + any automatically provided
# positional and keyword arguments to provide to it
# everytime it is activated.
(self.flush_batch, (), {}),
]
if cfg.CONF.monasca.retry_on_failure:
# list to hold metrics to be re-tried (behaves like queue)
self.retry_queue = []
# list to store retry attempts for metrics in retry_queue
self.retry_counter = []
retry_timer = loopingcall.FixedIntervalLoopingCall(
self.retry_batch)
retry_timer.start(
interval=cfg.CONF.monasca.retry_interval,
initial_delay=cfg.CONF.monasca.batch_polling_interval)
# add retry_batch function to periodic callables
periodic_callables.append((self.retry_batch, (), {}))
if cfg.CONF.monasca.archive_on_failure:
archive_path = cfg.CONF.monasca.archive_path
@ -113,6 +118,13 @@ class MonascaPublisher(publisher.PublisherBase):
self.archive_handler = publisher.get_publisher('file://' +
str(archive_path))
# start periodic worker
self.periodic_worker = periodics.PeriodicWorker(periodic_callables)
self.periodic_thread = threading.Thread(
target=self.periodic_worker.start)
self.periodic_thread.daemon = True
self.periodic_thread.start()
def _publish_handler(self, func, metrics, batch=False):
"""Handles publishing and exceptions that arise."""
@ -186,9 +198,10 @@ class MonascaPublisher(publisher.PublisherBase):
else:
return False
@periodics.periodic(cfg.CONF.monasca.batch_polling_interval)
def flush_batch(self):
"""Method to flush the queued metrics."""
# print "flush batch... %s" % str(time.time())
if self.is_batch_ready():
# publish all metrics in queue at this point
batch_count = len(self.metric_queue)
@ -212,9 +225,10 @@ class MonascaPublisher(publisher.PublisherBase):
else:
return False
@periodics.periodic(cfg.CONF.monasca.retry_interval)
def retry_batch(self):
"""Method to retry the failed metrics."""
# print "retry batch...%s" % str(time.time())
if self.is_retry_ready():
retry_count = len(self.retry_queue)
@ -256,6 +270,10 @@ class MonascaPublisher(publisher.PublisherBase):
self.retry_counter[ctr] += 1
ctr += 1
def flush_to_file(self):
    """Persist metrics that exhausted their retries (not yet implemented)."""
    # TODO(persist maxed-out metrics to file)
    pass
def publish_events(self, events):
"""Send an event message for publishing

View File

@ -1,5 +1,5 @@
#
# Copyright 2015 Hewlett Packard
# (C) Copyright 2015-2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@ -17,16 +17,24 @@
"""
from collections import defaultdict
import copy
import datetime
import itertools
import operator
from monascaclient import exc as monasca_exc
from oslo_config import cfg
from oslo_log import log
from oslo_serialization import jsonutils
from oslo_utils import netutils
from oslo_utils import timeutils
import ceilometer
from ceilometer.ceilosca_mapping.ceilometer_static_info_mapping import (
ProcessMappedCeilometerStaticInfo)
from ceilometer.ceilosca_mapping.ceilosca_mapping import (
ProcessMappedCeiloscaMetric)
from ceilometer.ceilosca_mapping.ceilosca_mapping import PipelineReader
from ceilometer.i18n import _
from ceilometer import monasca_client
from ceilometer.publisher.monasca_data_filter import MonascaDataFilter
@ -35,7 +43,6 @@ from ceilometer.storage import base
from ceilometer.storage import models as api_models
from ceilometer import utils
OPTS = [
cfg.IntOpt('default_stats_period',
default=300,
@ -52,9 +59,7 @@ AVAILABLE_CAPABILITIES = {
'metadata': False}},
'resources': {'query': {'simple': True,
'metadata': True}},
'samples': {'pagination': False,
'groupby': False,
'query': {'simple': True,
'samples': {'query': {'simple': True,
'metadata': True,
'complex': True}},
'statistics': {'groupby': False,
@ -70,14 +75,19 @@ AVAILABLE_CAPABILITIES = {
'stddev': False,
'cardinality': False}}
},
'events': {'query': {'simple': False}}
}
AVAILABLE_STORAGE_CAPABILITIES = {
'storage': {'production_ready': True},
'storage': {'production_ready': True}
}
class InvalidInputException(Exception):
    """Raised when API input is invalid.

    The ``code`` attribute is used to map the error to an HTTP 400
    response (same convention as ceilometer.NotImplementedError's 501).
    """

    code = 400
class Connection(base.Connection):
CAPABILITIES = utils.update_nested(base.Connection.CAPABILITIES,
AVAILABLE_CAPABILITIES)
@ -89,6 +99,10 @@ class Connection(base.Connection):
def __init__(self, url):
self.mc = monasca_client.Client(netutils.urlsplit(url))
self.mon_filter = MonascaDataFilter()
self.ceilosca_mapper = ProcessMappedCeiloscaMetric()
self.pipeline_reader = PipelineReader()
self.meter_static_info = ProcessMappedCeilometerStaticInfo()
self.meters_from_pipeline = self.pipeline_reader.get_pipeline_meters()
@staticmethod
def _convert_to_dict(stats, cols):
@ -102,7 +116,7 @@ class Connection(base.Connection):
"""
query = {}
for k, v in metaquery.items():
key = k.split('.')[1]
key = '.'.join(k.split('.')[1:])
if isinstance(v, basestring):
query[key] = v
else:
@ -122,6 +136,24 @@ class Connection(base.Connection):
else:
return True
def _incr_date_by_millisecond(self, date):
    """Return *date* advanced by one millisecond, as an ISO8601 string.

    Monasca timestamps only support millisecond resolution for now, so
    one millisecond is the smallest usable increment.
    """
    unix_epoch = datetime.datetime(1970, 1, 1)
    total_millis = timeutils.delta_seconds(unix_epoch, date) * 1000
    total_millis += 1
    return timeutils.iso8601_from_timestamp(total_millis / 1000, True)
def _decr_date_by_millisecond(self, date):
    """Return *date* moved back by one millisecond, as an ISO8601 string.

    Counterpart of _incr_date_by_millisecond; Monasca timestamps only
    support millisecond resolution.
    """
    unix_epoch = datetime.datetime(1970, 1, 1)
    total_millis = timeutils.delta_seconds(unix_epoch, date) * 1000
    total_millis -= 1
    return timeutils.iso8601_from_timestamp(total_millis / 1000, True)
def upgrade(self):
    """No-op; this driver has no storage schema to migrate."""
    pass
@ -151,6 +183,30 @@ class Connection(base.Connection):
"""
LOG.info(_("Dropping data with TTL %d"), ttl)
def get_metrics_with_mapped_dimensions(self, source_dimension,
                                       mapped_dimension, search_args=None):
    """Query Monasca metrics with one dimension name translated.

    When *source_dimension* is present in search_args['dimensions'],
    re-issue the metrics query with that dimension renamed to
    *mapped_dimension* (keeping the same filter value), e.g.
    resource_id -> hostname for monasca-agent collected metrics.

    :param source_dimension: dimension name used on the Ceilometer side.
    :param mapped_dimension: equivalent dimension name used by Monasca.
    :param search_args: kwargs for metrics_list, possibly containing a
        'dimensions' dict. May be None.
    :returns: list of metrics from Monasca, or [] when search_args is
        missing the source dimension or the query returns nothing.
    """
    # Guard against a None/absent search_args instead of raising
    # TypeError on the subscript below (the default is None).
    if not search_args or source_dimension not in search_args.get(
            'dimensions', {}):
        return []
    # Deep-copy so the caller's search_args is never mutated.
    mapped_args = copy.deepcopy(search_args)
    filter_val = mapped_args['dimensions'].pop(source_dimension, None)
    mapped_args['dimensions'][mapped_dimension] = filter_val
    metric_list = self.mc.metrics_list(**mapped_args)
    return metric_list if metric_list is not None else []
def get_metric_names_with_mapped_dimensions(
        self, source_dimension, mapped_dimension, search_args=None):
    """Query Monasca metric names with one dimension name translated.

    Same dimension-mapping behaviour as
    get_metrics_with_mapped_dimensions, but against the metric-names
    endpoint.

    :param source_dimension: dimension name used on the Ceilometer side.
    :param mapped_dimension: equivalent dimension name used by Monasca.
    :param search_args: kwargs for metric_names_list, possibly
        containing a 'dimensions' dict. May be None.
    :returns: list of metric names from Monasca, or [] when search_args
        is missing the source dimension or the query returns nothing.
    """
    # Guard against a None/absent search_args instead of raising
    # TypeError on the subscript below (the default is None).
    if not search_args or source_dimension not in search_args.get(
            'dimensions', {}):
        return []
    # Deep-copy so the caller's search_args is never mutated.
    mapped_args = copy.deepcopy(search_args)
    filter_val = mapped_args['dimensions'].pop(source_dimension, None)
    mapped_args['dimensions'][mapped_dimension] = filter_val
    metric_names_list = self.mc.metric_names_list(**mapped_args)
    return metric_names_list if metric_names_list is not None else []
def get_resources(self, user=None, project=None, source=None,
start_timestamp=None, start_timestamp_op=None,
end_timestamp=None, end_timestamp_op=None,
@ -183,23 +239,43 @@ class Connection(base.Connection):
if metaquery:
q = self._convert_metaquery(metaquery)
if start_timestamp_op and start_timestamp_op != 'ge':
if start_timestamp_op and start_timestamp_op not in ['ge', 'gt']:
raise ceilometer.NotImplementedError(('Start time op %s '
'not implemented') %
start_timestamp_op)
if end_timestamp_op and end_timestamp_op != 'le':
if end_timestamp_op and end_timestamp_op not in ['le', 'lt']:
raise ceilometer.NotImplementedError(('End time op %s '
'not implemented') %
end_timestamp_op)
if not start_timestamp:
start_timestamp = timeutils.isotime(datetime.datetime(1970, 1, 1))
else:
start_timestamp = timeutils.isotime(start_timestamp)
start_timestamp = datetime.datetime(1970, 1, 1)
if end_timestamp:
end_timestamp = timeutils.isotime(end_timestamp)
if not end_timestamp:
end_timestamp = timeutils.utcnow()
self._ensure_start_time_le_end_time(start_timestamp,
end_timestamp)
# Equivalent of doing a start_timestamp_op = 'ge'
if (start_timestamp_op and
start_timestamp_op == 'gt'):
start_timestamp = self._incr_date_by_millisecond(
start_timestamp)
start_timestamp_op = 'ge'
else:
start_timestamp = timeutils.isotime(start_timestamp,
subsecond=True)
# Equivalent of doing a end_timestamp_op = 'le'
if (end_timestamp_op and
end_timestamp_op == 'lt'):
end_timestamp = self._decr_date_by_millisecond(
end_timestamp)
end_timestamp_op = 'le'
else:
end_timestamp = timeutils.isotime(end_timestamp, subsecond=True)
dims_filter = dict(user_id=user,
project_id=project,
@ -210,47 +286,139 @@ class Connection(base.Connection):
_search_args = dict(
start_time=start_timestamp,
end_time=end_timestamp
)
end_time=end_timestamp,
start_timestamp_op=start_timestamp_op,
end_timestamp_op=end_timestamp_op)
_search_args = {k: v for k, v in _search_args.items()
if v is not None}
result_count = 0
_search_args_metric = _search_args
_search_args_metric['dimensions'] = dims_filter
for metric in self.mc.metrics_list(
**_search_args_metric):
_search_args['name'] = metric['name']
_search_args['dimensions'] = metric['dimensions']
_search_args['limit'] = 1
try:
for sample in self.mc.measurements_list(**_search_args):
d = sample['dimensions']
m = self._convert_to_dict(
sample['measurements'][0], sample['columns'])
vm = m['value_meta']
if not self._match_metaquery_to_value_meta(q, vm):
continue
if d.get('resource_id'):
result_count += 1
_search_kwargs = {'dimensions': dims_filter}
meter_names = list()
meter_names_list = itertools.chain(
# Accumulate a list from monascaclient starting with no filter
self.mc.metric_names_list(**_search_kwargs),
# query monasca with hostname = resource_id filter
self.get_metric_names_with_mapped_dimensions(
"resource_id", "hostname", _search_kwargs),
# query monasca with tenant_id = project_id filter
self.get_metric_names_with_mapped_dimensions(
"project_id", "tenant_id", _search_kwargs)
)
yield api_models.Resource(
resource_id=d.get('resource_id'),
first_sample_timestamp=(
timeutils.parse_isotime(m['timestamp'])),
last_sample_timestamp=timeutils.utcnow(),
project_id=d.get('project_id'),
source=d.get('source'),
user_id=d.get('user_id'),
metadata=m['value_meta']
)
for metric in meter_names_list:
if metric['name'] in meter_names:
continue
elif (metric['name'] in
self.meter_static_info.get_list_supported_meters()):
meter_names.insert(0, metric['name'])
elif (metric['name'] in
self.ceilosca_mapper.get_list_monasca_metrics()):
meter_names.append(metric['name'])
if result_count == limit:
return
for meter_name in meter_names:
_search_args['name'] = meter_name
_search_args['group_by'] = '*'
_search_args.pop('dimensions', None)
_search_args['dimensions'] = dims_filter
except monasca_exc.HTTPConflict:
pass
# if meter is a Ceilometer meter...
if (meter_name not in
self.ceilosca_mapper.get_list_monasca_metrics()):
try:
if meter_name not in self.meters_from_pipeline:
_search_args['dimensions']['datasource'] = 'ceilometer'
for sample in (self.mc.measurements_list(**_search_args)):
d = sample['dimensions']
for meas in sample['measurements']:
m = self._convert_to_dict(meas, sample['columns'])
vm = m['value_meta']
if not self._match_metaquery_to_value_meta(q, vm):
continue
if d.get('resource_id'):
result_count += 1
yield api_models.Resource(
resource_id=d.get('resource_id'),
first_sample_timestamp=(
timeutils.parse_isotime(
m['timestamp'])),
last_sample_timestamp=timeutils.utcnow(),
project_id=d.get('project_id'),
source=d.get('source'),
user_id=d.get('user_id'),
metadata=m['value_meta']
)
if result_count == limit:
return
except monasca_exc.HTTPConflict:
pass
# else if meter is a Monasca meter...
else:
try:
meter_def = self.ceilosca_mapper.\
get_ceilosca_mapped_metric_definition(meter_name)
# if for a meter name being queried, project exists in
# ceilometer-monasca mapping file, query by
# mapped_field instead
if not (project is None):
mapped_field = self.ceilosca_mapper.\
get_ceilosca_mapped_definition_key_val(
_search_args['name'], 'project_id')
if 'dimensions' in mapped_field:
_search_args['dimensions'].pop('project_id', None)
_search_args['dimensions'][mapped_field.split(".")[-1]] = \
project
# if for a meter name being queried, resource_id exists in
# ceilometer-monasca mapping file, query by
# mapped_field instead
if not (resource is None):
mapped_field = self.ceilosca_mapper.\
get_ceilosca_mapped_definition_key_val(
_search_args['name'], 'resource_id')
if 'dimensions' in mapped_field:
_search_args['dimensions'].pop('resource_id', None)
_search_args['dimensions'][mapped_field.split(".")[-1]] = \
resource
for sample in (self.mc.measurements_list(**_search_args)):
d = sample['dimensions']
for meas in sample['measurements']:
m = self._convert_to_dict(meas, sample['columns'])
vm = m['value_meta']
if not self._match_metaquery_to_value_meta(q, vm):
continue
if meter_def.parse_fields('resource_id', sample):
result_count += 1
yield api_models.Resource(
resource_id=meter_def.parse_fields(
'resource_id', sample),
first_sample_timestamp=(
timeutils.parse_isotime(
m['timestamp'])),
last_sample_timestamp=(timeutils.utcnow()),
project_id=meter_def.parse_fields(
'project_id', sample),
source=meter_def.parse_fields('source',
sample),
user_id=meter_def.parse_fields(
'user_id', sample),
metadata=meter_def.parse_fields(
'resource_metadata', sample)
)
if result_count == limit:
return
except monasca_exc.HTTPConflict:
pass
def get_meters(self, user=None, project=None, resource=None, source=None,
metaquery=None, limit=None, unique=False):
@ -269,6 +437,7 @@ class Connection(base.Connection):
:param source: Optional source filter.
:param metaquery: Optional dict with metadata to match on.
:param limit: Maximum number of results to return.
:param unique: If set to true, return only unique meter information.
"""
if limit == 0:
return
@ -282,23 +451,114 @@ class Connection(base.Connection):
resource_id=resource,
source=source
)
_dimensions = {k: v for k, v in _dimensions.items() if v is not None}
_search_kwargs = {'dimensions': _dimensions}
if unique:
meter_names = set()
for metric in self.mc.metric_names_list(**_search_kwargs):
if metric['name'] in meter_names:
continue
elif (metric['name'] in
self.meter_static_info.get_list_supported_meters()):
if limit and len(meter_names) >= limit:
return
meter_names.add(metric['name'])
yield api_models.Meter(
name=metric['name'],
type=self.meter_static_info
.get_meter_static_info_key_val(metric['name'],
'type'),
unit=self.meter_static_info
.get_meter_static_info_key_val(metric['name'],
'unit'),
resource_id=None,
project_id=None,
source=None,
user_id=None)
if limit:
_search_kwargs['limit'] = limit
elif (metric['name'] not in
self.ceilosca_mapper.get_list_monasca_metrics()):
continue
else:
if limit and len(meter_names) >= limit:
return
meter_names.add(metric['name'])
meter = (self.ceilosca_mapper.
get_ceilosca_mapped_metric_definition
(metric['name']))
yield api_models.Meter(
name=meter.parse_fields('name', metric),
type=meter.parse_fields('type', metric),
unit=meter.parse_fields('unit', metric),
resource_id=None,
project_id=None,
source=None,
user_id=None)
for metric in self.mc.metrics_list(**_search_kwargs):
yield api_models.Meter(
name=metric['name'],
type=metric['dimensions'].get('type') or 'cumulative',
unit=metric['dimensions'].get('unit'),
resource_id=metric['dimensions'].get('resource_id'),
project_id=metric['dimensions'].get('project_id'),
source=metric['dimensions'].get('source'),
user_id=metric['dimensions'].get('user_id'))
else:
result_count = 0
# Search for ceilometer published data first
_search_kwargs_tmp = copy.deepcopy(_search_kwargs)
_search_kwargs_tmp['dimensions']['datasource'] = 'ceilometer'
metrics_list = self.mc.metrics_list(**_search_kwargs_tmp)
for metric in metrics_list:
if result_count == limit:
return
result_count += 1
yield api_models.Meter(
name=metric['name'],
type=metric['dimensions'].get('type') or 'cumulative',
unit=metric['dimensions'].get('unit'),
resource_id=metric['dimensions'].get('resource_id'),
project_id=metric['dimensions'].get('project_id'),
source=metric['dimensions'].get('source'),
user_id=metric['dimensions'].get('user_id'))
# because we enable monasca pagination, so we should use iterator
# instead of a list, to reduce unnecessary requests to monasca-api
metrics_list = itertools.chain(
self.mc.metrics_list(**_search_kwargs),
# for vm performance metrics collected by monasca-agent, the
# project_id is mapped to tenant_id
self.get_metrics_with_mapped_dimensions("project_id",
"tenant_id",
_search_kwargs),
# for compute.node metrics collected by monasca-agent, the
# resource_id is mapped to hostname
self.get_metrics_with_mapped_dimensions("resource_id",
"hostname",
_search_kwargs),
)
for metric in metrics_list:
if result_count == limit:
return
if (metric['dimensions'].get('datasource') != 'ceilometer' and
metric['name'] in self.meters_from_pipeline):
result_count += 1
yield api_models.Meter(
name=metric['name'],
type=metric['dimensions'].get('type') or 'cumulative',
unit=metric['dimensions'].get('unit'),
resource_id=metric['dimensions'].get('resource_id'),
project_id=metric['dimensions'].get('project_id'),
source=metric['dimensions'].get('source'),
user_id=metric['dimensions'].get('user_id'))
elif (metric['name'] in
self.ceilosca_mapper.get_list_monasca_metrics()):
meter = (self.ceilosca_mapper.
get_ceilosca_mapped_metric_definition
(metric['name']))
result_count += 1
yield api_models.Meter(
name=meter.parse_fields('name', metric),
type=meter.parse_fields('type', metric),
unit=meter.parse_fields('unit', metric),
resource_id=meter.parse_fields('resource_id', metric),
project_id=meter.parse_fields('project_id', metric),
source=meter.parse_fields('source', metric),
user_id=meter.parse_fields('user_id', metric))
def get_samples(self, sample_filter, limit=None):
"""Return an iterable of dictionaries containing sample information.
@ -332,14 +592,14 @@ class Connection(base.Connection):
"Supply meter name at the least")
if (sample_filter.start_timestamp_op and
sample_filter.start_timestamp_op != 'ge'):
sample_filter.start_timestamp_op not in ['ge', 'gt']):
raise ceilometer.NotImplementedError(('Start time op %s '
'not implemented') %
sample_filter.
start_timestamp_op)
if (sample_filter.end_timestamp_op and
sample_filter.end_timestamp_op != 'le'):
sample_filter.end_timestamp_op not in ['le', 'lt']):
raise ceilometer.NotImplementedError(('End time op %s '
'not implemented') %
sample_filter.
@ -358,7 +618,28 @@ class Connection(base.Connection):
sample_filter.start_timestamp = datetime.datetime(1970, 1, 1)
if not sample_filter.end_timestamp:
sample_filter.end_timestamp = datetime.datetime.utcnow()
sample_filter.end_timestamp = timeutils.utcnow()
self._ensure_start_time_le_end_time(sample_filter.start_timestamp,
sample_filter.end_timestamp)
if (sample_filter.start_timestamp_op and sample_filter.
start_timestamp_op == 'gt'):
sample_filter.start_timestamp = self._incr_date_by_millisecond(
sample_filter.start_timestamp)
sample_filter.start_timestamp_op = 'ge'
else:
sample_filter.start_timestamp = timeutils.isotime(
sample_filter.start_timestamp, subsecond=True)
if (sample_filter.end_timestamp_op and sample_filter.
end_timestamp_op == 'lt'):
sample_filter.end_timestamp = self._decr_date_by_millisecond(
sample_filter.end_timestamp)
sample_filter.end_timestamp_op = 'le'
else:
sample_filter.end_timestamp = timeutils.isotime(
sample_filter.end_timestamp, subsecond=True)
_dimensions = dict(
user_id=sample_filter.user,
@ -373,27 +654,49 @@ class Connection(base.Connection):
_dimensions = {k: v for k, v in _dimensions.items() if v is not None}
start_ts = timeutils.isotime(sample_filter.start_timestamp)
end_ts = timeutils.isotime(sample_filter.end_timestamp)
_search_args = dict(
start_time=start_ts,
start_time=sample_filter.start_timestamp,
start_timestamp_op=sample_filter.start_timestamp_op,
end_time=end_ts,
end_time=sample_filter.end_timestamp,
end_timestamp_op=sample_filter.end_timestamp_op,
dimensions=_dimensions,
name=sample_filter.meter,
group_by='*'
)
result_count = 0
_search_args_metrics = _search_args
_search_args_metrics['dimensions'] = _dimensions
_search_args_metrics['name'] = sample_filter.meter
for metric in self.mc.metrics_list(
**_search_args_metrics):
_search_args['name'] = metric['name']
_search_args['dimensions'] = metric['dimensions']
_search_args['merge_metrics'] = False
_search_args = {k: v for k, v in _search_args.items()
if v is not None}
_search_args = {k: v for k, v in _search_args.items()
if v is not None}
if self.ceilosca_mapper.get_monasca_metric_name(sample_filter.meter):
_search_args['name'] = (
self.ceilosca_mapper.get_monasca_metric_name(
sample_filter.meter))
meter_def = (
self.ceilosca_mapper.get_ceilosca_mapped_metric_definition(
_search_args['name']))
# if for a meter name being queried, project exists in
# ceilometer-monasca mapping file, query by mapped_field instead
if not (sample_filter.project is None):
mapped_field = self.ceilosca_mapper.\
get_ceilosca_mapped_definition_key_val(
_search_args['name'], 'project_id')
if 'dimensions' in mapped_field:
_search_args['dimensions'].pop('project_id', None)
_search_args['dimensions'][mapped_field.split(".")[-1]] = \
sample_filter.project
# if for a meter name being queried, resource_id exists in
# ceilometer-monasca mapping file, query by mapped_field instead
if not (sample_filter.resource is None):
mapped_field = self.ceilosca_mapper.\
get_ceilosca_mapped_definition_key_val(
_search_args['name'], 'resource_id')
if 'dimensions' in mapped_field:
_search_args['dimensions'].pop('resource_id', None)
_search_args['dimensions'][mapped_field.split(".")[-1]] = \
sample_filter.resource
for sample in self.mc.measurements_list(**_search_args):
d = sample['dimensions']
@ -404,17 +707,21 @@ class Connection(base.Connection):
if not self._match_metaquery_to_value_meta(q, vm):
continue
result_count += 1
yield api_models.Sample(
source=d.get('source'),
counter_name=sample['name'],
counter_type=d.get('type'),
counter_unit=d.get('unit'),
source=meter_def.parse_fields('source', sample),
counter_name=sample_filter.meter,
counter_type=meter_def.parse_fields('type', sample),
counter_unit=meter_def.parse_fields('unit', sample),
counter_volume=m['value'],
user_id=d.get('user_id'),
project_id=d.get('project_id'),
resource_id=d.get('resource_id'),
user_id=meter_def.parse_fields('user_id', sample),
project_id=meter_def.parse_fields('project_id',
sample),
resource_id=meter_def.parse_fields('resource_id',
sample),
timestamp=timeutils.parse_isotime(m['timestamp']),
resource_metadata=m['value_meta'],
resource_metadata=meter_def.parse_fields(
'resource_metadata', sample),
message_id=sample['id'],
message_signature='',
recorded_at=(timeutils.parse_isotime(m['timestamp'])))
@ -422,6 +729,46 @@ class Connection(base.Connection):
if result_count == limit:
return
return
# This if statement adds the datasource dimension to cover two cases:
# 1. To safeguard against querying an existing monasca metric that is
# not mapped in ceilometer, unless it was published into monasca by
# ceilometer itself.
# 2. To allow querying a metric that was collected historically but is
# no longer being collected, for as long as those metrics still exist
# in monasca.
if sample_filter.meter not in self.meters_from_pipeline:
_search_args['dimensions']['datasource'] = 'ceilometer'
for sample in self.mc.measurements_list(**_search_args):
d = sample['dimensions']
for meas in sample['measurements']:
m = self._convert_to_dict(
meas, sample['columns'])
vm = m['value_meta']
if not self._match_metaquery_to_value_meta(q, vm):
continue
result_count += 1
yield api_models.Sample(
source=d.get('source'),
counter_name=sample_filter.meter,
counter_type=d.get('type'),
counter_unit=d.get('unit'),
counter_volume=m['value'],
user_id=d.get('user_id'),
project_id=d.get('project_id'),
resource_id=d.get('resource_id'),
timestamp=timeutils.parse_isotime(m['timestamp']),
resource_metadata=m['value_meta'],
message_id=sample['id'],
message_signature='',
recorded_at=(timeutils.parse_isotime(m['timestamp'])))
if result_count == limit:
return
def get_meter_statistics(self, filter, period=None, groupby=None,
aggregate=None):
"""Return a dictionary containing meter statistics.
@ -469,23 +816,44 @@ class Connection(base.Connection):
raise ceilometer.NotImplementedError('Message_id query '
'not implemented')
if filter.start_timestamp_op and filter.start_timestamp_op != 'ge':
if filter.start_timestamp_op and (
filter.start_timestamp_op not in ['ge', 'gt']):
raise ceilometer.NotImplementedError(('Start time op %s '
'not implemented') %
filter.start_timestamp_op)
if filter.end_timestamp_op and filter.end_timestamp_op != 'le':
if filter.end_timestamp_op and (
filter.end_timestamp_op not in ['le', 'lt']):
raise ceilometer.NotImplementedError(('End time op %s '
'not implemented') %
filter.end_timestamp_op)
if not filter.start_timestamp:
filter.start_timestamp = timeutils.isotime(
datetime.datetime(1970, 1, 1))
else:
filter.start_timestamp = timeutils.isotime(filter.start_timestamp)
if filter.end_timestamp:
filter.end_timestamp = timeutils.isotime(filter.end_timestamp)
if not filter.start_timestamp:
filter.start_timestamp = datetime.datetime(1970, 1, 1)
if not filter.end_timestamp:
filter.end_timestamp = timeutils.utcnow()
self._ensure_start_time_le_end_time(filter.start_timestamp,
filter.end_timestamp)
if (filter.start_timestamp_op and filter.
start_timestamp_op == 'gt'):
filter.start_timestamp = self._incr_date_by_millisecond(
filter.start_timestamp)
filter.start_timestamp_op = 'ge'
else:
filter.start_timestamp = timeutils.isotime(filter.start_timestamp,
subsecond=True)
if (filter.end_timestamp_op and filter.
end_timestamp_op == 'lt'):
filter.end_timestamp = self._decr_date_by_millisecond(
filter.end_timestamp)
filter.end_timestamp_op = 'le'
else:
filter.end_timestamp = timeutils.isotime(filter.end_timestamp,
subsecond=True)
# TODO(monasca): Add this a config parameter
allowed_stats = ['avg', 'min', 'max', 'sum', 'count']
@ -524,13 +892,49 @@ class Connection(base.Connection):
_search_args = {k: v for k, v in _search_args.items()
if v is not None}
is_mapped_metric = False
if self.ceilosca_mapper.get_monasca_metric_name(filter.meter):
_search_args['name'] = self.ceilosca_mapper.\
get_monasca_metric_name(filter.meter)
meter_def = (
self.ceilosca_mapper.get_ceilosca_mapped_metric_definition(
_search_args['name']))
is_mapped_metric = True
# if for a meter name being queried, project exists in
# ceilometer-monasca mapping file, query by mapped_field instead
if not (filter.project is None):
mapped_field = self.ceilosca_mapper.\
get_ceilosca_mapped_definition_key_val(
_search_args['name'], 'project_id')
if 'dimensions' in mapped_field:
_search_args['dimensions'].pop('project_id', None)
_search_args['dimensions'][mapped_field.split(".")[-1]] = \
filter.project
# if for a meter name being queried, resource_id exists in
# ceilometer-monasca mapping file, query by mapped_field instead
if not (filter.resource is None):
mapped_field = self.ceilosca_mapper.\
get_ceilosca_mapped_definition_key_val(
_search_args['name'], 'resource_id')
if 'dimensions' in mapped_field:
_search_args['dimensions'].pop('resource_id', None)
_search_args['dimensions'][mapped_field.split(".")[-1]] = \
filter.resource
elif filter.meter not in self.meters_from_pipeline:
_search_args['dimensions']['datasource'] = 'ceilometer'
if groupby:
_search_args['group_by'] = '*'
stats_list = self.mc.statistics_list(**_search_args)
group_stats_dict = defaultdict(list)
for stats in stats_list:
groupby_val = stats['dimensions'].get(groupby)
if is_mapped_metric:
groupby_val = meter_def.parse_fields(groupby, stats)
else:
groupby_val = stats['dimensions'].get(groupby)
group_stats_dict[groupby_val].append(stats)
def get_max(items):
@ -575,8 +979,12 @@ class Connection(base.Connection):
count_list.append(stats_dict['count'])
ts_list.append(stats_dict['timestamp'])
group_statistics['unit'] = (stats['dimensions'].
get('unit'))
if is_mapped_metric:
group_statistics['unit'] = (meter_def.parse_fields(
'unit', stats))
else:
group_statistics['unit'] = (stats['dimensions'].
get('unit'))
if len(max_list):
group_statistics['max'] = get_max(max_list)
@ -648,7 +1056,10 @@ class Connection(base.Connection):
key = '%s%s' % (a.func, '/%s' % a.param if a.param
else '')
stats_dict['aggregate'][key] = stats_dict.get(key)
unit = stats['dimensions'].get('unit')
if is_mapped_metric:
unit = meter_def.parse_fields('unit', stats)
else:
unit = stats['dimensions'].get('unit')
if ts_start and ts_end:
yield api_models.Statistics(
unit=unit,
@ -672,7 +1083,7 @@ class Connection(base.Connection):
[{"=":{"counter_name":"memory"}}]]
"""
op, nodes = filter_expr.items()[0]
msg = "%s operand is not supported" % op
msg = "%s operator is not supported" % op
if op == 'or':
filter_list = []
@ -698,8 +1109,23 @@ class Connection(base.Connection):
else:
raise ceilometer.NotImplementedError(msg)
def _ensure_start_time_le_end_time(self, start_time, end_time):
if start_time is None:
start_time = datetime.datetime(1970, 1, 1)
if end_time is None:
end_time = timeutils.utcnow()
# here we don't handle the corner case of start_time == end_time,
# while start_time_op & end_time_op can be gt and lt, let Monasca
# decides it valid or not.
if start_time > end_time:
msg = _('start time (%(start_time)s) should not after end_time '
'(%(end_time)s). (start time defaults to '
'1970-01-01T00:00:00Z, end time defaults to utc now)') % {
'start_time': start_time, 'end_time': end_time}
raise InvalidInputException(msg)
def _parse_to_sample_filter(self, simple_filters):
"""Parse to simple filters to sample filter.
"""Parse simple filters to sample filter.
For i.e.: parse
[{"=":{"counter_name":"cpu"}},{"=":{"counter_volume": 1}}]
@ -728,7 +1154,7 @@ class Connection(base.Connection):
"counter_type": "type",
"counter_unit": "unit",
}
msg = "operand %s cannot be applied to field %s"
msg = "operator %s cannot be applied to field %s"
kwargs = {'metaquery': {}}
for sf in simple_filters:
op = sf.keys()[0]
@ -742,9 +1168,15 @@ class Connection(base.Connection):
if op == '>=':
kwargs['start_timestamp'] = value
kwargs['start_timestamp_op'] = 'ge'
elif op == '>':
kwargs['start_timestamp'] = value
kwargs['start_timestamp_op'] = 'gt'
elif op == '<=':
kwargs['end_timestamp'] = value
kwargs['end_timestamp_op'] = 'le'
elif op == '<':
kwargs['end_timestamp'] = value
kwargs['end_timestamp_op'] = 'lt'
else:
raise ceilometer.NotImplementedError(msg % (op, field))
elif field == 'counter_volume':
@ -756,6 +1188,8 @@ class Connection(base.Connection):
else:
ra_msg = "field %s is not supported" % field
raise ceilometer.NotImplementedError(ra_msg)
self._ensure_start_time_le_end_time(kwargs.get('start_timestamp'),
kwargs.get('end_timestamp'))
sample_type = kwargs.pop('type', None)
sample_unit = kwargs.pop('unit', None)
sample_volume = kwargs.pop('volume', None)
@ -770,11 +1204,30 @@ class Connection(base.Connection):
sample_filter.volume_op = sample_volume_op
return sample_filter
def _is_meter_name_exist(self, filters):
for f in filters:
field = f.values()[0].keys()[0]
if field == 'counter_name':
return True
return False
def _validate_filters(self, filters, msg=''):
    """Fail fast when a sub-query lacks the mandatory meter name.

    :param filters: list of simple filter dicts for one sub-query.
    :param msg: context prefix included in the error message.
    :raises InvalidInputException: if no counter_name filter exists.
    """
    if self._is_meter_name_exist(filters):
        return
    detail = _('%(msg)s, meter name is not found in %(filters)s') % {
        'msg': msg, 'filters': jsonutils.dumps(filters)}
    raise InvalidInputException(detail)
def _parse_to_sample_filters(self, filter_expr):
"""Parse complex query expression to sample filter list."""
filter_list = self._parse_to_filter_list(filter_expr)
msg = _('complex query filter expression has been translated to '
'%(count)d sub queries: %(filters)s') % {
'count': len(filter_list),
'filters': jsonutils.dumps(filter_list)}
sample_filters = []
for filters in filter_list:
self._validate_filters(filters, msg)
sf = self._parse_to_sample_filter(filters)
if sf:
sample_filters.append(sf)
@ -803,17 +1256,23 @@ class Connection(base.Connection):
def query_samples(self, filter_expr=None, orderby=None, limit=None):
if not filter_expr:
msg = "fitler must be specified"
msg = _("fitler must be specified")
raise ceilometer.NotImplementedError(msg)
if orderby:
msg = "orderby is not supported"
raise ceilometer.NotImplementedError(msg)
if not limit:
msg = "limit must be specified"
msg = _("orderby is not supported")
raise ceilometer.NotImplementedError(msg)
# limit won't be None because we have limit enforcement in API level
if limit == 0:
return []
LOG.debug("filter_expr = %s", filter_expr)
sample_filters = self._parse_to_sample_filters(filter_expr)
try:
sample_filters = self._parse_to_sample_filters(filter_expr)
# ValueError: year=1016 is before 1900; the datetime strftime()
# methods require year >= 1900
except ValueError as e:
raise InvalidInputException(e)
LOG.debug("sample_filters = %s", sample_filters)
ret = []

View File

@ -0,0 +1,183 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base classes for API tests.
"""
from oslo_config import cfg
from oslo_config import fixture as fixture_config
from oslo_policy import opts
import pecan
import pecan.testing
from ceilometer.api import rbac
from ceilometer.tests import db as db_test_base
cfg.CONF.import_group('api', 'ceilometer.api.controllers.v2.root')
class FunctionalTest(db_test_base.TestBase):
    """Functional-test base that drives a real Pecan application.

    Use this when a test needs the literal WSGI app and its
    integration with the framework rather than isolated controller
    logic.
    """

    # Prefix prepended to every request path (e.g. '/v2' in subclasses).
    PATH_PREFIX = ''

    def setUp(self):
        super(FunctionalTest, self).setUp()
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.setup_messaging(self.CONF)
        opts.set_defaults(self.CONF)
        self.CONF.set_override("policy_file",
                               self.path_get('etc/ceilometer/policy.json'),
                               group='oslo_policy')
        # Exercise the native v2 API instead of redirecting to gnocchi/aodh.
        self.CONF.set_override('gnocchi_is_enabled', False, group='api')
        self.CONF.set_override('aodh_is_enabled', False, group='api')
        self.app = self._make_app()

    def _make_app(self, enable_acl=False):
        """Build and return the Pecan test application.

        :param enable_acl: whether to enable ACL middleware in the app
        """
        self.config = {
            'app': {
                'root': 'ceilometer.api.controllers.root.RootController',
                'modules': ['ceilometer.api'],
                'enable_acl': enable_acl,
            },
            'wsme': {
                'debug': True,
            },
        }
        return pecan.testing.load_test_app(self.config)

    def tearDown(self):
        super(FunctionalTest, self).tearDown()
        rbac.reset()
        pecan.set_config({}, overwrite=True)

    def put_json(self, path, params, expect_errors=False, headers=None,
                 extra_environ=None, status=None):
        """Send a simulated HTTP PUT request to the Pecan test app.

        :param path: url path of target service
        :param params: content for wsgi.input of request
        :param expect_errors: whether an error response is expected
        :param headers: dictionary of headers to send with the request
        :param extra_environ: dictionary of environ variables to send
        :param status: expected status code of the response
        """
        return self.post_json(path=path, params=params,
                              expect_errors=expect_errors,
                              headers=headers, extra_environ=extra_environ,
                              status=status, method="put")

    def post_json(self, path, params, expect_errors=False, headers=None,
                  method="post", extra_environ=None, status=None):
        """Send a simulated HTTP POST (or PUT) request to the Pecan test app.

        :param path: url path of target service
        :param params: content for wsgi.input of request
        :param expect_errors: whether an error response is expected
        :param headers: dictionary of headers to send with the request
        :param method: request method; prefer the dedicated helper
            (e.g. :meth:`put_json`) over passing this directly
        :param extra_environ: dictionary of environ variables to send
        :param status: expected status code of the response
        """
        # webtest exposes one "<method>_json" helper per HTTP verb.
        caller = getattr(self.app, "%s_json" % method)
        return caller(str(self.PATH_PREFIX + path),
                      params=params,
                      headers=headers,
                      status=status,
                      extra_environ=extra_environ,
                      expect_errors=expect_errors)

    def delete(self, path, expect_errors=False, headers=None,
               extra_environ=None, status=None):
        """Send a simulated HTTP DELETE request to the Pecan test app.

        :param path: url path of target service
        :param expect_errors: whether an error response is expected
        :param headers: dictionary of headers to send with the request
        :param extra_environ: dictionary of environ variables to send
        :param status: expected status code of the response
        """
        target = str(self.PATH_PREFIX + path)
        return self.app.delete(target,
                               headers=headers,
                               status=status,
                               extra_environ=extra_environ,
                               expect_errors=expect_errors)

    def get_json(self, path, expect_errors=False, headers=None,
                 extra_environ=None, q=None, groupby=None, status=None,
                 override_params=None, **params):
        """Send a simulated HTTP GET request to the Pecan test app.

        :param path: url path of target service
        :param expect_errors: whether an error response is expected
        :param headers: dictionary of headers to send with the request
        :param extra_environ: dictionary of environ variables to send
        :param q: list of query dicts with field, value, op, and type keys
        :param groupby: list of fields to group by
        :param status: expected status code of the response
        :param override_params: literally encoded query param string
        :param params: content for wsgi.input of request
        """
        q = q or []
        groupby = groupby or []
        full_path = self.PATH_PREFIX + path
        if override_params:
            all_params = override_params
        else:
            all_params = dict(params)
            if q:
                # Flatten the list of query dicts into the parallel
                # q.field/q.op/q.value/q.type lists the API expects.
                query_params = {'q.field': [],
                                'q.value': [],
                                'q.op': [],
                                'q.type': []}
                for query in q:
                    for name in ('field', 'op', 'value', 'type'):
                        query_params['q.%s' % name].append(
                            query.get(name, ''))
                all_params.update(query_params)
            if groupby:
                all_params['groupby'] = groupby
        response = self.app.get(full_path,
                                params=all_params,
                                headers=headers,
                                extra_environ=extra_environ,
                                expect_errors=expect_errors,
                                status=status)
        # On success callers want the decoded JSON body, not the response.
        return response.json if not expect_errors else response

View File

@ -0,0 +1,20 @@
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometer.tests.functional import api
class FunctionalTest(api.FunctionalTest):
    """Functional-test base whose requests target the v2 API root."""
    # Every request path issued by these tests is prefixed with '/v2'.
    PATH_PREFIX = '/v2'

View File

@ -84,7 +84,7 @@ class TestApi(test_base.BaseTestCase):
self.CONF.import_opt('pipeline_cfg_file', 'ceilometer.pipeline')
self.CONF.set_override(
'pipeline_cfg_file',
self.path_get('etc/ceilometer/pipeline.yaml')
self.path_get('etc/ceilometer/monasca_pipeline.yaml')
)
self.CONF.import_opt('monasca_mappings',
@ -147,6 +147,7 @@ class TestApi(test_base.BaseTestCase):
:param override_params: literally encoded query param string
:param params: content for wsgi.input of request
"""
q = q or []
groupby = groupby or []
full_path = self.PATH_PREFIX + path
@ -211,7 +212,8 @@ class TestListMeters(TestApi):
data = self.get_json('/meters')
self.assertEqual(True, mnl_mock.called)
self.assertEqual(1, mnl_mock.call_count)
self.assertEqual(2, mnl_mock.call_count,
"impl_monasca.py calls the metrics_list api twice.")
self.assertEqual(2, len(data))
(self.assertIn(meter['name'],
@ -219,6 +221,17 @@ class TestListMeters(TestApi):
self.meter_payload]) for meter in data)
def test_get_meters_query_with_project_resource(self):
"""Test meter name conversion for project-id and resource-id.
Previous versions of the monasca client did not do this conversion.
Pre-Newton expected:
'dimensions': {'project_id': u'project-1','resource_id': u'resource-1'}
Newton expected:
'dimensions': {'hostname': u'resource-1','project_id': u'project-1'}
"""
mnl_mock = self.mock_mon_client().metrics_list
mnl_mock.return_value = self.meter_payload
@ -228,10 +241,11 @@ class TestListMeters(TestApi):
{'field': 'project_id',
'value': 'project-1'}])
self.assertEqual(True, mnl_mock.called)
self.assertEqual(1, mnl_mock.call_count)
self.assertEqual(dict(dimensions=dict(resource_id=u'resource-1',
project_id=u'project-1'),
limit=100),
self.assertEqual(4, mnl_mock.call_count,
"impl_monasca.py expected to make 4 calls to mock.")
# Note - previous versions of the api included a limit value
self.assertEqual(dict(dimensions=dict(hostname=u'resource-1',
project_id=u'project-1')),
mnl_mock.call_args[1])
def test_get_meters_query_with_user(self):
@ -242,7 +256,13 @@ class TestListMeters(TestApi):
q=[{'field': 'user_id',
'value': 'user-1'}])
self.assertEqual(True, mnl_mock.called)
self.assertEqual(1, mnl_mock.call_count)
self.assertEqual(dict(dimensions=dict(user_id=u'user-1'),
limit=100),
self.assertEqual(2, mnl_mock.call_count,
"impl_monasca.py calls the metrics_list api twice.")
# Note - previous versions of the api included a limit value
self.assertEqual(dict(dimensions=dict(user_id=u'user-1')),
mnl_mock.call_args[1])
# TODO(joadavis) Test a bad query parameter
# Like using 'hostname' instead of 'resource_id'
# Expected result with bad parameter:
# webtest.app.AppError: Bad response: 400 Bad Request

View File

@ -0,0 +1,586 @@
#
# Copyright 2016 Hewlett Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import os
import mock
from oslo_config import fixture as fixture_config
from oslo_utils import fileutils
from oslo_utils import timeutils
from oslotest import base
from oslotest import mockpatch
import six
import yaml
from ceilometer.ceilosca_mapping import ceilosca_mapping
from ceilometer.ceilosca_mapping.ceilosca_mapping import (
CeiloscaMappingDefinition)
from ceilometer.ceilosca_mapping.ceilosca_mapping import (
CeiloscaMappingDefinitionException)
from ceilometer.ceilosca_mapping.ceilosca_mapping import PipelineReader
from ceilometer import storage
from ceilometer.storage import impl_monasca
from ceilometer.storage import models as storage_models
# Sample Monasca measurements-list payload shared by the tests below.
MONASCA_MEASUREMENT = {
    "id": "fef26f9d27f8027ea44b940cf3626fc398f7edfb",
    "name": "fake_metric",
    "dimensions": {
        "resource_id": "2fe6e3a9-9bdf-4c98-882c-a826cf0107a1",
        "cloud_name": "helion-poc-hlm-003",
        "component": "vm",
        "control_plane": "control-plane-1",
        "service": "compute",
        "device": "tap3356676e-a5",
        "tenant_id": "50ce24dd577c43879cede72b77224e2f",
        "hostname": "hlm003-cp1-comp0003-mgmt",
        "cluster": "compute",
        "zone": "nova"
    },
    "columns": ["timestamp", "value", "value_meta"],
    "measurements": [["2016-05-23T22:22:42.000Z", 54.0, {
        "audit_period_ending": "None",
        "audit_period_beginning": "None",
        "host": "network.hlm003-cp1-c1-m2-mgmt",
        "availability_zone": "None",
        "event_type": "subnet.create.end",
        "enable_dhcp": "true",
        "gateway_ip": "10.43.0.1",
        "ip_version": "4",
        "cidr": "10.43.0.0/28"}]]
}
# The value_meta portion of MONASCA_MEASUREMENT, as the tests expect it
# to be surfaced in Sample.resource_metadata.
MONASCA_VALUE_META = {
    'audit_period_beginning': 'None',
    'audit_period_ending': 'None',
    'availability_zone': 'None',
    'cidr': '10.43.0.0/28',
    'enable_dhcp': 'true',
    'event_type': 'subnet.create.end',
    'gateway_ip': '10.43.0.1',
    'host': 'network.hlm003-cp1-c1-m2-mgmt',
    'ip_version': '4'
}
class TestCeiloscaMapping(base.BaseTestCase):
    """Shared fixtures for the ceilosca-mapping test cases.

    Provides canned pipeline and mapping YAML content plus helpers that
    materialize them as temporary files.
    """

    pipeline_data = yaml.dump({
        'sources': [{
            'name': 'test_pipeline',
            'interval': 1,
            'meters': ['testbatch', 'testbatch2'],
            'resources': ['alpha', 'beta', 'gamma', 'delta'],
            'sinks': ['test_sink']}],
        'sinks': [{
            'name': 'test_sink',
            'transformers': [],
            'publishers': ["test"]}]
    })

    cfg = yaml.dump({
        'meter_metric_map': [{
            'user_id': '$.dimensions.user_id',
            'name': 'fake_meter',
            'resource_id': '$.dimensions.resource_id',
            'region': 'NA',
            'monasca_metric_name': 'fake_metric',
            'source': 'NA',
            'project_id': '$.dimensions.tenant_id',
            'type': 'gauge',
            'resource_metadata': '$.measurements[0][2]',
            'unit': 'B/s'
        }, {
            'user_id': '$.dimensions.user_id',
            'name': 'fake_meter2',
            'resource_id': '$.dimensions.resource_id',
            'region': 'NA',
            'monasca_metric_name': 'fake_metric2',
            'source': 'NA',
            'project_id': '$.dimensions.project_id',
            'type': 'delta',
            'resource_metadata': '$.measurements[0][2]',
            'unit': 'B/s'
        }, {
            'user_id': '$.dimensions.user_id',
            'name': 'fake_meter3',
            'resource_id': '$.dimensions.hostname',
            'region': 'NA',
            'monasca_metric_name': 'fake_metric3',
            'source': 'NA',
            'project_id': '$.dimensions.project_id',
            'type': 'delta',
            'resource_metadata': '$.measurements[0][2]',
            'unit': 'B/s'
        }
        ]
    })

    def setup_pipeline_file(self, pipeline_data):
        """Write *pipeline_data* to a temp file and return its path.

        The file is removed automatically at test cleanup.
        """
        if six.PY3:
            pipeline_data = pipeline_data.encode('utf-8')
        pipeline_path = fileutils.write_to_tempfile(content=pipeline_data,
                                                    prefix="pipeline",
                                                    suffix="yaml")
        self.addCleanup(os.remove, pipeline_path)
        return pipeline_path

    def setup_ceilosca_mapping_def_file(self, cfg):
        """Write mapping YAML *cfg* to a temp file and return its path.

        The file is removed automatically at test cleanup.
        """
        if six.PY3:
            cfg = cfg.encode('utf-8')
        mapping_path = fileutils.write_to_tempfile(
            content=cfg, prefix='ceilosca_mapping', suffix='yaml')
        self.addCleanup(os.remove, mapping_path)
        return mapping_path
class TestGetPipelineReader(TestCeiloscaMapping):
    """Tests for PipelineReader against a temporary pipeline file."""

    def setUp(self):
        super(TestGetPipelineReader, self).setUp()
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF([], project='ceilometer', validate_default_values=True)

    def test_pipeline_reader(self):
        # Point the pipeline config at our fixture and check the reader
        # reports exactly the meters declared in it.
        cfg_path = self.setup_pipeline_file(self.pipeline_data)
        self.CONF.set_override("pipeline_cfg_file", cfg_path)
        reader = PipelineReader()
        self.assertEqual(set(['testbatch', 'testbatch2']),
                         reader.get_pipeline_meters())
class TestMappingDefinition(base.BaseTestCase):
    """Unit tests for CeiloscaMappingDefinition parsing and validation."""

    def test_mapping_definition(self):
        """A valid mapping resolves JSONPath fields against a measurement."""
        cfg = dict(name="network.outgoing.rate",
                   monasca_metric_name="vm.net.out_bytes_sec",
                   resource_id="$.dimensions.resource_id",
                   project_id="$.dimensions.tenant_id",
                   user_id="$.dimensions.user_id",
                   region="NA",
                   type="gauge",
                   unit="B/s",
                   source="NA",
                   resource_metadata="$.measurements[0][2]")
        handler = CeiloscaMappingDefinition(cfg)
        # MONASCA_MEASUREMENT carries no user_id dimension, so the
        # JSONPath lookup yields None.
        self.assertIsNone(handler.parse_fields("user_id", MONASCA_MEASUREMENT))
        self.assertEqual("2fe6e3a9-9bdf-4c98-882c-a826cf0107a1",
                         handler.parse_fields("resource_id",
                                              MONASCA_MEASUREMENT))
        self.assertEqual("50ce24dd577c43879cede72b77224e2f",
                         handler.parse_fields("project_id",
                                              MONASCA_MEASUREMENT))
        self.assertEqual(MONASCA_VALUE_META,
                         handler.parse_fields("resource_metadata",
                                              MONASCA_MEASUREMENT))
        self.assertEqual("$.dimensions.tenant_id", handler.cfg["project_id"])

    def test_config_required_missing_fields(self):
        """An empty definition must raise and name every required field."""
        cfg = dict()
        try:
            CeiloscaMappingDefinition(cfg)
        except CeiloscaMappingDefinitionException as e:
            self.assertEqual("Required fields ["
                             "'name', 'monasca_metric_name', 'type', 'unit', "
                             "'source', 'resource_metadata', 'resource_id', "
                             "'project_id', 'user_id', 'region'] "
                             "not specified", e.message)
        else:
            # Previously the test passed silently when no exception was
            # raised; fail explicitly instead.
            self.fail("CeiloscaMappingDefinitionException not raised")

    def test_bad_type_cfg_definition(self):
        """A definition with an unknown sample type must be rejected."""
        cfg = dict(name="fake_meter",
                   monasca_metric_name="fake_metric",
                   resource_id="$.dimensions.resource_id",
                   project_id="$.dimensions.tenant_id",
                   user_id="$.dimensions.user_id",
                   region="NA",
                   type="foo",
                   unit="B/s",
                   source="NA",
                   resource_metadata="$.measurements[0][2]")
        try:
            CeiloscaMappingDefinition(cfg)
        except CeiloscaMappingDefinitionException as e:
            self.assertEqual("Invalid type foo specified", e.message)
        else:
            # Without this the test passed silently when no exception
            # was raised.
            self.fail("CeiloscaMappingDefinitionException not raised")
class TestMappedCeiloscaMetricProcessing(TestCeiloscaMapping):
    """Tests for loading and querying the ceilosca mapping definitions."""
    def setUp(self):
        super(TestMappedCeiloscaMetricProcessing, self).setUp()
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF([], project='ceilometer', validate_default_values=True)
    def test_fallback_mapping_file_path(self):
        # With CONF.find_file patched to return None, get_config_file()
        # must fall back to the mapping file bundled with the package.
        self.useFixture(mockpatch.PatchObject(self.CONF,
                        'find_file', return_value=None))
        fall_bak_path = ceilosca_mapping.get_config_file()
        self.assertIn("ceilosca_mapping/data/ceilosca_mapping.yaml",
                      fall_bak_path)
    @mock.patch('ceilometer.ceilosca_mapping.ceilosca_mapping.LOG')
    def test_bad_meter_definition_skip(self, LOG):
        # The second map entry has an invalid 'type' of "foo"; loading
        # must skip it, keep the valid one, and log the error.
        cfg = yaml.dump({
            'meter_metric_map': [{
                'user_id': '$.dimensions.user_id',
                'name': 'fake_meter',
                'resource_id': '$.dimensions.resource_id',
                'region': 'NA',
                'monasca_metric_name': 'fake_metric',
                'source': 'NA',
                'project_id': '$.dimensions.tenant_id',
                'type': 'gauge',
                'resource_metadata': '$.measurements[0][2]',
                'unit': 'B/s'
            }, {
                'user_id': '$.dimensions.user_id',
                'name': 'fake_meter',
                'resource_id': '$.dimensions.resource_id',
                'region': 'NA',
                'monasca_metric_name': 'fake_metric',
                'source': 'NA',
                'project_id': '$.dimensions.tenant_id',
                'type': 'foo',
                'resource_metadata': '$.measurements[0][2]',
                'unit': 'B/s'
            }]
        })
        ceilosca_mapping_file = self.setup_ceilosca_mapping_def_file(cfg)
        self.CONF.set_override('ceilometer_monasca_metrics_mapping',
                               ceilosca_mapping_file, group='monasca')
        data = ceilosca_mapping.setup_ceilosca_mapping_config()
        meter_loaded = ceilosca_mapping.load_definitions(data)
        self.assertEqual(1, len(meter_loaded))
        LOG.error.assert_called_with(
            "Error loading Ceilometer Monasca Mapping Definition : "
            "Invalid type foo specified")
    def test_list_of_meters_returned(self):
        # All three mapped Monasca metric names should be reported.
        ceilosca_mapping_file = self.setup_ceilosca_mapping_def_file(self.cfg)
        self.CONF.set_override('ceilometer_monasca_metrics_mapping',
                               ceilosca_mapping_file, group='monasca')
        ceilosca_mapper = ceilosca_mapping.ProcessMappedCeiloscaMetric()
        ceilosca_mapper.reinitialize()
        self.assertItemsEqual(['fake_metric', 'fake_metric2', 'fake_metric3'],
                              ceilosca_mapper.get_list_monasca_metrics().keys()
                              )
    def test_monasca_metric_name_map_ceilometer_meter(self):
        # A loaded definition can be looked up both by ceilometer meter
        # name and by Monasca metric name.
        cfg = yaml.dump({
            'meter_metric_map': [{
                'user_id': '$.dimensions.user_id',
                'name': 'fake_meter',
                'resource_id': '$.dimensions.resource_id',
                'region': 'NA',
                'monasca_metric_name': 'fake_metric',
                'source': 'NA',
                'project_id': '$.dimensions.tenant_id',
                'type': 'gauge',
                'resource_metadata': '$.measurements[0][2]',
                'unit': 'B/s'
            }]
        })
        ceilosca_mapping_file = self.setup_ceilosca_mapping_def_file(cfg)
        self.CONF.set_override('ceilometer_monasca_metrics_mapping',
                               ceilosca_mapping_file, group='monasca')
        ceilosca_mapper = ceilosca_mapping.ProcessMappedCeiloscaMetric()
        ceilosca_mapper.reinitialize()
        self.assertEqual('fake_metric',
                         ceilosca_mapper.get_monasca_metric_name('fake_meter')
                         )
        self.assertEqual('$.dimensions.tenant_id',
                         ceilosca_mapper.
                         get_ceilosca_mapped_definition_key_val('fake_metric',
                                                                'project_id'))
# This class only tests the driver for the mapped metrics.
# The Impl_Monasca tests do exhaustive testing for non-mapped metrics.
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
class TestMoanscaDriverForMappedMetrics(TestCeiloscaMapping):
    """Driver-level tests for mapped metrics against a mocked Monasca client.

    Each test constructs an impl_monasca.Connection with the
    monasca_client.Client mocked out and asserts how the driver
    translates mapped meters to/from Monasca API calls.
    """
    # Lightweight stand-in for the API aggregate parameter.
    Aggregate = collections.namedtuple("Aggregate", ['func', 'param'])
    def setUp(self):
        super(TestMoanscaDriverForMappedMetrics, self).setUp()
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF([], project='ceilometer', validate_default_values=True)
        pipeline_cfg_file = self.setup_pipeline_file(self.pipeline_data)
        self.CONF.set_override("pipeline_cfg_file", pipeline_cfg_file)
        ceilosca_mapping_file = self.setup_ceilosca_mapping_def_file(self.cfg)
        self.CONF.set_override('ceilometer_monasca_metrics_mapping',
                               ceilosca_mapping_file, group='monasca')
        # Reload the mapping singleton so it picks up the override above.
        ceilosca_mapper = ceilosca_mapping.ProcessMappedCeiloscaMetric()
        ceilosca_mapper.reinitialize()
    def test_get_samples_for_mapped_meters(self, mdf_mock):
        # get_samples('fake_meter') should query the mapped Monasca
        # metric and translate the measurement back into a Sample.
        with mock.patch("ceilometer.monasca_client.Client") as mock_client:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            ml_mock = mock_client().measurements_list
            # TODO(this test case needs more work)
            ml_mock.return_value = ([MONASCA_MEASUREMENT])
            sample_filter = storage.SampleFilter(
                meter='fake_meter',
                start_timestamp='2015-03-20T00:00:00Z')
            results = list(conn.get_samples(sample_filter))
            self.assertEqual(True, ml_mock.called)
            self.assertEqual('fake_meter', results[0].counter_name)
            self.assertEqual(54.0, results[0].counter_volume)
            self.assertEqual('gauge', results[0].counter_type)
            self.assertEqual('2fe6e3a9-9bdf-4c98-882c-a826cf0107a1',
                             results[0].resource_id
                             )
            self.assertEqual(MONASCA_VALUE_META, results[0].resource_metadata)
            self.assertEqual('50ce24dd577c43879cede72b77224e2f',
                             results[0].project_id,
                             )
            self.assertEqual('B/s', results[0].counter_unit)
            self.assertIsNone(results[0].user_id)
    def test_get_meter_for_mapped_meters_non_uniq(self, mdf_mock):
        # Two successive metrics_list payloads: data1 has only
        # ceilometer-sourced metrics, data2 adds a mapped metric and a
        # pipeline meter without the ceilometer datasource dimension.
        data1 = (
            [{u'dimensions': {u'datasource': u'ceilometer'},
              u'id': u'2015-04-14T18:42:31Z',
              u'name': u'meter-1'},
             {u'dimensions': {u'datasource': u'ceilometer'},
              u'id': u'2015-04-15T18:42:31Z',
              u'name': u'meter-1'}])
        data2 = (
            [{u'dimensions': {u'datasource': u'ceilometer'},
              u'id': u'2015-04-14T18:42:31Z',
              u'name': u'meter-1'},
             {u'dimensions': {u'datasource': u'ceilometer'},
              u'id': u'2015-04-15T18:42:31Z',
              u'name': u'meter-1'},
             {u'id': u'fef26f9d27f8027ea44b940cf3626fc398f7edfb',
              u'name': u'fake_metric',
              u'dimensions': {
                  u'resource_id': u'2fe6e3a9-9bdf-4c98-882c-a826cf0107a1',
                  u'cloud_name': u'helion-poc-hlm-003',
                  u'component': u'vm',
                  u'control_plane': u'control-plane-1',
                  u'service': u'compute',
                  u'device': u'tap3356676e-a5',
                  u'tenant_id': u'50ce24dd577c43879cede72b77224e2f',
                  u'hostname': u'hlm003-cp1-comp0003-mgmt',
                  u'cluster': u'compute',
                  u'zone': u'nova'}
              },
             {u'dimensions': {},
              u'id': u'2015-04-16T18:42:31Z',
              u'name': u'testbatch'}])
        with mock.patch("ceilometer.monasca_client.Client") as mock_client:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            metrics_list_mock = mock_client().metrics_list
            metrics_list_mock.side_effect = [data1, data2]
            kwargs = dict(limit=4)
            results = list(conn.get_meters(**kwargs))
            # result contains 2 records from data 1 since datasource
            # = ceilometer, 2 records from data 2, 1 for pipeline
            # meter but no datasource set to ceilometer and one for
            # mapped meter
            self.assertEqual(4, len(results))
            self.assertEqual(True, metrics_list_mock.called)
            self.assertEqual(2, metrics_list_mock.call_count)
    def test_get_meter_for_mapped_meters_uniq(self, mdf_mock):
        # With unique=True the driver should take the cheaper
        # metric_names_list path and return each name once.
        dummy_metric_names_mocked_return_value = (
            [{"id": "015c995b1a770147f4ef18f5841ef566ab33521d",
              "name": "network.delete"},
             {"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
              "name": "subnet.update"}])
        with mock.patch("ceilometer.monasca_client.Client") as mock_client:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            metric_names_list_mock = mock_client().metric_names_list
            metric_names_list_mock.return_value = (
                dummy_metric_names_mocked_return_value)
            kwargs = dict(limit=4, unique=True)
            results = list(conn.get_meters(**kwargs))
            self.assertEqual(2, len(results))
            self.assertEqual(True, metric_names_list_mock.called)
            self.assertEqual(1, metric_names_list_mock.call_count)
    def test_stats_list_mapped_meters(self, mock_mdf):
        # Statistics for a mapped meter should carry the mapped unit
        # ('B/s') rather than the Monasca dimension unit ('gb').
        with mock.patch("ceilometer.monasca_client.Client") as mock_client:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            sl_mock = mock_client().statistics_list
            sl_mock.return_value = [
                {
                    'statistics':
                        [
                            ['2014-10-24T12:12:12Z', 0.008],
                            ['2014-10-24T12:52:12Z', 0.018]
                        ],
                    'dimensions': {'unit': 'gb'},
                    'columns': ['timestamp', 'min']
                }
            ]
            sf = storage.SampleFilter()
            sf.meter = "fake_meter"
            aggregate = self.Aggregate(func="min", param=None)
            sf.start_timestamp = timeutils.parse_isotime(
                '2014-10-24T12:12:42').replace(tzinfo=None)
            stats = list(conn.get_meter_statistics(sf, aggregate=[aggregate],
                                                   period=30))
            self.assertEqual(2, len(stats))
            self.assertEqual('B/s', stats[0].unit)
            self.assertEqual('B/s', stats[1].unit)
            self.assertEqual(0.008, stats[0].min)
            self.assertEqual(0.018, stats[1].min)
            self.assertEqual(30, stats[0].period)
            self.assertEqual('2014-10-24T12:12:42',
                             stats[0].period_end.isoformat())
            self.assertEqual('2014-10-24T12:52:42',
                             stats[1].period_end.isoformat())
            self.assertIsNotNone(stats[0].as_dict().get('aggregate'))
            self.assertEqual({u'min': 0.008}, stats[0].as_dict()['aggregate'])
    def test_get_resources_for_mapped_meters(self, mock_mdf):
        # Resources derived from measurements of a mapped meter honour
        # the limit argument; without a limit all rows come back.
        with mock.patch("ceilometer.monasca_client.Client") as mock_client:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            dummy_metric_names_mocked_return_value = (
                [{"id": "015c995b1a770147f4ef18f5841ef566ab33521d",
                  "name": "fake_metric"},
                 {"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
                  "name": "metric1"}])
            mnl_mock = mock_client().metric_names_list
            mnl_mock.return_value = (
                dummy_metric_names_mocked_return_value)
            dummy_get_resources_mocked_return_value = (
                [{u'dimensions': {u'resource_id': u'abcd'},
                  u'measurements': [[u'2015-04-14T17:52:31Z', 1.0, {}],
                                    [u'2015-04-15T17:52:31Z', 2.0, {}],
                                    [u'2015-04-16T17:52:31Z', 3.0, {}]],
                  u'id': u'2015-04-14T18:42:31Z',
                  u'columns': [u'timestamp', u'value', u'value_meta'],
                  u'name': u'fake_metric'}])
            ml_mock = mock_client().measurements_list
            ml_mock.return_value = (
                dummy_get_resources_mocked_return_value)
            sample_filter = storage.SampleFilter(
                meter='fake_meter', end_timestamp='2015-04-20T00:00:00Z')
            resources = list(conn.get_resources(sample_filter, limit=2))
            self.assertEqual(2, len(resources))
            self.assertEqual(True, ml_mock.called)
            self.assertEqual(1, ml_mock.call_count)
            resources_without_limit = list(conn.get_resources(sample_filter))
            self.assertEqual(3, len(resources_without_limit))
    def test_stats_list_with_groupby_for_mapped_meters(self, mock_mdf):
        # fake_meter3 maps resource_id to the hostname dimension, so
        # grouping by resource_id should yield one stats group per host.
        with mock.patch("ceilometer.monasca_client.Client") as mock_client:
            conn = impl_monasca.Connection("127.0.0.1:8080")
            sl_mock = mock_client().statistics_list
            sl_mock.return_value = [
                {
                    'statistics':
                        [
                            ['2014-10-24T12:12:12Z', 0.008, 1.3, 3, 0.34],
                            ['2014-10-24T12:20:12Z', 0.078, 1.25, 2, 0.21],
                            ['2014-10-24T12:52:12Z', 0.018, 0.9, 4, 0.14]
                        ],
                    'dimensions': {'hostname': '1234', 'unit': 'gb'},
                    'columns': ['timestamp', 'min', 'max', 'count', 'avg']
                },
                {
                    'statistics':
                        [
                            ['2014-10-24T12:14:12Z', 0.45, 2.5, 2, 2.1],
                            ['2014-10-24T12:20:12Z', 0.58, 3.2, 3, 3.4],
                            ['2014-10-24T13:52:42Z', 1.67, 3.5, 1, 5.3]
                        ],
                    'dimensions': {'hostname': '5678', 'unit': 'gb'},
                    'columns': ['timestamp', 'min', 'max', 'count', 'avg']
                }]
            sf = storage.SampleFilter()
            sf.meter = "fake_meter3"
            sf.start_timestamp = timeutils.parse_isotime(
                '2014-10-24T12:12:42').replace(tzinfo=None)
            groupby = ['resource_id']
            stats = list(conn.get_meter_statistics(sf, period=30,
                                                   groupby=groupby))
            self.assertEqual(2, len(stats))
            for stat in stats:
                self.assertIsNotNone(stat.groupby)
                resource_id = stat.groupby.get('resource_id')
                self.assertIn(resource_id, ['1234', '5678'])
                if resource_id == '1234':
                    self.assertEqual(0.008, stat.min)
                    self.assertEqual(1.3, stat.max)
                    self.assertEqual(0.23, stat.avg)
                    self.assertEqual(9, stat.count)
                    self.assertEqual(30, stat.period)
                    self.assertEqual('2014-10-24T12:12:12',
                                     stat.period_start.isoformat())
                if resource_id == '5678':
                    self.assertEqual(0.45, stat.min)
                    self.assertEqual(3.5, stat.max)
                    self.assertEqual(3.6, stat.avg)
                    self.assertEqual(6, stat.count)
                    self.assertEqual(30, stat.period)
                    self.assertEqual('2014-10-24T13:52:42',
                                     stat.period_end.isoformat())
    def test_query_samples_for_mapped_meter(self, mock_mdf):
        # query_samples decomposes the complex filter into sub-queries,
        # each served by one get_samples call; the second query's
        # counter_volume predicate is applied client-side.
        SAMPLES = [[
            storage_models.Sample(
                counter_name="fake_meter",
                counter_type="gauge",
                counter_unit="instance",
                counter_volume=1,
                project_id="123",
                user_id="456",
                resource_id="789",
                resource_metadata={},
                source="openstack",
                recorded_at=timeutils.utcnow(),
                timestamp=timeutils.utcnow(),
                message_id="0",
                message_signature='', )
        ]] * 2
        samples = SAMPLES[:]
        def _get_samples(*args, **kwargs):
            return samples.pop()
        with mock.patch("ceilometer.monasca_client.Client"):
            conn = impl_monasca.Connection("127.0.0.1:8080")
            with mock.patch.object(conn, 'get_samples') as gsm:
                gsm.side_effect = _get_samples
                query = {'and': [{'=': {'counter_name': 'fake_meter'}},
                                 {'or': [{'=': {"project_id": "123"}},
                                         {'=': {"user_id": "456"}}]}]}
                samples = conn.query_samples(query, None, 100)
                self.assertEqual(2, len(samples))
                self.assertEqual(2, gsm.call_count)
                samples = SAMPLES[:]
                query = {'and': [{'=': {'counter_name': 'fake_meter'}},
                                 {'or': [{'=': {"project_id": "123"}},
                                         {'>': {"counter_volume": 2}}]}]}
                samples = conn.query_samples(query, None, 100)
                self.assertEqual(1, len(samples))
                self.assertEqual(4, gsm.call_count)

View File

@ -0,0 +1,275 @@
#
# Copyright 2016 Hewlett Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from oslo_config import fixture as fixture_config
from oslo_utils import fileutils
from oslotest import base
from oslotest import mockpatch
import six
import yaml
from ceilometer.ceilosca_mapping import ceilometer_static_info_mapping
from ceilometer.ceilosca_mapping.ceilometer_static_info_mapping import (
CeilometerStaticMappingDefinition)
from ceilometer.ceilosca_mapping.ceilometer_static_info_mapping import (
CeilometerStaticMappingDefinitionException)
from ceilometer.storage import impl_monasca
class TestStaticInfoBase(base.BaseTestCase):
    """Shared fixtures for the static-info mapping test cases.

    Provides canned pipeline, static-info, and ceilosca mapping YAML
    plus helpers that materialize them as temporary files.
    """

    pipeline_data = yaml.dump({
        'sources': [{
            'name': 'test_pipeline',
            'interval': 1,
            'meters': ['testbatch', 'testbatch2'],
            'resources': ['alpha', 'beta', 'gamma', 'delta'],
            'sinks': ['test_sink']}],
        'sinks': [{
            'name': 'test_sink',
            'transformers': [],
            'publishers': ["test"]}]
    })

    cfg = yaml.dump({
        'meter_info_static_map': [{
            'name': "disk.ephemeral.size",
            'type': "gauge",
            'unit': "GB"
        }, {
            'name': "image.delete",
            'type': "delta",
            'unit': "image"
        }, {
            'name': "image",
            'type': "gauge",
            'unit': "image"
        }, {
            'name': "disk.root.size",
            'type': "gauge",
            'unit': "GB"
        }
        ]
    })

    ceilosca_cfg = yaml.dump({
        'meter_metric_map': [{
            'user_id': '$.dimensions.user_id',
            'name': 'fake_meter',
            'resource_id': '$.dimensions.resource_id',
            'region': 'NA',
            'monasca_metric_name': 'fake_metric',
            'source': 'NA',
            'project_id': '$.dimensions.tenant_id',
            'type': 'gauge',
            'resource_metadata': '$.measurements[0][2]',
            'unit': 'B/s'
        }, {
            'user_id': '$.dimensions.user_id',
            'name': 'fake_meter2',
            'resource_id': '$.dimensions.resource_id',
            'region': 'NA',
            'monasca_metric_name': 'fake_metric2',
            'source': 'NA',
            'project_id': '$.dimensions.project_id',
            'type': 'delta',
            'resource_metadata': '$.measurements[0][2]',
            'unit': 'B/s'
        }]
    })

    def setup_static_mapping_def_file(self, cfg):
        """Write static-info YAML *cfg* to a temp file; return its path."""
        if six.PY3:
            cfg = cfg.encode('utf-8')
        mapping_path = fileutils.write_to_tempfile(
            content=cfg, prefix='ceilometer_static_info_mapping',
            suffix='yaml')
        self.addCleanup(os.remove, mapping_path)
        return mapping_path

    def setup_ceilosca_mapping_def_file(self, ceilosca_cfg):
        """Write ceilosca mapping YAML to a temp file; return its path."""
        if six.PY3:
            ceilosca_cfg = ceilosca_cfg.encode('utf-8')
        mapping_path = fileutils.write_to_tempfile(
            content=ceilosca_cfg, prefix='ceilosca_mapping', suffix='yaml')
        self.addCleanup(os.remove, mapping_path)
        return mapping_path

    def setup_pipeline_file(self, pipeline_data):
        """Write *pipeline_data* to a temp pipeline file; return its path."""
        if six.PY3:
            pipeline_data = pipeline_data.encode('utf-8')
        pipeline_path = fileutils.write_to_tempfile(content=pipeline_data,
                                                    prefix="pipeline",
                                                    suffix="yaml")
        self.addCleanup(os.remove, pipeline_path)
        return pipeline_path
class TestStaticInfoDefinition(base.BaseTestCase):
    """Unit tests for CeilometerStaticMappingDefinition validation."""

    def test_static_info_definition(self):
        """A valid static mapping keeps name/type/unit as configured."""
        cfg = dict(name="image.delete",
                   type="delta",
                   unit="image")
        handler = CeilometerStaticMappingDefinition(cfg)
        self.assertEqual("delta", handler.cfg['type'])
        self.assertEqual("image.delete", handler.cfg['name'])
        self.assertEqual("image", handler.cfg['unit'])

    def test_config_required_missing_fields(self):
        """An empty definition must raise and list all required fields."""
        cfg = dict()
        try:
            CeilometerStaticMappingDefinition(cfg)
        except CeilometerStaticMappingDefinitionException as e:
            self.assertEqual("Required fields ["
                             "'name', 'type', 'unit'] "
                             "not specified", e.message)
        else:
            # Previously the test passed silently when no exception was
            # raised; fail explicitly instead.
            self.fail("CeilometerStaticMappingDefinitionException not raised")

    def test_bad_type_cfg_definition(self):
        """A definition with an unknown sample type must be rejected."""
        cfg = dict(name="fake_meter",
                   type="foo",
                   unit="B/s")
        try:
            CeilometerStaticMappingDefinition(cfg)
        except CeilometerStaticMappingDefinitionException as e:
            self.assertEqual("Invalid type foo specified", e.message)
        else:
            # Without this the test passed silently when no exception
            # was raised.
            self.fail("CeilometerStaticMappingDefinitionException not raised")
class TestMappedCeilometerStaticInfoProcessing(TestStaticInfoBase):
    def setUp(self):
        super(TestMappedCeilometerStaticInfoProcessing, self).setUp()
        self.CONF = self.useFixture(fixture_config.Config()).conf
        static_info_mapping_file = self.setup_static_mapping_def_file(self.cfg)
        self.CONF.set_override('ceilometer_static_info_mapping',
                               static_info_mapping_file, group='monasca')
        # NOTE(review): the singleton is created before self.CONF([]) is
        # invoked below; tests call reinitialize() to pick up overrides —
        # confirm this ordering is intentional.
        self.static_info_mapper = ceilometer_static_info_mapping\
            .ProcessMappedCeilometerStaticInfo()
        self.CONF([], project='ceilometer', validate_default_values=True)
def test_fallback_mapping_file_path(self):
self.useFixture(mockpatch.PatchObject(self.CONF,
'find_file', return_value=None))
self.CONF.set_override('ceilometer_static_info_mapping',
' ', group='monasca')
self.static_info_mapper.reinitialize()
fall_bak_path = ceilometer_static_info_mapping.get_config_file()
self.assertIn(
"ceilosca_mapping/data/ceilometer_static_info_mapping.yaml",
fall_bak_path)
@mock.patch(
'ceilometer.ceilosca_mapping.ceilometer_static_info_mapping.LOG')
def test_bad_mapping_definition_skip(self, LOG):
cfg = yaml.dump({
'meter_info_static_map': [{
'name': "disk.ephemeral.size",
'type': "gauge",
'unit': "GB"
}, {
'name': "image.delete",
'type': "delta",
'unit': "image"
}, {
'name': "image",
'type': "gauge",
'unit': "image"
}, {
'name': "disk.root.size",
'type': "foo",
'unit': "GB"
}]
})
static_info_mapping_file = self.setup_static_mapping_def_file(cfg)
self.CONF.set_override('ceilometer_static_info_mapping',
static_info_mapping_file, group='monasca')
data = ceilometer_static_info_mapping.\
setup_ceilometer_static_mapping_config()
meter_loaded = ceilometer_static_info_mapping.load_definitions(data)
self.assertEqual(3, len(meter_loaded))
LOG.error.assert_called_with(
"Error loading Ceilometer Static Mapping Definition : "
"Invalid type foo specified")
def test_list_of_meters_returned(self):
self.static_info_mapper.reinitialize()
self.assertItemsEqual(['disk.ephemeral.size', 'disk.root.size',
'image', 'image.delete'],
self.static_info_mapper.
get_list_supported_meters().
keys()
)
def test_static_info_of_ceilometer_meter(self):
cfg = yaml.dump({
'meter_info_static_map': [{
'name': "disk.ephemeral.size",
'type': "gauge",
'unit': "GB"
}]
})
static_info_mapping_file = self.setup_static_mapping_def_file(cfg)
self.CONF.set_override('ceilometer_static_info_mapping',
static_info_mapping_file, group='monasca')
self.static_info_mapper.reinitialize()
self.assertEqual('gauge',
self.static_info_mapper.get_meter_static_info_key_val(
'disk.ephemeral.size', 'type')
)
# This Class will only test the driver for the mapped static info
# Impl_Monasca Tests will be doing exhaustive tests for other test cases
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
class TestMoanscaDriverForMappedStaticInfo(TestStaticInfoBase):
    # NOTE(review): "Moansca" looks like a typo for "Monasca"; name kept
    # unchanged since test discovery references it as-is.

    def setUp(self):
        # Wire pipeline, static-info and ceilosca mapping files through
        # temp copies so the Monasca storage driver sees the test config.
        super(TestMoanscaDriverForMappedStaticInfo, self).setUp()
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF([], project='ceilometer', validate_default_values=True)
        pipeline_cfg_file = self.setup_pipeline_file(self.pipeline_data)
        self.CONF.set_override("pipeline_cfg_file", pipeline_cfg_file)
        static_info_mapping_file = self.setup_static_mapping_def_file(self.cfg)
        self.CONF.set_override('ceilometer_static_info_mapping',
                               static_info_mapping_file, group='monasca')
        ceilosca_mapping_file = self.setup_ceilosca_mapping_def_file(
            self.ceilosca_cfg)
        self.CONF.set_override('ceilometer_monasca_metrics_mapping',
                               ceilosca_mapping_file, group='monasca')
        self.static_info_mapper = ceilometer_static_info_mapping\
            .ProcessMappedCeilometerStaticInfo()
        self.static_info_mapper.reinitialize()

    def test_get_statc_info_for_mapped_meters_uniq(self, mdf_mock):
        # With unique=True, get_meters() must yield one entry per metric
        # name returned by metric_names_list (2 here) in a single API call.
        dummy_metric_names_mocked_return_value = (
            [{"id": "015c995b1a770147f4ef18f5841ef566ab33521d",
              "name": "image"},
             {"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
              "name": "fake_metric"}])
        with mock.patch('ceilometer.monasca_client.Client') as mock_client:
            conn = impl_monasca.Connection('127.0.0.1:8080')
            metric_names_list_mock = mock_client().metric_names_list
            metric_names_list_mock.return_value = (
                dummy_metric_names_mocked_return_value
            )
            kwargs = dict(limit=4,
                          unique=True)
            results = list(conn.get_meters(**kwargs))
            self.assertEqual(2, len(results))
            self.assertEqual(True, metric_names_list_mock.called)
            self.assertEqual(1, metric_names_list_mock.call_count)

View File

@ -31,13 +31,16 @@ class TestMonUtils(base.BaseTestCase):
'user_id',
'geolocation',
'region',
'source',
'availability_zone'],
'metadata': {
'common': ['event_type',
'audit_period_beginning',
'audit_period_ending'],
'image': ['size', 'status'],
'image': ['size', 'status', 'image_meta.base_url',
'image_meta.base_url2', 'image_meta.base_url3',
'image_meta.base_url4'],
'image.delete': ['size', 'status'],
'image.size': ['size', 'status'],
'image.update': ['size', 'status'],
@ -108,6 +111,18 @@ class TestMonUtils(base.BaseTestCase):
self.assertIsNone(r['dimensions'].get('project_id'))
self.assertIsNone(r['dimensions'].get('user_id'))
def convert_dict_to_list(self, dct, prefix=None, outlst={}):
prefix = prefix+'.' if prefix else ""
for k, v in dct.items():
if type(v) is dict:
self.convert_dict_to_list(v, prefix+k, outlst)
else:
if v is not None:
outlst[prefix+k] = v
else:
outlst[prefix+k] = 'None'
return outlst
def test_process_sample_metadata(self):
s = sample.Sample(
name='image',
@ -120,8 +135,40 @@ class TestMonUtils(base.BaseTestCase):
timestamp=datetime.datetime.utcnow().isoformat(),
resource_metadata={'event_type': 'notification',
'status': 'active',
'size': '1500'},
)
'image_meta': {'base_url': 'http://image.url',
'base_url2': '',
'base_url3': None},
'size': 1500},
)
to_patch = ("ceilometer.publisher.monasca_data_filter."
"MonascaDataFilter._get_mapping")
with mock.patch(to_patch, side_effect=[self._field_mappings]):
data_filter = mdf.MonascaDataFilter()
r = data_filter.process_sample_for_monasca(s)
self.assertEqual(s.name, r['name'])
self.assertIsNotNone(r.get('value_meta'))
self.assertTrue(set(self.convert_dict_to_list(s.resource_metadata).
items()).issubset(set(r['value_meta'].items())))
def test_process_sample_metadata_with_empty_data(self):
s = sample.Sample(
name='image',
type=sample.TYPE_CUMULATIVE,
unit='',
volume=1,
user_id='test',
project_id='test',
resource_id='test_run_tasks',
source='',
timestamp=datetime.datetime.utcnow().isoformat(),
resource_metadata={'event_type': 'notification',
'status': 'active',
'image_meta': {'base_url': 'http://image.url',
'base_url2': '',
'base_url3': None},
'size': 0},
)
to_patch = ("ceilometer.publisher.monasca_data_filter."
"MonascaDataFilter._get_mapping")
@ -131,6 +178,47 @@ class TestMonUtils(base.BaseTestCase):
self.assertEqual(s.name, r['name'])
self.assertIsNotNone(r.get('value_meta'))
self.assertEqual(s.source, r['dimensions']['source'])
self.assertTrue(set(self.convert_dict_to_list(s.resource_metadata).
items()).issubset(set(r['value_meta'].items())))
self.assertEqual(s.resource_metadata.items(),
r['value_meta'].items())
    def test_process_sample_metadata_with_extendedKey(self):
        # Verifies that nested resource_metadata keys listed in the field
        # mapping with dotted "extended" names (image_meta.base_urlN) are
        # flattened into value_meta, and that mapped-but-missing entries
        # are stringified as 'None'.
        s = sample.Sample(
            name='image',
            type=sample.TYPE_CUMULATIVE,
            unit='',
            volume=1,
            user_id='test',
            project_id='test',
            resource_id='test_run_tasks',
            source='',
            timestamp=datetime.datetime.utcnow().isoformat(),
            resource_metadata={'event_type': 'notification',
                               'status': 'active',
                               'image_meta': {'base_url': 'http://image.url',
                                              'base_url2': '',
                                              'base_url3': None},
                               'size': 0},
        )
        to_patch = ("ceilometer.publisher.monasca_data_filter."
                    "MonascaDataFilter._get_mapping")
        with mock.patch(to_patch, side_effect=[self._field_mappings]):
            data_filter = mdf.MonascaDataFilter()
            r = data_filter.process_sample_for_monasca(s)
            self.assertEqual(s.name, r['name'])
            self.assertIsNotNone(r.get('value_meta'))
            # Every flattened metadata item must appear in value_meta.
            self.assertTrue(set(self.convert_dict_to_list(s.resource_metadata).
                                items()).issubset(set(r['value_meta'].items())))
            self.assertEqual(r.get('value_meta')['image_meta.base_url'],
                             s.resource_metadata.get('image_meta')
                             ['base_url'])
            self.assertEqual(r.get('value_meta')['image_meta.base_url2'],
                             s.resource_metadata.get('image_meta')
                             ['base_url2'])
            # base_url3 is None in the sample; the filter stores its string
            # form.
            self.assertEqual(r.get('value_meta')['image_meta.base_url3'],
                             str(s.resource_metadata.get('image_meta')
                                 ['base_url3']))
            # base_url4 is absent from the sample metadata entirely; mapped
            # keys with no source value are expected to become 'None'.
            self.assertEqual(r.get('value_meta')['image_meta.base_url4'],
                             'None')

View File

@ -16,10 +16,14 @@
"""
import datetime
import eventlet
import os
import time
from keystoneauth1 import loading as ka_loading
import mock
from oslo_config import cfg
from oslo_config import fixture as fixture_config
from oslo_utils import fileutils
from oslotest import base
from oslotest import mockpatch
@ -97,14 +101,6 @@ class TestMonascaPublisher(base.BaseTestCase):
}
}
opts = [
cfg.StrOpt("username", default="ceilometer"),
cfg.StrOpt("password", default="password"),
cfg.StrOpt("auth_url", default="http://192.168.10.6:5000"),
cfg.StrOpt("project_name", default="service"),
cfg.StrOpt("project_id", default="service"),
]
@staticmethod
def create_side_effect(exception_type, test_exception):
def side_effect(*args, **kwargs):
@ -116,12 +112,32 @@ class TestMonascaPublisher(base.BaseTestCase):
def setUp(self):
super(TestMonascaPublisher, self).setUp()
content = ("[service_credentials]\n"
"auth_type = password\n"
"username = ceilometer\n"
"password = admin\n"
"auth_url = http://localhost:5000/v2.0\n")
tempfile = fileutils.write_to_tempfile(content=content,
prefix='ceilometer',
suffix='.conf')
self.addCleanup(os.remove, tempfile)
self.CONF = self.useFixture(fixture_config.Config()).conf
self.CONF([], project='ceilometer', validate_default_values=True)
self.CONF.register_opts(self.opts, group="service_credentials")
self.CONF([], default_config_files=[tempfile])
ka_loading.load_auth_from_conf_options(self.CONF,
"service_credentials")
self.parsed_url = mock.MagicMock()
ksclient.KSClient = mock.MagicMock()
def tearDown(self):
# For some reason, cfg.CONF is registered a required option named
# auth_url after these tests run, which occasionally blocks test
# case test_event_pipeline_endpoint_requeue_on_failure, so we
# unregister it here.
self.CONF.reset()
self.CONF.unregister_opt(cfg.StrOpt('auth_url'),
group='service_credentials')
super(TestMonascaPublisher, self).tearDown()
@mock.patch("ceilometer.publisher.monasca_data_filter."
"MonascaDataFilter._get_mapping",
side_effect=[field_mappings])
@ -147,12 +163,11 @@ class TestMonascaPublisher(base.BaseTestCase):
publisher = monclient.MonascaPublisher(self.parsed_url)
publisher.mon_client = mock.MagicMock()
with mock.patch.object(publisher.mon_client,
'metrics_create') as mock_create:
mock_create.return_value = FakeResponse(204)
publisher.publish_samples(self.test_data)
eventlet.sleep(2)
time.sleep(10)
self.assertEqual(1, mock_create.call_count)
self.assertEqual(1, mapping_patch.called)
@ -176,7 +191,7 @@ class TestMonascaPublisher(base.BaseTestCase):
mon_client.MonascaServiceException,
raise_http_error)
publisher.publish_samples(self.test_data)
eventlet.sleep(5)
time.sleep(60)
self.assertEqual(4, mock_create.call_count)
self.assertEqual(1, mapping_patch.called)

View File

@ -15,20 +15,76 @@
import collections
import datetime
import os
import dateutil.parser
import mock
from oslo_config import fixture as fixture_config
from oslo_utils import fileutils
from oslo_utils import timeutils
from oslotest import base
import six
import yaml
import ceilometer
from ceilometer.api.controllers.v2 import meters
from ceilometer.api.controllers.v2.meters import Aggregate
from ceilometer.ceilosca_mapping import ceilometer_static_info_mapping
from ceilometer.ceilosca_mapping import ceilosca_mapping
from ceilometer import storage
from ceilometer.storage import impl_monasca
from ceilometer.storage import models as storage_models
class TestGetResources(base.BaseTestCase):
class _BaseTestCase(base.BaseTestCase):
    """Shared fixture: minimal service credentials + patched driver deps."""

    def setUp(self):
        super(_BaseTestCase, self).setUp()
        # Write a throwaway config file holding keystone credentials so
        # oslo.config parsing (and keystoneauth option loading) succeeds.
        content = ("[service_credentials]\n"
                   "auth_type = password\n"
                   "username = ceilometer\n"
                   "password = admin\n"
                   "auth_url = http://localhost:5000/v2.0\n")
        tempfile = fileutils.write_to_tempfile(content=content,
                                               prefix='ceilometer',
                                               suffix='.conf')
        self.addCleanup(os.remove, tempfile)
        conf = self.useFixture(fixture_config.Config()).conf
        conf([], default_config_files=[tempfile])
        self.CONF = conf
        # Patch out the data filter and pipeline setup: these tests target
        # the storage driver only.
        mdf = mock.patch.object(impl_monasca, 'MonascaDataFilter')
        mdf.start()
        self.addCleanup(mdf.stop)
        spl = mock.patch('ceilometer.pipeline.setup_pipeline')
        spl.start()
        self.addCleanup(spl.stop)
        self.static_info_mapper = ceilometer_static_info_mapping\
            .ProcessMappedCeilometerStaticInfo()
        self.static_info_mapper.reinitialize()

    def assertRaisesWithMessage(self, msg, exc_class, func, *args, **kwargs):
        """Assert func raises exc_class whose .message equals msg.

        NOTE(review): relies on the Python 2-only ``e.message`` attribute;
        confirm before running under Python 3.
        """
        try:
            func(*args, **kwargs)
            self.fail('Expecting %s exception, none raised' %
                      exc_class.__name__)
        except AssertionError:
            raise
        # Only catch specific exception so we can get stack trace when fail
        except exc_class as e:
            self.assertEqual(msg, e.message)

    def assert_raise_within_message(self, msg, e_cls, func, *args, **kwargs):
        """Assert func raises e_cls whose string form contains msg."""
        try:
            func(*args, **kwargs)
            self.fail('Expecting %s exception, none raised' %
                      e_cls.__name__)
        except AssertionError:
            raise
        # Only catch specific exception so we can get stack trace when fail
        except e_cls as e:
            self.assertIn(msg, '%s' % e)
class TestGetResources(_BaseTestCase):
dummy_get_resources_mocked_return_value = (
[{u'dimensions': {},
@ -37,13 +93,50 @@ class TestGetResources(base.BaseTestCase):
u'columns': [u'timestamp', u'value', u'value_meta'],
u'name': u'image'}])
cfg = yaml.dump({
'meter_metric_map': [{
'user_id': '$.dimensions.user_id',
'name': 'network.incoming.rate',
'resource_id': '$.dimensions.resource_id',
'region': 'NA',
'monasca_metric_name': 'vm.net.in_rate',
'source': 'NA',
'project_id': '$.dimensions.tenant_id',
'type': 'gauge',
'resource_metadata': '$.measurements[0][2]',
'unit': 'B/s'
}, {
'user_id': '$.dimensions.user_id',
'name': 'network.outgoing.rate',
'resource_id': '$.dimensions.resource_id',
'region': 'NA',
'monasca_metric_name': 'vm.net.out_rate',
'source': 'NA',
'project_id': '$.dimensions.project_id',
'type': 'delta',
'resource_metadata': '$.measurements[0][2]',
'unit': 'B/s'
}]
})
def setup_ceilosca_mapping_def_file(self, cfg):
if six.PY3:
cfg = cfg.encode('utf-8')
ceilosca_mapping_file = fileutils.write_to_tempfile(
content=cfg, prefix='ceilosca_mapping', suffix='yaml')
self.addCleanup(os.remove, ceilosca_mapping_file)
return ceilosca_mapping_file
def setUp(self):
super(TestGetResources, self).setUp()
self.CONF = self.useFixture(fixture_config.Config()).conf
self.CONF([], project='ceilometer', validate_default_values=True)
ceilosca_mapping_file = self.setup_ceilosca_mapping_def_file(
TestGetResources.cfg)
self.CONF.set_override('ceilometer_monasca_metrics_mapping',
ceilosca_mapping_file, group='monasca')
ceilosca_mapper = ceilosca_mapping.ProcessMappedCeiloscaMetric()
ceilosca_mapper.reinitialize()
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_not_implemented_params(self, mock_mdf):
def test_not_implemented_params(self):
with mock.patch("ceilometer.monasca_client.Client"):
conn = impl_monasca.Connection("127.0.0.1:8080")
@ -54,62 +147,100 @@ class TestGetResources(base.BaseTestCase):
self.assertRaises(ceilometer.NotImplementedError,
lambda: list(conn.get_resources(**kwargs)))
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_dims_filter(self, mdf_patch):
def test_dims_filter(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
start_timestamp = timeutils.isotime(datetime.datetime(1970, 1, 1))
mnl_mock = mock_client().metrics_list
mnl_mock = mock_client().metric_names_list
mnl_mock.return_value = [
{
'name': 'some',
'dimensions': {}
"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
"name": "some"
}
]
kwargs = dict(project='proj1')
end_time = datetime.datetime(2015, 4, 1, 12, 00, 00)
kwargs = dict(project='proj1',
end_timestamp=end_time)
list(conn.get_resources(**kwargs))
self.assertEqual(True, mnl_mock.called)
self.assertEqual(dict(dimensions=dict(
project_id='proj1'), start_time=start_timestamp),
mnl_mock.call_args[1])
self.assertEqual(1, mnl_mock.call_count)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_resources(self, mock_mdf):
expected = [
mock.call(
dimensions={
'project_id': 'proj1'}),
mock.call(
dimensions={
'tenant_id': 'proj1'})
]
self.assertTrue(expected == mnl_mock.call_args_list)
self.assertEqual(2, mnl_mock.call_count)
@mock.patch('oslo_utils.timeutils.utcnow')
def test_get_resources(self, mock_utcnow):
mock_utcnow.return_value = datetime.datetime(2016, 4, 7, 18, 20)
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
mnl_mock = mock_client().metrics_list
mnl_mock.return_value = [{'name': 'metric1',
'dimensions': {}},
{'name': 'metric2',
'dimensions': {}}
]
mnl_mock = mock_client().metric_names_list
mnl_mock.return_value = [
{
"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
"name": "storage.objects.size"
},
{
"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
"name": "vm.net.in_rate"
}
]
kwargs = dict(source='openstack')
ml_mock = mock_client().measurements_list
ml_mock.return_value = (
TestGetResources.dummy_get_resources_mocked_return_value)
data1 = (
[{u'dimensions': {u'resource_id': u'abcd',
u'datasource': u'ceilometer'},
u'measurements': [[u'2015-04-14T17:52:31Z', 1.0, {}],
[u'2015-04-15T17:52:31Z', 2.0, {}],
[u'2015-04-16T17:52:31Z', 3.0, {}]],
u'id': u'2015-04-14T18:42:31Z',
u'columns': [u'timestamp', u'value', u'value_meta'],
u'name': u'storage.objects.size'}])
data2 = (
[{u'dimensions': {u'resource_id': u'abcd',
u'datasource': u'ceilometer'},
u'measurements': [[u'2015-04-14T17:52:31Z', 1.0, {}],
[u'2015-04-15T17:52:31Z', 2.0, {}],
[u'2015-04-16T17:52:31Z', 3.0, {}]],
u'id': u'2015-04-14T18:42:31Z',
u'columns': [u'timestamp', u'value', u'value_meta'],
u'name': u'vm.net.in_rate'}])
ml_mock.side_effect = [data1, data2]
list(conn.get_resources(**kwargs))
self.assertEqual(2, ml_mock.call_count)
self.assertEqual(dict(dimensions={},
name='metric1',
limit=1,
start_time='1970-01-01T00:00:00Z'),
self.assertEqual(dict(dimensions=dict(datasource='ceilometer',
source='openstack'),
name='storage.objects.size',
start_time='1970-01-01T00:00:00.000000Z',
group_by='*',
end_time='2016-04-07T18:20:00.000000Z'),
ml_mock.call_args_list[0][1])
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_resources_limit(self, mdf_mock):
def test_get_resources_limit(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
mnl_mock = mock_client().metrics_list
mnl_mock.return_value = [{'name': 'metric1',
'dimensions': {'resource_id': 'abcd'}},
{'name': 'metric2',
'dimensions': {'resource_id': 'abcd'}}
]
mnl_mock = mock_client().metric_names_list
mnl_mock.return_value = [
{
"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
"name": "storage.objects.size"
},
{
"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
"name": "vm.net.in_rate"
}
]
dummy_get_resources_mocked_return_value = (
[{u'dimensions': {u'resource_id': u'abcd'},
[{u'dimensions': {u'resource_id': u'abcd',
u'datasource': u'ceilometer'},
u'measurements': [[u'2015-04-14T17:52:31Z', 1.0, {}],
[u'2015-04-15T17:52:31Z', 2.0, {}],
[u'2015-04-16T17:52:31Z', 3.0, {}]],
@ -117,10 +248,6 @@ class TestGetResources(base.BaseTestCase):
u'columns': [u'timestamp', u'value', u'value_meta'],
u'name': u'image'}])
ml_mock = mock_client().measurements_list
ml_mock.return_value = (
TestGetSamples.dummy_metrics_mocked_return_value
)
ml_mock = mock_client().measurements_list
ml_mock.return_value = (
dummy_get_resources_mocked_return_value)
@ -130,39 +257,73 @@ class TestGetResources(base.BaseTestCase):
resources = list(conn.get_resources(sample_filter, limit=2))
self.assertEqual(2, len(resources))
self.assertEqual(True, ml_mock.called)
self.assertEqual(2, ml_mock.call_count)
self.assertEqual(1, ml_mock.call_count)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_resources_simple_metaquery(self, mock_mdf):
@mock.patch('oslo_utils.timeutils.utcnow')
def test_get_resources_simple_metaquery(self, mock_utcnow):
mock_utcnow.return_value = datetime.datetime(2016, 4, 7, 18, 28)
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
mnl_mock = mock_client().metrics_list
mnl_mock.return_value = [{'name': 'metric1',
'dimensions': {},
'value_meta': {'key': 'value1'}},
{'name': 'metric2',
'dimensions': {},
'value_meta': {'key': 'value2'}},
]
mnl_mock = mock_client().metric_names_list
mnl_mock.return_value = [
{
"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
"name": "storage.objects.size"
},
{
"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
"name": "vm.net.in_rate"
}
]
kwargs = dict(metaquery={'metadata.key': 'value1'})
ml_mock = mock_client().measurements_list
ml_mock.return_value = (
TestGetResources.dummy_get_resources_mocked_return_value)
data1 = (
[{u'dimensions': {u'resource_id': u'abcd',
u'datasource': u'ceilometer'},
u'measurements': [[u'2015-04-14T17:52:31Z', 1.0, {}],
[u'2015-04-15T17:52:31Z', 2.0, {}],
[u'2015-04-16T17:52:31Z', 3.0, {}]],
u'id': u'2015-04-14T18:42:31Z',
u'columns': [u'timestamp', u'value', u'value_meta'],
u'name': u'storage.objects.size'}])
data2 = (
[{u'dimensions': {u'resource_id': u'abcd',
u'datasource': u'ceilometer'},
u'measurements': [[u'2015-04-14T17:52:31Z', 1.0, {}],
[u'2015-04-15T17:52:31Z', 2.0, {}],
[u'2015-04-16T17:52:31Z', 3.0, {}]],
u'id': u'2015-04-14T18:42:31Z',
u'columns': [u'timestamp', u'value', u'value_meta'],
u'name': u'vm.net.in_rate'}])
ml_mock.side_effect = [data1, data2]
list(conn.get_resources(**kwargs))
self.assertEqual(2, ml_mock.call_count)
self.assertEqual(dict(dimensions={},
name='metric2',
limit=1,
start_time='1970-01-01T00:00:00Z'),
ml_mock.call_args_list[1][1])
self.assertEqual(dict(dimensions=dict(datasource='ceilometer'),
name="storage.objects.size",
start_time='1970-01-01T00:00:00.000000Z',
group_by='*',
end_time='2016-04-07T18:28:00.000000Z'),
ml_mock.call_args_list[0][1])
class MeterTest(base.BaseTestCase):
class MeterTest(_BaseTestCase):
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_not_implemented_params(self, mock_mdf):
dummy_metrics_mocked_return_value = (
[{u'dimensions': {u'datasource': u'ceilometer'},
u'id': u'2015-04-14T18:42:31Z',
u'name': u'meter-1'},
{u'dimensions': {u'datasource': u'ceilometer'},
u'id': u'2015-04-15T18:42:31Z',
u'name': u'meter-1'},
{u'dimensions': {u'datasource': u'ceilometer'},
u'id': u'2015-04-16T18:42:31Z',
u'name': u'meter-2'}])
def test_not_implemented_params(self):
with mock.patch('ceilometer.monasca_client.Client'):
conn = impl_monasca.Connection('127.0.0.1:8080')
@ -170,8 +331,7 @@ class MeterTest(base.BaseTestCase):
self.assertRaises(ceilometer.NotImplementedError,
lambda: list(conn.get_meters(**kwargs)))
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_metrics_list_call(self, mock_mdf):
def test_metrics_list_call(self):
with mock.patch('ceilometer.monasca_client.Client') as mock_client:
conn = impl_monasca.Connection('127.0.0.1:8080')
metrics_list_mock = mock_client().metrics_list
@ -181,20 +341,70 @@ class MeterTest(base.BaseTestCase):
resource='resource-1',
source='openstack',
limit=100)
list(conn.get_meters(**kwargs))
self.assertEqual(True, metrics_list_mock.called)
self.assertEqual(1, metrics_list_mock.call_count)
self.assertEqual(4, metrics_list_mock.call_count)
expected = [
mock.call(
dimensions={
'source': 'openstack',
'project_id': 'project-1',
'user_id': 'user-1',
'datasource': 'ceilometer',
'resource_id': 'resource-1'}),
mock.call(
dimensions={
'source': 'openstack',
'project_id': 'project-1',
'user_id': 'user-1',
'resource_id': 'resource-1'}),
mock.call(
dimensions={
'source': 'openstack',
'tenant_id': 'project-1',
'user_id': 'user-1',
'resource_id': 'resource-1'}),
mock.call(
dimensions={
'source': 'openstack',
'project_id': 'project-1',
'user_id': 'user-1',
'hostname': 'resource-1'})
]
self.assertTrue(expected == metrics_list_mock.call_args_list)
def test_unique_metrics_list_call(self):
dummy_metric_names_mocked_return_value = (
[{"id": "015c995b1a770147f4ef18f5841ef566ab33521d",
"name": "network.delete"},
{"id": "335b5d569ad29dc61b3dc24609fad3619e947944",
"name": "subnet.update"}])
with mock.patch('ceilometer.monasca_client.Client') as mock_client:
conn = impl_monasca.Connection('127.0.0.1:8080')
metric_names_list_mock = mock_client().metric_names_list
metric_names_list_mock.return_value = (
dummy_metric_names_mocked_return_value
)
kwargs = dict(user='user-1',
project='project-1',
resource='resource-1',
source='openstack',
limit=2,
unique=True)
self.assertEqual(2, len(list(conn.get_meters(**kwargs))))
self.assertEqual(True, metric_names_list_mock.called)
self.assertEqual(1, metric_names_list_mock.call_count)
self.assertEqual(dict(dimensions=dict(user_id='user-1',
project_id='project-1',
resource_id='resource-1',
source='openstack'),
limit=100),
metrics_list_mock.call_args[1])
source='openstack')),
metric_names_list_mock.call_args[1])
class TestGetSamples(base.BaseTestCase):
class TestGetSamples(_BaseTestCase):
dummy_get_samples_mocked_return_value = (
[{u'dimensions': {},
@ -208,33 +418,25 @@ class TestGetSamples(base.BaseTestCase):
u'id': u'2015-04-14T18:42:31Z',
u'name': u'specific meter'}])
def setUp(self):
super(TestGetSamples, self).setUp()
self.CONF = self.useFixture(fixture_config.Config()).conf
self.CONF([], project='ceilometer', validate_default_values=True)
dummy_get_samples_mocked_return_extendedkey_value = (
[{u'dimensions': {},
u'measurements': [[u'2015-04-14T17:52:31Z',
1.0,
{'image_meta.base_url': 'base_url'}]],
u'id': u'2015-04-14T18:42:31Z',
u'columns': [u'timestamp', u'value', u'value_meta'],
u'name': u'image'}])
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_samples_not_implemented_params(self, mdf_mock):
def test_get_samples_not_implemented_params(self):
with mock.patch("ceilometer.monasca_client.Client"):
conn = impl_monasca.Connection("127.0.0.1:8080")
sample_filter = storage.SampleFilter(meter='specific meter',
start_timestamp_op='<')
self.assertRaises(ceilometer.NotImplementedError,
lambda: list(conn.get_samples(sample_filter)))
sample_filter = storage.SampleFilter(meter='specific meter',
end_timestamp_op='>')
self.assertRaises(ceilometer.NotImplementedError,
lambda: list(conn.get_samples(sample_filter)))
sample_filter = storage.SampleFilter(meter='specific meter',
message_id='specific message')
self.assertRaises(ceilometer.NotImplementedError,
lambda: list(conn.get_samples(sample_filter)))
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_samples_name(self, mdf_mock):
def test_get_samples_name(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
metrics_list_mock = mock_client().metrics_list
@ -249,15 +451,14 @@ class TestGetSamples(base.BaseTestCase):
list(conn.get_samples(sample_filter))
self.assertEqual(True, ml_mock.called)
self.assertEqual(dict(
dimensions={},
start_time='1970-01-01T00:00:00Z',
merge_metrics=False, name='specific meter',
end_time='2015-04-20T00:00:00Z'),
dimensions=dict(datasource='ceilometer'),
start_time='1970-01-01T00:00:00.000000Z',
group_by='*', name='specific meter',
end_time='2015-04-20T00:00:00.000000Z'),
ml_mock.call_args[1])
self.assertEqual(1, ml_mock.call_count)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_samples_start_timestamp_filter(self, mdf_mock):
def test_get_samples_start_timestamp_filter(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
@ -279,8 +480,40 @@ class TestGetSamples(base.BaseTestCase):
self.assertEqual(True, ml_mock.called)
self.assertEqual(1, ml_mock.call_count)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_samples_limit(self, mdf_mock):
def test_get_samples_timestamp_filter_exclusive_range(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
metrics_list_mock = mock_client().metrics_list
metrics_list_mock.return_value = (
TestGetSamples.dummy_metrics_mocked_return_value
)
ml_mock = mock_client().measurements_list
ml_mock.return_value = (
TestGetSamples.dummy_get_samples_mocked_return_value)
start_time = datetime.datetime(2015, 3, 20)
end_time = datetime.datetime(2015, 4, 1, 12, 00, 00)
sample_filter = storage.SampleFilter(
meter='specific meter',
start_timestamp=timeutils.isotime(start_time),
start_timestamp_op='gt',
end_timestamp=timeutils.isotime(end_time),
end_timestamp_op='lt')
list(conn.get_samples(sample_filter))
self.assertEqual(True, ml_mock.called)
self.assertEqual(1, ml_mock.call_count)
self.assertEqual(dict(dimensions=dict(datasource='ceilometer'),
name='specific meter',
start_time='2015-03-20T00:00:00.001000Z',
end_time='2015-04-01T11:59:59.999000Z',
start_timestamp_op='ge',
end_timestamp_op='le',
group_by='*'),
ml_mock.call_args_list[0][1])
def test_get_samples_limit(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
@ -309,8 +542,7 @@ class TestGetSamples(base.BaseTestCase):
self.assertEqual(True, ml_mock.called)
self.assertEqual(1, ml_mock.call_count)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_samples_project_filter(self, mock_mdf):
def test_get_samples_project_filter(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
metrics_list_mock = mock_client().metrics_list
@ -330,8 +562,7 @@ class TestGetSamples(base.BaseTestCase):
self.assertEqual(True, ml_mock.called)
self.assertEqual(1, ml_mock.call_count)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_samples_resource_filter(self, mock_mdf):
def test_get_samples_resource_filter(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
metrics_list_mock = mock_client().metrics_list
@ -350,8 +581,7 @@ class TestGetSamples(base.BaseTestCase):
self.assertEqual(True, ml_mock.called)
self.assertEqual(1, ml_mock.call_count)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_samples_source_filter(self, mdf_mock):
def test_get_samples_source_filter(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
metrics_list_mock = mock_client().metrics_list
@ -370,8 +600,7 @@ class TestGetSamples(base.BaseTestCase):
self.assertEqual(True, ml_mock.called)
self.assertEqual(1, ml_mock.call_count)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_samples_simple_metaquery(self, mdf_mock):
def test_get_samples_simple_metaquery(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
metrics_list_mock = mock_client().metrics_list
@ -389,14 +618,33 @@ class TestGetSamples(base.BaseTestCase):
self.assertEqual(True, ml_mock.called)
self.assertEqual(1, ml_mock.call_count)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_get_samples_results(self, mdf_mock):
def test_get_samples_simple_metaquery_with_extended_key(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
metrics_list_mock = mock_client().metrics_list
metrics_list_mock.return_value = (
TestGetSamples.dummy_metrics_mocked_return_value
)
ml_mock = mock_client().measurements_list
ml_mock.return_value = (
TestGetSamples.
dummy_get_samples_mocked_return_extendedkey_value
)
sample_filter = storage.SampleFilter(
meter='specific meter',
metaquery={'metadata.image_meta.base_url': u'base_url'})
self.assertTrue(len(list(conn.get_samples(sample_filter))) > 0)
self.assertEqual(True, ml_mock.called)
self.assertEqual(1, ml_mock.call_count)
def test_get_samples_results(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
metrics_list_mock = mock_client().metrics_list
metrics_list_mock.return_value = (
[{u'dimensions': {
'source': 'some source',
'datasource': 'ceilometer',
'project_id': 'some project ID',
'resource_id': 'some resource ID',
'type': 'some type',
@ -409,6 +657,7 @@ class TestGetSamples(base.BaseTestCase):
ml_mock.return_value = (
[{u'dimensions': {
'source': 'some source',
'datasource': 'ceilometer',
'project_id': 'some project ID',
'resource_id': 'some resource ID',
'type': 'some type',
@ -421,7 +670,7 @@ class TestGetSamples(base.BaseTestCase):
u'name': u'image'}])
sample_filter = storage.SampleFilter(
meter='specific meter',
meter='image',
start_timestamp='2015-03-20T00:00:00Z')
results = list(conn.get_samples(sample_filter))
self.assertEqual(True, ml_mock.called)
@ -463,25 +712,11 @@ class TestGetSamples(base.BaseTestCase):
self.assertEqual(1, ml_mock.call_count)
class _BaseTestCase(base.BaseTestCase):
def assertRaisesWithMessage(self, msg, exc_class, func, *args, **kwargs):
try:
func(*args, **kwargs)
self.fail('Expecting %s exception, none raised' %
exc_class.__name__)
except AssertionError:
raise
# Only catch specific exception so we can get stack trace when fail
except exc_class as e:
self.assertEqual(msg, e.message)
class MeterStatisticsTest(_BaseTestCase):
Aggregate = collections.namedtuple("Aggregate", ['func', 'param'])
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_not_implemented_params(self, mock_mdf):
def test_not_implemented_params(self):
with mock.patch("ceilometer.monasca_client.Client"):
conn = impl_monasca.Connection("127.0.0.1:8080")
@ -542,8 +777,9 @@ class MeterStatisticsTest(_BaseTestCase):
conn.get_meter_statistics(
sf, aggregate=aggregate)))
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_stats_list_called_with(self, mock_mdf):
@mock.patch('oslo_utils.timeutils.utcnow')
def test_stats_list_called_with(self, mock_utcnow):
mock_utcnow.return_value = datetime.datetime(2016, 4, 7, 18, 31)
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
sl_mock = mock_client().statistics_list
@ -563,9 +799,11 @@ class MeterStatisticsTest(_BaseTestCase):
'dimensions': {'source': 'source_id',
'project_id': 'project_id',
'user_id': 'user_id',
'resource_id': 'resource_id'
'resource_id': 'resource_id',
'datasource': 'ceilometer'
},
'start_time': '1970-01-01T00:00:00Z',
'end_time': '2016-04-07T18:31:00.000000Z',
'start_time': '1970-01-01T00:00:00.000000Z',
'period': 10,
'statistics': 'min',
'name': 'image'
@ -573,8 +811,7 @@ class MeterStatisticsTest(_BaseTestCase):
sl_mock.call_args[1]
)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_stats_list(self, mock_mdf):
def test_stats_list(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
sl_mock = mock_client().statistics_list
@ -592,7 +829,7 @@ class MeterStatisticsTest(_BaseTestCase):
sf = storage.SampleFilter()
sf.meter = "image"
aggregate = meters.Aggregate()
aggregate = Aggregate()
aggregate.func = 'min'
sf.start_timestamp = timeutils.parse_isotime(
'2014-10-24T12:12:42').replace(tzinfo=None)
@ -612,8 +849,7 @@ class MeterStatisticsTest(_BaseTestCase):
self.assertIsNotNone(stats[0].as_dict().get('aggregate'))
self.assertEqual({u'min': 0.008}, stats[0].as_dict()['aggregate'])
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_stats_list_with_groupby(self, mock_mdf):
def test_stats_list_with_groupby(self):
with mock.patch("ceilometer.monasca_client.Client") as mock_client:
conn = impl_monasca.Connection("127.0.0.1:8080")
sl_mock = mock_client().statistics_list
@ -672,38 +908,32 @@ class MeterStatisticsTest(_BaseTestCase):
class TestQuerySamples(_BaseTestCase):
def setUp(self):
super(TestQuerySamples, self).setUp()
self.CONF = self.useFixture(fixture_config.Config()).conf
self.CONF([], project='ceilometer', validate_default_values=True)
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_query_samples_not_implemented_params(self, mdf_mock):
def test_query_samples_not_implemented_params(self):
with mock.patch("ceilometer.monasca_client.Client"):
conn = impl_monasca.Connection("127.0.0.1:8080")
query = {'or': [{'=': {"project_id": "123"}},
{'=': {"user_id": "456"}}]}
query = {'and': [{'=': {'counter_name': 'instance'}},
{'or': [{'=': {"project_id": "123"}},
{'=': {"user_id": "456"}}]}]}
self.assertRaisesWithMessage(
'fitler must be specified',
ceilometer.NotImplementedError,
lambda: list(conn.query_samples()))
self.assertRaisesWithMessage(
'limit must be specified',
ceilometer.NotImplementedError,
lambda: list(conn.query_samples(query)))
order_by = [{"timestamp": "desc"}]
self.assertRaisesWithMessage(
'orderby is not supported',
ceilometer.NotImplementedError,
lambda: list(conn.query_samples(query, order_by)))
self.assertRaisesWithMessage(
'Supply meter name at the least',
ceilometer.NotImplementedError,
query = {'or': [{'=': {"project_id": "123"}},
{'=': {"user_id": "456"}}]}
self.assert_raise_within_message(
'meter name is not found in',
impl_monasca.InvalidInputException,
lambda: list(conn.query_samples(query, None, 1)))
@mock.patch("ceilometer.storage.impl_monasca.MonascaDataFilter")
def test_query_samples(self, mdf_mock):
def test_query_samples(self):
SAMPLES = [[
storage_models.Sample(
counter_name="instance",
@ -730,18 +960,59 @@ class TestQuerySamples(_BaseTestCase):
with mock.patch.object(conn, 'get_samples') as gsm:
gsm.side_effect = _get_samples
query = {'or': [{'=': {"project_id": "123"}},
{'=': {"user_id": "456"}}]}
query = {'and': [{'=': {'counter_name': 'instance'}},
{'or': [{'=': {"project_id": "123"}},
{'=': {"user_id": "456"}}]}]}
samples = conn.query_samples(query, None, 100)
self.assertEqual(2, len(samples))
self.assertEqual(2, gsm.call_count)
samples = SAMPLES[:]
query = {'and': [{'=': {"project_id": "123"}},
{'>': {"counter_volume": 2}}]}
query = {'and': [{'=': {'counter_name': 'instance'}},
{'or': [{'=': {"project_id": "123"}},
{'>': {"counter_volume": 2}}]}]}
samples = conn.query_samples(query, None, 100)
self.assertEqual(0, len(samples))
self.assertEqual(3, gsm.call_count)
self.assertEqual(1, len(samples))
self.assertEqual(4, gsm.call_count)
def test_query_samples_timestamp_gt_lt(self):
SAMPLES = [[
storage_models.Sample(
counter_name="instance",
counter_type="gauge",
counter_unit="instance",
counter_volume=1,
project_id="123",
user_id="456",
resource_id="789",
resource_metadata={},
source="openstack",
recorded_at=timeutils.utcnow(),
timestamp=timeutils.utcnow(),
message_id="0",
message_signature='',)
]] * 2
samples = SAMPLES[:]
def _get_samples(*args, **kwargs):
return samples.pop()
with mock.patch("ceilometer.monasca_client.Client"):
conn = impl_monasca.Connection("127.0.0.1:8080")
with mock.patch.object(conn, 'get_samples') as gsm:
gsm.side_effect = _get_samples
start = datetime.datetime(2014, 10, 24, 13, 52, 42)
end = datetime.datetime(2014, 10, 24, 14, 52, 42)
ts_query = {
'or': [{'>': {"timestamp": start}},
{'<': {"timestamp": end}}]
}
query = {'and': [{'=': {'counter_name': 'instance'}},
ts_query]}
samples = conn.query_samples(query, None, 100)
self.assertEqual(2, len(samples))
self.assertEqual(2, gsm.call_count)
class CapabilitiesTest(base.BaseTestCase):
@ -752,7 +1023,6 @@ class CapabilitiesTest(base.BaseTestCase):
{
'query':
{
'complex': False,
'metadata': False,
'simple': True
}
@ -761,13 +1031,11 @@ class CapabilitiesTest(base.BaseTestCase):
{
'query':
{
'complex': False, 'metadata': True, 'simple': True
'metadata': True, 'simple': True
}
},
'samples':
{
'groupby': False,
'pagination': False,
'query':
{
'complex': True,
@ -793,10 +1061,16 @@ class CapabilitiesTest(base.BaseTestCase):
'groupby': False,
'query':
{
'complex': False,
'metadata': False,
'simple': True
}
},
'events':
{
'query':
{
'simple': False
}
}
}

View File

@ -12,36 +12,51 @@
# License for the specific language governing permissions and limitations
# under the License.
import os
from keystoneauth1 import loading as ka_loading
import mock
from oslo_config import cfg
from oslo_config import fixture as fixture_config
from oslo_utils import fileutils
from oslo_utils import netutils
from oslotest import base
from ceilometer import monasca_client
from monascaclient import exc
cfg.CONF.import_group('service_credentials', 'ceilometer.keystone_client')
cfg.CONF.import_group('service_credentials', 'ceilometer.service')
class TestMonascaClient(base.BaseTestCase):
opts = [
cfg.StrOpt("username", default="ceilometer"),
cfg.StrOpt("password", default="password"),
cfg.StrOpt("auth_url", default="http://192.168.10.6:5000"),
cfg.StrOpt("project_name", default="service"),
cfg.StrOpt("project_id", default="service"),
]
def setUp(self):
super(TestMonascaClient, self).setUp()
self.CONF = self.useFixture(fixture_config.Config()).conf
self.CONF([], project='ceilometer', validate_default_values=True)
self.CONF.register_opts(self.opts, group="service_credentials")
content = ("[service_credentials]\n"
"auth_type = password\n"
"username = ceilometer\n"
"password = admin\n"
"auth_url = http://localhost:5000/v2.0\n")
tempfile = fileutils.write_to_tempfile(content=content,
prefix='ceilometer',
suffix='.conf')
self.addCleanup(os.remove, tempfile)
self.conf = self.useFixture(fixture_config.Config()).conf
self.conf([], default_config_files=[tempfile])
ka_loading.load_auth_from_conf_options(self.conf,
"service_credentials")
self.conf.set_override('max_retries', 0, 'database')
self.mc = self._get_client()
def tearDown(self):
# For some reason, cfg.CONF is registered a required option named
# auth_url after these tests run, which occasionally blocks test
# case test_event_pipeline_endpoint_requeue_on_failure, so we
# unregister it here.
self.conf.reset()
self.conf.unregister_opt(cfg.StrOpt('auth_url'),
group='service_credentials')
super(TestMonascaClient, self).tearDown()
@mock.patch('monascaclient.client.Client')
@mock.patch('monascaclient.ksclient.KSClient')
def _get_client(self, ksclass_mock, monclient_mock):
@ -50,6 +65,15 @@ class TestMonascaClient(base.BaseTestCase):
return monasca_client.Client(
netutils.urlsplit("http://127.0.0.1:8080"))
@mock.patch('monascaclient.client.Client')
@mock.patch('monascaclient.ksclient.KSClient')
def test_client_url_correctness(self, ksclass_mock, monclient_mock):
ksclient_mock = ksclass_mock.return_value
ksclient_mock.token.return_value = "token123"
mon_client = monasca_client.Client(
netutils.urlsplit("monasca://https://127.0.0.1:8080"))
self.assertEqual("https://127.0.0.1:8080", mon_client._endpoint)
def test_metrics_create(self):
with mock.patch.object(self.mc._mon_client.metrics, 'create',
side_effect=[True]) as create_patch:
@ -92,3 +116,70 @@ class TestMonascaClient(base.BaseTestCase):
self._get_client)
self.assertIsNotNone(True, conf.username)
def test_retry_on_key_error(self):
self.conf.set_override('max_retries', 2, 'database')
self.conf.set_override('retry_interval', 1, 'database')
self.mc = self._get_client()
with mock.patch.object(
self.mc._mon_client.metrics, 'list',
side_effect=[KeyError, []]) as mocked_metrics_list:
list(self.mc.metrics_list())
self.assertEqual(2, mocked_metrics_list.call_count)
def test_no_retry_on_invalid_parameter(self):
self.conf.set_override('max_retries', 2, 'database')
self.conf.set_override('retry_interval', 1, 'database')
self.mc = self._get_client()
def _check(exception):
expected_exc = monasca_client.MonascaInvalidParametersException
with mock.patch.object(
self.mc._mon_client.metrics, 'list',
side_effect=[exception, []]
) as mocked_metrics_list:
self.assertRaises(expected_exc, list, self.mc.metrics_list())
self.assertEqual(1, mocked_metrics_list.call_count)
_check(exc.HTTPUnProcessable)
_check(exc.HTTPBadRequest)
def test_max_retris_not_too_much(self):
def _check(configured, expected):
self.conf.set_override('max_retries', configured, 'database')
self.mc = self._get_client()
self.assertEqual(expected, self.mc._max_retries)
_check(-1, 10)
_check(11, 10)
_check(5, 5)
_check(None, 1)
def test_meaningful_exception_message(self):
with mock.patch.object(
self.mc._mon_client.metrics, 'list',
side_effect=[exc.HTTPInternalServerError,
exc.HTTPUnProcessable,
KeyError]):
e = self.assertRaises(
monasca_client.MonascaServiceException,
list, self.mc.metrics_list())
self.assertIn('Monasca service is unavailable', str(e))
e = self.assertRaises(
monasca_client.MonascaInvalidParametersException,
list, self.mc.metrics_list())
self.assertIn('Request cannot be handled by Monasca', str(e))
e = self.assertRaises(
monasca_client.MonascaException,
list, self.mc.metrics_list())
self.assertIn('An exception is raised from Monasca', str(e))
@mock.patch.object(monasca_client.Client, '_refresh_client')
def test_metrics_create_with_401(self, rc_patch):
with mock.patch.object(
self.mc._mon_client.metrics, 'create',
side_effect=[exc.HTTPUnauthorized, True]):
self.assertRaises(
monasca_client.MonascaInvalidParametersException,
self.mc.metrics_create)

View File

@ -0,0 +1,14 @@
---
sources:
- name: meter_source
interval: 60
meters:
- "testbatch"
- "testbatch2"
sinks:
- meter_sink
sinks:
- name: meter_sink
transformers:
publishers:
- monasca://http://192.168.10.6:8070/v2.0

View File

@ -3,7 +3,7 @@ sources:
- name: meter_source
interval: 60
meters:
- "*"
- "instance"
sinks:
- meter_sink
sinks:

View File

@ -34,6 +34,11 @@ ceilosca_files = {
for src, dest in ceilosca_files.items():
shutil.copyfile(src, dest)
# Include new module
shutil.rmtree(ceilo_dir + "/ceilosca_mapping/", True)
shutil.copytree('ceilosca/ceilometer/ceilosca_mapping',
ceilo_dir + "/ceilosca_mapping/")
ceilo_parent_dir = os.path.dirname(os.path.abspath(
os.path.dirname(ceilometer.__file__)))
@ -43,6 +48,7 @@ ceilosca_conf_files = {
[
'etc/ceilometer/monasca_field_definitions.yaml',
'etc/ceilometer/pipeline.yaml',
'etc/ceilometer/monasca_pipeline.yaml',
'etc/ceilometer/ceilometer.conf',
'etc/ceilometer/policy.json'
]

View File

@ -1,15 +1,15 @@
hacking<0.11,>=0.10.0
git+https://github.com/openstack/ceilometer.git@stable/mitaka#egg=ceilometer
git+https://github.com/openstack/ceilometer.git@stable/newton#egg=ceilometer
mock>=1.2
testrepository>=0.0.18
testscenarios>=0.4
testtools>=1.4.0
oslosphinx>=2.5.0 # Apache-2.0
oslotest>=1.10.0 # Apache-2.0
oslo.vmware>=1.16.0 # Apache-2.0
oslo.vmware>=1.16.0,<2.17.0 # Apache-2.0
# Use lower versions of config and utils since
# Keystone client depends on it
oslo.config>=2.3.0 # Apache-2.0
oslo.utils!=2.6.0,>=2.0.0 # Apache-2.0
oslo.log>=1.8.0 # Apache-2.0
python-monascaclient
python-monascaclient<=1.2.0