Added gnocchi collector
CloudKitty now supports gnocchi data collection. Support is still experimental as it is lacking some data. Some fetched data might be inaccurate, as it highly depends on your archive policies. Change-Id: I7e6668c766b7bd4641cccc2bd841a7aed1d2e2d5
This commit is contained in:
parent
ea8a86811e
commit
f7641aaa9f
|
@ -107,10 +107,19 @@ class BaseCollector(object):
|
|||
month_start = ck_utils.get_month_start()
|
||||
return ck_utils.dt2ts(month_start)
|
||||
|
||||
def retrieve(self, resource, start, end=None, project_id=None,
|
||||
q_filter=None):
|
||||
@classmethod
|
||||
def _res_to_func(cls, resource_name):
|
||||
trans_resource = 'get_'
|
||||
trans_resource += resource.replace('.', '_')
|
||||
trans_resource += resource_name.replace('.', '_')
|
||||
return trans_resource
|
||||
|
||||
def retrieve(self,
|
||||
resource,
|
||||
start,
|
||||
end=None,
|
||||
project_id=None,
|
||||
q_filter=None):
|
||||
trans_resource = self._res_to_func(resource)
|
||||
if not hasattr(self, trans_resource):
|
||||
raise NotImplementedError(
|
||||
"No method found in collector '%s' for resource '%s'."
|
||||
|
|
|
@ -0,0 +1,274 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015 Objectif Libre
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
from gnocchiclient import client as gclient
|
||||
from keystoneauth1 import loading as ks_loading
|
||||
from oslo_config import cfg
|
||||
|
||||
from cloudkitty import collector
|
||||
|
||||
GNOCCHI_COLLECTOR_OPTS = 'gnocchi_collector'
|
||||
ks_loading.register_session_conf_options(
|
||||
cfg.CONF,
|
||||
GNOCCHI_COLLECTOR_OPTS)
|
||||
ks_loading.register_auth_conf_options(
|
||||
cfg.CONF,
|
||||
GNOCCHI_COLLECTOR_OPTS)
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
class GnocchiCollector(collector.BaseCollector):
    """Rating data collector backed by a gnocchi endpoint."""

    collector_name = 'gnocchi'
    # Transformers that must be loaded for this collector to work.
    dependencies = ('GnocchiTransformer',
                    'CloudKittyFormatTransformer')
    # CloudKitty service name -> gnocchi resource type to query.
    retrieve_mappings = {
        'compute': 'instance',
        'image': 'image',
        'volume': 'volume',
        'network.bw.out': 'instance_network_interface',
        'network.bw.in': 'instance_network_interface',
    }
    # Service name -> list of (metric name, aggregation) pairs expanded
    # onto every matched resource by _expand_metrics().
    metrics_mappings = {
        'compute': [
            ('vcpus', 'max'),
            ('memory', 'max'),
            ('cpu', 'max'),
            ('disk.root.size', 'max'),
            ('disk.ephemeral.size', 'max')],
        'image': [
            ('image.size', 'max'),
            ('image.download', 'max'),
            ('image.serve', 'max')],
        'volume': [
            ('volume.size', 'max')],
        'network.bw.out': [
            ('network.outgoing.bytes', 'max')],
        'network.bw.in': [
            ('network.incoming.bytes', 'max')],
    }
    # Service name -> (qty, unit). qty is either a literal int or the
    # name of the resource field holding the billed quantity
    # (see resource_info()'s format_item call).
    volumes_mappings = {
        'compute': (1, 'instance'),
        'image': ('image.size', 'MB'),
        'volume': ('volume.size', 'GB'),
        'network.bw.out': ('network.outgoing.bytes', 'MB'),
        'network.bw.in': ('network.incoming.bytes', 'MB'),
    }
||||
|
||||
def __init__(self, transformers, **kwargs):
    """Wire up transformers and an authenticated gnocchi client.

    :param transformers: Loaded transformers; must contain the entries
        listed in the class ``dependencies`` attribute.
    """
    super(GnocchiCollector, self).__init__(transformers, **kwargs)

    self.t_gnocchi = self.transformers['GnocchiTransformer']
    self.t_cloudkitty = self.transformers['CloudKittyFormatTransformer']

    # Build a keystone session from the [gnocchi_collector] config
    # section registered at module import time.
    self.auth = ks_loading.load_auth_from_conf_options(
        CONF,
        GNOCCHI_COLLECTOR_OPTS)
    self.session = ks_loading.load_session_from_conf_options(
        CONF,
        GNOCCHI_COLLECTOR_OPTS,
        auth=self.auth)
    # Gnocchi API v1 client used for all queries.
    self._conn = gclient.Client(
        '1',
        session=self.session)
||||
|
||||
@classmethod
def gen_filter(cls, cop='=', lop='and', **kwargs):
    """Generate a gnocchi filter from kwargs.

    Each keyword becomes a ``{cop: {field: value}}`` condition; several
    conditions are combined under ``lop``.

    :param cop: Comparison operator.
    :param lop: Logical operator in case of multiple filters.
    """
    # Sort the fields so the generated filter is deterministic.
    conditions = [{cop: {field: kwargs[field]}}
                  for field in sorted(kwargs)]
    if len(conditions) > 1:
        return cls.extend_filter(conditions, lop=lop)
    return conditions[0] if conditions else {}
|
||||
|
||||
@classmethod
def extend_filter(cls, *args, **kwargs):
    """Merge several gnocchi filters under one logical operator.

    Dict arguments are taken as single conditions, list arguments are
    flattened in; empty/None arguments are ignored.

    :param lop: Logical operator in case of multiple filters.
    """
    lop = kwargs.get('lop', 'and')
    merged = []
    for part in args:
        if isinstance(part, list):
            merged.extend(part)
        elif isinstance(part, dict) and part:
            merged.append(part)
    if len(merged) > 1:
        return {lop: merged}
    return merged[0] if merged else {}
|
||||
|
||||
def _generate_time_filter(self, start, end=None, with_revision=False):
|
||||
"""Generate timeframe filter.
|
||||
|
||||
:param start: Start of the timeframe.
|
||||
:param end: End of the timeframe if needed.
|
||||
:param with_revision: Filter on the resource revision.
|
||||
:type with_revision: bool
|
||||
"""
|
||||
time_filter = list()
|
||||
time_filter.append(self.extend_filter(
|
||||
self.gen_filter(ended_at=None),
|
||||
self.gen_filter(cop=">=", ended_at=start),
|
||||
lop='or'))
|
||||
if end:
|
||||
time_filter.append(self.extend_filter(
|
||||
self.gen_filter(ended_at=None),
|
||||
self.gen_filter(cop="<=", ended_at=end),
|
||||
lop='or'))
|
||||
time_filter.append(
|
||||
self.gen_filter(cop="<=", started_at=end))
|
||||
if with_revision:
|
||||
time_filter.append(
|
||||
self.gen_filter(cop="<=", revision_start=end))
|
||||
return time_filter
|
||||
|
||||
def _expand_metrics(self, resources, mappings, start, end=None):
    """Inline aggregated metric values into each resource dict.

    For every (metric name, aggregation) pair in ``mappings``, fetch the
    measures of the resource's metric over [start, end] and store the
    first aggregated value under the metric name, or None when no
    measure is available in the timeframe.
    """
    for resource in resources:
        metrics = resource.get('metrics', {})
        for name, aggregate in mappings:
            # NOTE(review): when the resource lacks this metric,
            # metrics.get(name) is None and the client call will likely
            # fail rather than return [] -- TODO confirm upstream
            # error handling.
            value = self._conn.metric.get_measures(
                metric=metrics.get(name),
                start=start,
                stop=end,
                aggregation=aggregate)
            try:
                # Measures are (timestamp, granularity, value) triples;
                # keep the value of the first one in the window.
                resource[name] = value[0][2]
            except IndexError:
                # No measure collected for this metric in the window.
                resource[name] = None
|
||||
|
||||
def resource_info(self,
                  resource_type,
                  start,
                  end=None,
                  resource_id=None,
                  project_id=None,
                  q_filter=None):
    """Get resources during the timeframe.

    Set the resource_id if you want to get a specific resource.
    :param resource_type: Resource type to filter on.
    :type resource_type: str
    :param start: Start of the timeframe.
    :param end: End of the timeframe if needed.
    :param resource_id: Retrieve a specific resource based on its id.
    :type resource_id: str
    :param project_id: Filter on a specific tenant/project.
    :type project_id: str
    :param q_filter: Append a custom filter.
    :type q_filter: list
    """
    # Translating to resource name if needed
    translated_resource = self.retrieve_mappings.get(resource_type,
                                                     resource_type)
    # Quantity source and unit for the rated volume (see class attrs).
    qty, unit = self.volumes_mappings.get(
        resource_type,
        (1, 'unknown'))
    # NOTE(sheeprine): Only filter revision on resource retrieval
    query_parameters = self._generate_time_filter(
        start,
        end,
        True if resource_id else False)
    # need_subquery: resources found by the generic search still need a
    # per-resource recursive call (below) to get their typed data.
    need_subquery = True
    if resource_id:
        need_subquery = False
        query_parameters.append(
            self.gen_filter(id=resource_id))
        # Fetch only the latest revision of the single resource.
        resources = self._conn.resource.search(
            resource_type=translated_resource,
            query=self.extend_filter(*query_parameters),
            history=True,
            limit=1,
            sorts=['revision_start:desc'])
    else:
        if end:
            # Bounded frame: search 'generic' resources filtered on the
            # type, then recurse per resource id.
            query_parameters.append(
                self.gen_filter(cop="=", type=translated_resource))
        else:
            need_subquery = False
        if project_id:
            query_parameters.append(
                self.gen_filter(project_id=project_id))
        if q_filter:
            query_parameters.append(q_filter)
        final_query = self.extend_filter(*query_parameters)
        resources = self._conn.resource.search(
            resource_type='generic' if end else translated_resource,
            query=final_query)
    resource_list = list()
    if not need_subquery:
        for resource in resources:
            # Reduce the raw gnocchi payload to the rating fields.
            resource_data = self.t_gnocchi.strip_resource_data(
                resource_type,
                resource)
            self._expand_metrics(
                [resource_data],
                self.metrics_mappings[resource_type],
                start,
                end)
            # Metric references are no longer needed once expanded.
            resource_data.pop('metrics', None)
            data = self.t_cloudkitty.format_item(
                resource_data,
                unit,
                qty if isinstance(qty, int) else resource_data[qty])
            resource_list.append(data)
        # NOTE(review): if resource_id matched nothing this raises
        # IndexError -- TODO confirm callers guarantee existence.
        return resource_list[0] if resource_id else resource_list
    # Subquery path: fetch each generic match individually by id so its
    # typed data and latest revision are retrieved.
    for resource in resources:
        res = self.resource_info(
            resource_type,
            start,
            end,
            resource_id=resource.get('id', ''))
        resource_list.append(res)
    return resource_list
|
||||
|
||||
def generic_retrieve(self,
                     resource_name,
                     start,
                     end=None,
                     project_id=None,
                     q_filter=None):
    """Retrieve and format every resource of a service for the timeframe.

    :param resource_name: CloudKitty service name (dotted form).
    :param start: Start of the timeframe.
    :param end: End of the timeframe if needed.
    :param project_id: Filter on a specific tenant/project.
    :param q_filter: Append a custom filter.
    :raises collector.NoDataCollected: when nothing matched the query.
    """
    # BUG FIX: resource_info's signature is (resource_type, start, end,
    # resource_id, project_id, q_filter); the previous positional call
    # bound project_id to resource_id and q_filter to project_id. Pass
    # them by keyword so they land on the intended parameters.
    resources = self.resource_info(
        resource_name,
        start,
        end,
        project_id=project_id,
        q_filter=q_filter)
    if not resources:
        raise collector.NoDataCollected(self.collector_name, resource_name)
    for resource in resources:
        # NOTE(sheeprine): Reference to gnocchi resource used by storage
        resource['resource_id'] = resource['desc']['resource_id']
    return self.t_cloudkitty.format_service(resource_name, resources)
|
||||
|
||||
def retrieve(self,
             resource,
             start,
             end=None,
             project_id=None,
             q_filter=None):
    """Collector entry point for a single service.

    Service names use '_' separators where the gnocchi mappings use '.'
    (e.g. 'network_bw_in' -> 'network.bw.in'), so translate before
    delegating to generic_retrieve().
    """
    return self.generic_retrieve(
        resource.replace('_', '.'),
        start,
        end,
        project_id,
        q_filter)
|
|
@ -68,7 +68,8 @@ class TestCase(testscenarios.TestWithScenarios, base.BaseTestCase):
|
|||
|
||||
def setUp(self):
|
||||
super(TestCase, self).setUp()
|
||||
self.conf = self.useFixture(config_fixture.Config()).conf
|
||||
self._conf_fixture = self.useFixture(config_fixture.Config())
|
||||
self.conf = self._conf_fixture.conf
|
||||
self.conf.set_override('connection', self.db_url, 'database',
|
||||
enforce_type=True)
|
||||
self.conn = ck_db_api.get_instance()
|
||||
|
|
|
@ -0,0 +1,130 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015 Objectif Libre
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
#
|
||||
from cloudkitty.collector import gnocchi
|
||||
from cloudkitty import tests
|
||||
from cloudkitty.tests import samples
|
||||
|
||||
|
||||
class GnocchiCollectorTest(tests.TestCase):
    """Unit tests for GnocchiCollector's filter-building classmethods."""

    def setUp(self):
        super(GnocchiCollectorTest, self).setUp()
        self._tenant_id = samples.TENANT
        # gen_filter/extend_filter are classmethods, so the class itself
        # is enough -- no authenticated instance is needed.
        self.collector = gnocchi.GnocchiCollector

    # Filter generation
    def test_generate_one_field_filter(self):
        actual = self.collector.gen_filter(value1=2)
        expected = {
            '=': {
                'value1': 2
            }}
        self.assertEqual(expected, actual)

    def test_generate_two_fields_filter(self):
        # Two kwargs are combined under the default 'and' operator.
        actual = self.collector.gen_filter(value1=2, value2=3)
        expected = {'and': [{
            '=': {
                'value1': 2
            }}, {
            '=': {
                'value2': 3
            }}]}
        self.assertEqual(expected, actual)

    def test_generate_two_fields_filter_different_operations(self):
        actual = self.collector.gen_filter(
            cop='>=',
            lop='or',
            value1=2,
            value2=3)
        expected = {'or': [{
            '>=': {
                'value1': 2
            }}, {
            '>=': {
                'value2': 3
            }}]}
        self.assertEqual(expected, actual)

    def test_generate_two_filters_and_add_logical(self):
        filter1 = self.collector.gen_filter(value1=2)
        filter2 = self.collector.gen_filter(cop='>', value2=3)
        actual = self.collector.extend_filter(filter1, filter2, lop='or')
        expected = {'or': [{
            '=': {
                'value1': 2
            }}, {
            '>': {
                'value2': 3
            }}]}
        self.assertEqual(expected, actual)

    def test_noop_on_single_filter(self):
        # Extending a single filter must return it unchanged.
        filter1 = self.collector.gen_filter(value1=2)
        actual = self.collector.extend_filter(filter1, lop='or')
        self.assertEqual(filter1, actual)

    def test_try_extend_empty_filter(self):
        actual = self.collector.extend_filter()
        self.assertEqual({}, actual)
        # Empty dicts are ignored, so extending them stays empty.
        actual = self.collector.extend_filter(actual, actual)
        self.assertEqual({}, actual)

    def test_try_extend_filter_with_none(self):
        # None arguments are silently dropped.
        filter1 = self.collector.gen_filter(value1=2)
        actual = self.collector.extend_filter(filter1, None)
        self.assertEqual(filter1, actual)

    def test_generate_two_logical_ops(self):
        # Nested logical operators must be preserved, not flattened.
        filter1 = self.collector.gen_filter(value1=2, value2=3)
        filter2 = self.collector.gen_filter(cop='<=', value3=1)
        actual = self.collector.extend_filter(filter1, filter2, lop='or')
        expected = {'or': [{
            'and': [{
                '=': {
                    'value1': 2
                }}, {
                '=': {
                    'value2': 3
                }}]}, {
            '<=': {
                'value3': 1
            }}]}
        self.assertEqual(expected, actual)

    def test_gen_filter_parameters(self):
        actual = self.collector.gen_filter(
            cop='>',
            lop='or',
            value1=2,
            value2=3)
        expected = {'or': [{
            '>': {
                'value1': 2
            }}, {
            '>': {
                'value2': 3
            }}]}
        self.assertEqual(expected, actual)

    def test_extend_filter_parameters(self):
        # List arguments are flattened into the merged filter.
        actual = self.collector.extend_filter(
            ['dummy1'],
            ['dummy2'],
            lop='or')
        expected = {'or': ['dummy1', 'dummy2']}
        self.assertEqual(expected, actual)
|
|
@ -0,0 +1,72 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015 Objectif Libre
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
from cloudkitty import transformer
|
||||
|
||||
|
||||
class GnocchiTransformer(transformer.BaseTransformer):
    """Strip gnocchi resource payloads down to CloudKitty rating data."""

    def __init__(self):
        # Deliberately skip the base initializer: this transformer
        # needs no configuration.
        pass

    def _generic_strip(self, data):
        """Keep only the fields common to every gnocchi resource."""
        res_data = {
            'resource_id': data['id'],
            'project_id': data['project_id'],
            'user_id': data['user_id'],
            'metrics': data['metrics']}
        return res_data

    def _strip_compute(self, data):
        """Strip an instance resource, keeping compute-specific fields."""
        res_data = self._generic_strip(data)
        res_data.update({
            'instance_id': data['id'],
            'project_id': data['project_id'],
            'user_id': data['user_id'],
            'name': data['display_name'],
            'flavor_id': data['flavor_id']})
        if data.get('image_ref'):
            # image_ref is a URL; keep only the trailing image id.
            # BUG FIX: was `data.rpartition['image_ref'][-1]`, which
            # subscripted a (nonexistent on dict) method and would
            # raise at runtime. The guard also skips a None image_ref
            # (e.g. boot-from-volume instances).
            res_data['image_id'] = data['image_ref'].rpartition('/')[-1]
        return res_data

    def _strip_image(self, data):
        """Strip an image resource, keeping its format metadata."""
        res_data = self._generic_strip(data)
        res_data.update({
            'container_format': data['container_format'],
            'disk_format': data['disk_format']})
        return res_data

    def _strip_volume(self, data):
        """Strip a volume resource, keeping its display name."""
        res_data = self._generic_strip(data)
        res_data.update({
            'name': data['display_name']})
        return res_data

    def _strip_network(self, data):
        """Strip a network interface resource, keeping its name."""
        res_data = self._generic_strip(data)
        res_data.update({
            'name': data['name']})
        return res_data

    def strip_resource_data(self, res_type, res_data):
        """Dispatch to the per-type strip method for a CloudKitty service.

        :param res_type: CloudKitty service name ('compute', 'image',
            'volume', 'network.*'); anything else falls back to the
            generic strip.
        :param res_data: Raw gnocchi resource payload.
        """
        if res_type == 'compute':
            return self._strip_compute(res_data)
        elif res_type == 'image':
            return self._strip_image(res_data)
        elif res_type == 'volume':
            return self._strip_volume(res_data)
        elif res_type.startswith('network.'):
            return self._strip_network(res_data)
        else:
            return self._generic_strip(res_data)
|
|
@ -5,6 +5,7 @@ pbr>=1.6 # Apache-2.0
|
|||
eventlet!=0.18.3,>=0.18.2 # MIT
|
||||
keystonemiddleware!=4.1.0,>=4.0.0 # Apache-2.0
|
||||
python-ceilometerclient>=2.2.1 # Apache-2.0
|
||||
gnocchiclient>=2.1.0 # Apache-2.0
|
||||
python-keystoneclient!=1.8.0,!=2.1.0,>=1.6.0 # Apache-2.0
|
||||
keystoneauth1>=2.1.0 # Apache-2.0
|
||||
iso8601>=0.1.9 # MIT
|
||||
|
|
|
@ -39,6 +39,7 @@ oslo.config.opts.defaults =
|
|||
cloudkitty.collector.backends =
|
||||
fake = cloudkitty.collector.fake:CSVCollector
|
||||
ceilometer = cloudkitty.collector.ceilometer:CeilometerCollector
|
||||
gnocchi = cloudkitty.collector.gnocchi:GnocchiCollector
|
||||
meta = cloudkitty.collector.meta:MetaCollector
|
||||
|
||||
cloudkitty.tenant.fetchers =
|
||||
|
@ -48,6 +49,7 @@ cloudkitty.tenant.fetchers =
|
|||
cloudkitty.transformers =
|
||||
CloudKittyFormatTransformer = cloudkitty.transformer.format:CloudKittyFormatTransformer
|
||||
CeilometerTransformer = cloudkitty.transformer.ceilometer:CeilometerTransformer
|
||||
GnocchiTransformer = cloudkitty.transformer.gnocchi:GnocchiTransformer
|
||||
|
||||
cloudkitty.rating.processors =
|
||||
noop = cloudkitty.rating.noop:Noop
|
||||
|
|
Loading…
Reference in New Issue