Merge "Added aggregation of storage.objects.size."

This commit is contained in:
Jenkins 2016-08-19 22:09:44 +00:00 committed by Gerrit Code Review
commit 424077c581
5 changed files with 100 additions and 17 deletions

View File

@@ -14,3 +14,4 @@
{"event_processing_params":{"set_default_zone_to":"1","set_default_geolocation_to":"1","set_default_region_to":"W"},"event_type":"nova.vm.cpu.total_allocated","metric_id_list":["nova_vm_cpu_total_all"],"required_raw_fields_list":["creation_time"],"service_id":"host_metrics"}
{"event_processing_params":{"set_default_zone_to":"1","set_default_geolocation_to":"1","set_default_region_to":"W"},"event_type":"swiftlm.diskusage.host.val.size","metric_id_list":["swift_usage_all","swift_usage_rate","swift_usage_host"],"required_raw_fields_list":["creation_time", "hostname", "mount"],"service_id":"host_metrics"}
{"event_processing_params":{"set_default_zone_to":"1","set_default_geolocation_to":"1","set_default_region_to":"W"},"event_type":"swiftlm.diskusage.host.val.avail","metric_id_list":["swift_avail_all","swift_avail_host"],"required_raw_fields_list":["creation_time", "hostname", "mount"],"service_id":"host_metrics"}
{"event_processing_params":{"set_default_zone_to":"1","set_default_geolocation_to":"1","set_default_region_to":"W"},"event_type":"storage.objects.size","metric_id_list":["storage_objects_size_all"],"required_raw_fields_list":["creation_time", "project_id"],"service_id":"host_metrics"}

View File

@@ -23,3 +23,4 @@
{"aggregation_params_map":{"aggregation_pipeline":{"source":"streaming","usage":"calculate_rate","setters":["set_aggregated_metric_name","set_aggregated_period"],"insert":["prepare_data","insert_data_pre_hourly"]},"aggregated_metric_name":"swiftlm.diskusage.rate_agg","aggregation_period":"hourly","aggregation_group_by_list": ["host", "metric_id", "mount"],"filter_by_list": [],"setter_rollup_group_by_list": [],"dimension_list":["aggregation_period","host","project_id"],"pre_hourly_operation":"rate","pre_hourly_group_by_list":["default"]},"metric_group":"swift_usage_rate","metric_id":"swift_usage_rate"}
{"aggregation_params_map":{"aggregation_pipeline":{"source":"streaming","usage":"fetch_quantity","setters":["rollup_quantity","set_aggregated_metric_name","set_aggregated_period"],"insert":["prepare_data","insert_data_pre_hourly"]},"aggregated_metric_name":"swiftlm.diskusage.val.avail_agg","aggregation_period":"hourly","aggregation_group_by_list": ["host", "metric_id", "mount"],"usage_fetch_operation": "avg","filter_by_list": [],"setter_rollup_group_by_list":[],"setter_rollup_operation": "sum","dimension_list":["aggregation_period","host","project_id"],"pre_hourly_operation":"avg","pre_hourly_group_by_list":["default"]},"metric_group":"swift_avail_all","metric_id":"swift_avail_all"}
{"aggregation_params_map":{"aggregation_pipeline":{"source":"streaming","usage":"fetch_quantity","setters":["rollup_quantity","set_aggregated_metric_name","set_aggregated_period"],"insert":["prepare_data","insert_data_pre_hourly"]},"aggregated_metric_name":"swiftlm.diskusage.val.avail_agg","aggregation_period":"hourly","aggregation_group_by_list": ["host", "metric_id", "mount"],"usage_fetch_operation": "avg","filter_by_list": [],"setter_rollup_group_by_list":["host"],"setter_rollup_operation": "sum","dimension_list":["aggregation_period","host","project_id"],"pre_hourly_operation":"avg","pre_hourly_group_by_list":["default"]},"metric_group":"swift_avail_host","metric_id":"swift_avail_host"}
{"aggregation_params_map":{"aggregation_pipeline":{"source":"streaming","usage":"fetch_quantity","setters":["rollup_quantity","set_aggregated_metric_name","set_aggregated_period"],"insert":["prepare_data","insert_data_pre_hourly"]},"aggregated_metric_name":"storage.objects.size_agg","aggregation_period":"hourly","aggregation_group_by_list": ["metric_id", "tenant_id"],"usage_fetch_operation": "avg","filter_by_list": [],"setter_rollup_group_by_list":[],"setter_rollup_operation": "sum","dimension_list":["aggregation_period","host","project_id"],"pre_hourly_operation":"avg","pre_hourly_group_by_list":["default"]},"metric_group":"storage_objects_size_all","metric_id":"storage_objects_size_all"}

View File

@@ -95,6 +95,10 @@ class TestDataDrivenSpecsRepo(SparkContextTest):
metric_id='swift_usage_rate',
expected_agg_metric_name='swiftlm.diskusage.rate_agg',
transform_specs_dataframe=transform_specs_data_frame)
self.check_metric(
metric_id='storage_objects_size_all',
expected_agg_metric_name='storage.objects.size_agg',
transform_specs_dataframe=transform_specs_data_frame)
def check_metric(self, metric_id=None, expected_agg_metric_name=None,
transform_specs_dataframe=None):
@@ -104,9 +108,9 @@ class TestDataDrivenSpecsRepo(SparkContextTest):
"metric_id"]
).where(
transform_specs_dataframe.metric_id == metric_id)
agg_params_json = transform_specs_data_frame.select(
"aggregation_params_map.aggregated_metric_name").collect()[0].\
asDict()
agg_params_json = (transform_specs_data_frame.select(
"aggregation_params_map.aggregated_metric_name").collect()[0]
.asDict())
self.assertEqual(expected_agg_metric_name,
agg_params_json["aggregated_metric_name"])
@@ -125,7 +129,8 @@ class TestDataDrivenSpecsRepo(SparkContextTest):
u'nova.vm.disk.total_allocated_gb',
u'vm.disk.allocation', u'vm.cpu.utilization_perc',
u'swiftlm.diskusage.host.val.size',
u'swiftlm.diskusage.host.val.avail']),
u'swiftlm.diskusage.host.val.avail',
u'storage.objects.size']),
Counter([row.event_type for row in
pre_transform_specs_data_frame.collect()]))
@@ -469,6 +474,34 @@ class TestDataDrivenSpecsRepo(SparkContextTest):
expected_value='host_metrics'
)
# storage.objects.size
event_type = 'storage.objects.size'
storage_objects_size_all_row = self.get_row_for_event_type(
event_type=event_type,
pre_transform_specs_data_frame=pre_transform_specs_data_frame)
self.check_list_field_for_row(
row=storage_objects_size_all_row,
field_name='metric_id_list',
expected_list=['storage_objects_size_all']
)
self.check_list_field_for_row(
row=storage_objects_size_all_row,
field_name='required_raw_fields_list',
expected_list=['creation_time', 'project_id'],
)
self.check_dict_field_for_row(
row=storage_objects_size_all_row,
field_name='event_processing_params',
expected_dict={
"set_default_zone_to": "1",
"set_default_geolocation_to": "1",
"set_default_region_to": "W"})
self.check_value_field_for_row(
row=storage_objects_size_all_row,
field_name='service_id',
expected_value='host_metrics'
)
def get_row_for_event_type(self,
event_type=None,
pre_transform_specs_data_frame=None):
@@ -520,20 +553,21 @@ class TestMySQLDataDrivenSpecsRepo(TestDataDrivenSpecsRepo):
def test_transform_specs_data_frame(self):
db_transform_specs_data_frame = \
db_transform_specs_data_frame = (
self.data_driven_specs_repo.get_data_driven_specs(
sql_context=self.sql_context,
data_driven_spec_type=DataDrivenSpecsRepo.transform_specs_type)
data_driven_spec_type=DataDrivenSpecsRepo.
transform_specs_type))
self.check_transform_specs_data_frame(db_transform_specs_data_frame)
def test_pre_transform_specs_data_frame(self):
db_pre_transform_specs_data_frame = \
db_pre_transform_specs_data_frame = (
self.data_driven_specs_repo.get_data_driven_specs(
sql_context=self.sql_context,
data_driven_spec_type=DataDrivenSpecsRepo.
pre_transform_specs_type)
pre_transform_specs_type))
self.check_pre_transform_specs_data_frame(
db_pre_transform_specs_data_frame)
@@ -550,20 +584,21 @@ class TestJSONDataDrivenSpecsRepo(TestDataDrivenSpecsRepo):
def test_transform_specs_data_frame(self):
json_transform_specs_data_frame = \
json_transform_specs_data_frame = (
self.data_driven_specs_repo.get_data_driven_specs(
sql_context=self.sql_context,
data_driven_spec_type=DataDrivenSpecsRepo.transform_specs_type)
data_driven_spec_type=DataDrivenSpecsRepo
.transform_specs_type))
self.check_transform_specs_data_frame(json_transform_specs_data_frame)
def test_pre_transform_specs_data_frame(self):
json_pre_transform_specs_data_frame = \
json_pre_transform_specs_data_frame = (
self.data_driven_specs_repo.get_data_driven_specs(
sql_context=self.sql_context,
data_driven_spec_type=DataDrivenSpecsRepo.
pre_transform_specs_type)
pre_transform_specs_type))
self.check_pre_transform_specs_data_frame(
json_pre_transform_specs_data_frame)

View File

@@ -84,10 +84,10 @@ class SparkTest(SparkContextTest):
insert_manager):
usage_manager.return_value = MockComponentManager.get_usage_cmpt_mgr()
setter_manager.return_value = \
MockComponentManager.get_setter_cmpt_mgr()
insert_manager.return_value = \
MockComponentManager.get_insert_cmpt_mgr()
setter_manager.return_value = (MockComponentManager
.get_setter_cmpt_mgr())
insert_manager.return_value = (MockComponentManager
.get_insert_cmpt_mgr())
# Create an emulated set of Kafka messages (these were gathered
# by extracting Monasca messages from the Metrics queue).
@@ -114,7 +114,7 @@ class SparkTest(SparkContextTest):
result = simple_count_transform(rdd_monasca_with_offsets)
# Verify it worked
self.assertEqual(result, 386)
self.assertEqual(result, 391)
# Call the primary method in mon_metrics_kafka
MonMetricsKafkaProcessor.rdd_to_recordstore(
@@ -1269,6 +1269,47 @@ class SparkTest(SparkContextTest):
.get('metric').get('value_meta')
.get('lastrecord_timestamp_string'))
# Verify storage.objects.size_agg metrics
storage_objects_size_agg_metric = [
value for value in metrics
if value.get('metric').get('name') ==
'storage.objects.size_agg'][0]
self.assertTrue(storage_objects_size_agg_metric is not None)
self.assertEqual(16666.5,
storage_objects_size_agg_metric
.get('metric').get('value'))
self.assertEqual('useast',
storage_objects_size_agg_metric
.get('meta').get('region'))
self.assertEqual(cfg.CONF.messaging.publish_kafka_tenant_id,
storage_objects_size_agg_metric
.get('meta').get('tenantId'))
self.assertEqual('all',
storage_objects_size_agg_metric
.get('metric').get('dimensions').get('host'))
self.assertEqual('all',
storage_objects_size_agg_metric.get('metric')
.get('dimensions').get('project_id'))
self.assertEqual('hourly',
storage_objects_size_agg_metric
.get('metric').get('dimensions')
.get('aggregation_period'))
self.assertEqual(5.0,
storage_objects_size_agg_metric
.get('metric').get('value_meta').get('record_count'))
self.assertEqual('2016-08-10 21:04:12',
storage_objects_size_agg_metric
.get('metric').get('value_meta')
.get('firstrecord_timestamp_string'))
self.assertEqual('2016-08-10 21:04:12',
storage_objects_size_agg_metric
.get('metric').get('value_meta')
.get('lastrecord_timestamp_string'))
def simple_count_transform(rdd):
return rdd.count()

View File

@@ -384,3 +384,8 @@
('<message id>','{"metric":{"name":"nova.vm.mem.total_allocated_mb","dimensions":{"cluster":"compute","hostname":"padawan-ccp-comp0005-mgmt","component":"vm","service":"compute","cloud_name":"padawan","control_plane":"ccp"},"timestamp":1453308000040,"value":3072.0},"meta":{"tenantId":"tenant_id of metric writer","region":"unset"},"creation_time":1469833554}')
('<message id>','{"metric":{"name":"nova.vm.mem.total_allocated_mb","dimensions":{"cluster":"compute","hostname":"padawan-ccp-comp0002-mgmt","component":"vm","service":"compute","cloud_name":"padawan","control_plane":"ccp"},"timestamp":1453308006000,"value":3072.0},"meta":{"tenantId":"tenant_id of metric writer","region":"unset"},"creation_time":1469833554}')
('<message id>','{"metric":{"name":"nova.vm.mem.total_allocated_mb","dimensions":{"cluster":"compute","hostname":"padawan-ccp-comp0002-mgmt","component":"vm","service":"compute","cloud_name":"padawan","control_plane":"ccp"},"timestamp":1453308046000,"value":2048.0},"meta":{"tenantId":"tenant_id of metric writer","region":"unset"},"creation_time":1469833554}')
('<message id>','{"metric":{"name":"storage.objects.size","dimensions":{"cluster":"mtrmon","unit":"B","user_id":"None","project_id":"0d2f9e2c395a4d5c9aac5edbb149014b","cloud_name":"mid-size","datasource":"ceilometer","resource_id":"0d2f9e2c395a4d5c9aac5edbb149014b","source":"openstack","region":"None","type":"gauge","control_plane":"control-plane-1"},"timestamp":1470863052575,"value":1111.1},"meta":{"tenantId":"tenant_id of metric writer","region":"unset"},"creation_time":1470863057}')
('<message id>','{"metric":{"name":"storage.objects.size","dimensions":{"cluster":"mtrmon","unit":"B","user_id":"None","project_id":"48792a0c749f42a9be06c9ab9d1d1b0c","cloud_name":"mid-size","datasource":"ceilometer","resource_id":"48792a0c749f42a9be06c9ab9d1d1b0c","source":"openstack","region":"None","type":"gauge","control_plane":"control-plane-1"},"timestamp":1470863052575,"value":2222.2},"meta":{"tenantId":"tenant_id of metric writer","region":"unset"},"creation_time":1470863057}')
('<message id>','{"metric":{"name":"storage.objects.size","dimensions":{"cluster":"mtrmon","unit":"B","user_id":"None","project_id":"6dc15cb0a9d143859dd70fe9ca32218a","cloud_name":"mid-size","datasource":"ceilometer","resource_id":"6dc15cb0a9d143859dd70fe9ca32218a","source":"openstack","region":"None","type":"gauge","control_plane":"control-plane-1"},"timestamp":1470863052576,"value":3333.3},"meta":{"tenantId":"tenant_id of metric writer","region":"unset"},"creation_time":1470863057}')
('<message id>','{"metric":{"name":"storage.objects.size","dimensions":{"cluster":"mtrmon","unit":"B","user_id":"None","project_id":"942138d729e240328cc24db012c523ed","cloud_name":"mid-size","datasource":"ceilometer","resource_id":"942138d729e240328cc24db012c523ed","source":"openstack","region":"None","type":"gauge","control_plane":"control-plane-1"},"timestamp":1470863052576,"value":4444.4},"meta":{"tenantId":"tenant_id of metric writer","region":"unset"},"creation_time":1470863057}')
('<message id>','{"metric":{"name":"storage.objects.size","dimensions":{"cluster":"mtrmon","unit":"B","user_id":"None","project_id":"bbe076c7fe124666af13f55c35a631e0","cloud_name":"mid-size","datasource":"ceilometer","resource_id":"bbe076c7fe124666af13f55c35a631e0","source":"openstack","region":"None","type":"gauge","control_plane":"control-plane-1"},"timestamp":1470863052577,"value":5555.5},"meta":{"tenantId":"tenant_id of metric writer","region":"unset"},"creation_time":1470863057}')