Add Python support for multiple metrics in measurement and statistics queries

Adjusted Java Influx code to override merge_metrics with group_by

Change-Id: I34ca2ae516e8945d92723a2165bef34e59d5053c
This commit is contained in:
Ryan Brandt 2016-05-12 13:49:37 -06:00
parent 3431072de7
commit 5c4e209a81
10 changed files with 282 additions and 149 deletions

View File

@@ -91,38 +91,32 @@ public class InfluxV9MeasurementRepo implements MeasurementRepo {
Boolean mergeMetricsFlag, String groupBy) throws Exception {
String q;
if (Boolean.TRUE.equals(mergeMetricsFlag)) {
String groupByStr = "";
if ("*".equals(groupBy)) {
// The time column is automatically included in the results before all other columns.
q = String.format("select value, value_meta %1$s "
+ "where %2$s %3$s %4$s %5$s %6$s",
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
this.influxV9Utils.privateRegionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime));
groupByStr = " group by * ";
} else {
if (Boolean.FALSE.equals(mergeMetricsFlag)) {
if (!this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
throw new MultipleMetricsException(name, dimensions);
}
if (!"*".equals(groupBy) &&
!this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
throw new MultipleMetricsException(name, dimensions);
groupByStr = this.influxV9Utils.groupByPart();
}
// The time column is automatically included in the results before all other columns.
q = String.format("select value, value_meta %1$s "
+ "where %2$s %3$s %4$s %5$s %6$s %7$s", //slimit 1
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
this.influxV9Utils.privateRegionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.groupByPart());
}
// The time column is automatically included in the results before all other columns.
q = String.format("select value, value_meta %1$s "
+ "where %2$s %3$s %4$s %5$s %6$s %7$s",
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
this.influxV9Utils.privateRegionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
groupByStr);
logger.debug("Measurements query: {}", q);
return q;

View File

@@ -116,24 +116,22 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
String q;
if (Boolean.TRUE.equals(mergeMetricsFlag)) {
if ("*".equals(groupBy) ) {
q = String.format("select %1$s %2$s "
+ "where %3$s %4$s %5$s %6$s %7$s %8$s %9$s %10$s",
funcPart(statistics),
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
this.influxV9Utils.privateRegionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.timeOffsetPart(offsetTimePart),
this.influxV9Utils.periodPart(period),
this.influxV9Utils.limitPart(limit));
+ "where %3$s %4$s %5$s %6$s %7$s %8$s",
funcPart(statistics),
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
this.influxV9Utils.privateRegionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.periodPartWithGroupBy(period));
} else {
if (!"*".equals(groupBy) &&
if (Boolean.FALSE.equals(mergeMetricsFlag) &&
!this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
throw new MultipleMetricsException(name, dimensions);
@@ -141,15 +139,18 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
}
q = String.format("select %1$s %2$s "
+ "where %3$s %4$s %5$s %6$s %7$s %8$s",
funcPart(statistics),
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
this.influxV9Utils.privateRegionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.periodPartWithGroupBy(period));
+ "where %3$s %4$s %5$s %6$s %7$s %8$s %9$s %10$s",
funcPart(statistics),
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
this.influxV9Utils.privateRegionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.timeOffsetPart(offsetTimePart),
this.influxV9Utils.periodPart(period, mergeMetricsFlag),
this.influxV9Utils.limitPart(limit));
}
logger.debug("Statistics query: {}", q);

View File

@@ -259,10 +259,12 @@ public class InfluxV9Utils {
: " group by time(300s), *";
}
public String periodPart(int period) {
return period > 0 ? String.format(" group by time(%1$ds)", period)
public String periodPart(int period, Boolean mergeMetricsFlag) {
String periodStr = period > 0 ? String.format(" group by time(%1$ds)", period)
: " group by time(300s)";
periodStr += mergeMetricsFlag ? "" : ", *";
return periodStr;
}
Map<String, String> filterPrivateTags(Map<String, String> tagMap) {

View File

@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 Hewlett-Packard
# (C) Copyright 2015,2016 Hewlett Packard Enterprise Development LP
# (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
# Copyright 2015 Cray Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -61,21 +60,27 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
def _build_select_measurement_query(self, dimensions, name, tenant_id,
region, start_timestamp, end_timestamp,
offset, limit):
offset, group_by, limit):
from_clause = self._build_from_clause(dimensions, name, tenant_id,
region, start_timestamp,
end_timestamp)
offset_clause = self._build_offset_clause(offset, limit)
offset_clause = self._build_offset_clause(offset)
query = 'select value, value_meta ' + from_clause + offset_clause
group_by_clause = self._build_group_by_clause(group_by)
limit_clause = self._build_limit_clause(limit)
query = 'select value, value_meta '\
+ from_clause + offset_clause\
+ group_by_clause + limit_clause
return query
def _build_statistics_query(self, dimensions, name, tenant_id,
region, start_timestamp, end_timestamp,
statistics, period, offset, limit):
statistics, period, offset, group_by, limit):
from_clause = self._build_from_clause(dimensions, name, tenant_id,
region, start_timestamp,
@@ -104,9 +109,9 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
query = 'select ' + statistic_string + ' ' + from_clause
query += " group by time(" + period + "s)"
query += self._build_group_by_clause(group_by, period)
limit_clause = " limit {}".format(str(limit + 1))
limit_clause = self._build_limit_clause(limit)
query += limit_clause
@@ -316,7 +321,7 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
def measurement_list(self, tenant_id, region, name, dimensions,
start_timestamp, end_timestamp, offset,
limit, merge_metrics_flag):
limit, merge_metrics_flag, group_by):
json_measurement_list = []
@@ -326,9 +331,10 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
region,
start_timestamp,
end_timestamp,
offset, limit)
offset, group_by,
limit)
if not merge_metrics_flag:
if not group_by and not merge_metrics_flag:
dimensions = self._get_dimensions(tenant_id, region, name, dimensions)
query += " slimit 1"
@@ -352,11 +358,16 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
measurement = {u'name': serie['name'],
u'id': measurements_list[-1][0],
u'dimensions': dimensions,
u'columns': [u'timestamp', u'value',
u'value_meta'],
u'measurements': measurements_list}
if not group_by:
measurement[u'dimensions'] = dimensions
else:
measurement[u'dimensions'] = {key: value for key, value in serie['tags'].iteritems()
if not key.startswith('_')}
json_measurement_list.append(measurement)
return json_measurement_list
@@ -407,20 +418,19 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
raise exceptions.RepositoryException(ex)
def metrics_statistics(self, tenant_id, region, name, dimensions,
start_timestamp,
end_timestamp, statistics, period, offset, limit,
merge_metrics_flag):
start_timestamp, end_timestamp, statistics,
period, offset, limit, merge_metrics_flag,
group_by):
json_statistics_list = []
try:
query = self._build_statistics_query(dimensions, name, tenant_id,
region,
start_timestamp,
region, start_timestamp,
end_timestamp, statistics,
period, offset, limit)
period, offset, group_by, limit)
if not merge_metrics_flag:
if not group_by and not merge_metrics_flag:
dimensions = self._get_dimensions(tenant_id, region, name, dimensions)
query += " slimit 1"
@@ -446,10 +456,15 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
statistic = {u'name': serie['name'],
u'id': stats_list[-1][0],
u'dimensions': dimensions,
u'columns': columns,
u'statistics': stats_list}
if not group_by:
statistic[u'dimensions'] = dimensions
else:
statistic[u'dimensions'] = {key: value for key, value in serie['tags'].iteritems()
if not key.startswith('_')}
json_statistics_list.append(statistic)
return json_statistics_list
@@ -485,18 +500,31 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
raise exceptions.RepositoryException(ex)
def _build_offset_clause(self, offset, limit):
def _build_offset_clause(self, offset):
if offset:
offset_clause = (
" and time > '{}' limit {}".format(offset, str(limit + 1)))
offset_clause = " and time > '{}'".format(offset)
else:
offset_clause = " limit {}".format(str(limit + 1))
offset_clause = ""
return offset_clause
def _build_group_by_clause(self, group_by, period=None):
if group_by or period:
items = []
if period:
items.append("time(" + str(period) + "s)")
if group_by:
items.append('*')
clause = " group by " + ','.join(items)
else:
clause = ""
return clause
def _build_limit_clause(self, limit):
return " limit {} ".format(str(limit + 1))
def _has_measurements(self, tenant_id, region, name, dimensions,
start_timestamp, end_timestamp):
@@ -521,7 +549,8 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
end_timestamp,
0,
1,
False)
False,
None)
if len(measurements) == 0:
has_measurements = False
@@ -571,9 +600,11 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
time_clause += " and time <= " + str(int(end_timestamp *
1000000)) + "u "
offset_clause = self._build_offset_clause(offset, limit)
offset_clause = self._build_offset_clause(offset)
query += where_clause + time_clause + offset_clause
limit_clause = self._build_limit_clause(limit)
query += where_clause + time_clause + offset_clause + limit_clause
result = self.influxdb_client.query(query)

View File

@@ -1,4 +1,4 @@
# Copyright 2014 Hewlett-Packard
# (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -41,13 +41,15 @@ class AbstractMetricsRepository(object):
@abc.abstractmethod
def measurement_list(self, tenant_id, region, name, dimensions,
start_timestamp, end_timestamp, offset, limit,
merge_metrics_flag):
merge_metrics_flag,
group_by):
pass
@abc.abstractmethod
def metrics_statistics(self, tenant_id, region, name, dimensions,
start_timestamp, end_timestamp, statistics,
period, offset, limit, merge_metrics_flag):
period, offset, limit, merge_metrics_flag,
group_by):
pass
@abc.abstractmethod

View File

@@ -63,7 +63,8 @@ class TestRepoMetricsInfluxDB(unittest.TestCase):
end_timestamp=2,
offset=None,
limit=1,
merge_metrics_flag=True)
merge_metrics_flag=True,
group_by=None)
self.assertEqual(len(result), 1)
self.assertIsNone(result[0]['dimensions'])

View File

@@ -1,5 +1,5 @@
# Copyright 2015 Cray Inc. All Rights Reserved.
# Copyright 2014,2016 Hewlett Packard Enterprise Development LP
# (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -463,7 +463,62 @@ def paginate_alarming(resource, uri, limit):
return resource
def paginate_measurement(measurement, uri, limit):
def paginate_dimension_values(dimvals, uri, offset, limit):
parsed_uri = urlparse.urlparse(uri)
self_link = build_base_uri(parsed_uri)
old_query_params = _get_old_query_params(parsed_uri)
if old_query_params:
self_link += '?' + '&'.join(old_query_params)
if (dimvals and dimvals[u'values']):
have_more, truncated_values = _truncate_dimension_values(dimvals[u'values'],
limit,
offset)
links = [{u'rel': u'self', u'href': self_link.decode('utf8')}]
if have_more:
new_offset = truncated_values[limit - 1]
next_link = build_base_uri(parsed_uri)
new_query_params = [u'offset' + '=' + urlparse.quote(
new_offset.encode('utf8'), safe='')]
_get_old_query_params_except_offset(new_query_params, parsed_uri)
if new_query_params:
next_link += '?' + '&'.join(new_query_params)
links.append({u'rel': u'next', u'href': next_link.decode('utf8')})
truncated_dimvals = {u'id': dimvals[u'id'],
u'dimension_name': dimvals[u'dimension_name'],
u'values': truncated_values}
#
# Only return metric name if one was provided
#
if u'metric_name' in dimvals:
truncated_dimvals[u'metric_name'] = dimvals[u'metric_name']
resource = {u'links': links,
u'elements': [truncated_dimvals]}
else:
resource = {u'links': ([{u'rel': u'self',
u'href': self_link.decode('utf8')}]),
u'elements': [dimvals]}
return resource
def _truncate_dimension_values(values, limit, offset):
if offset and offset in values:
next_value_pos = values.index(offset) + 1
values = values[next_value_pos:]
have_more = len(values) > limit
return have_more, values[:limit]
def paginate_measurements(measurements, uri, limit):
parsed_uri = urlparse.urlparse(uri)
self_link = build_base_uri(parsed_uri)
@@ -473,41 +528,48 @@ def paginate_measurement(measurement, uri, limit):
if old_query_params:
self_link += '?' + '&'.join(old_query_params)
if (measurement
and measurement[0]
and measurement[0]['measurements']
and len(measurement[0]['measurements']) > limit):
if measurements:
measurement_elements = []
resource = {u'links': [{u'rel': u'self',
u'href': self_link.decode('utf8')},
]}
for measurement in measurements:
if len(measurement['measurements']) >= limit:
new_offset = measurement[0]['measurements'][limit - 1][0]
new_offset = measurement['measurements'][limit - 1][0]
next_link = build_base_uri(parsed_uri)
next_link = build_base_uri(parsed_uri)
new_query_params = [u'offset' + '=' + urlparse.quote(
new_offset.encode('utf8'), safe='')]
new_query_params = [u'offset' + '=' + urlparse.quote(
new_offset.encode('utf8'), safe='')]
_get_old_query_params_except_offset(new_query_params, parsed_uri)
_get_old_query_params_except_offset(new_query_params, parsed_uri)
if new_query_params:
next_link += '?' + '&'.join(new_query_params)
if new_query_params:
next_link += '?' + '&'.join(new_query_params)
truncated_measurement = [{u'dimensions': measurement[0]['dimensions'],
u'measurements': (measurement[0]
['measurements'][:limit]),
u'name': measurement[0]['name'],
u'columns': measurement[0]['columns'],
u'id': new_offset}]
resource[u'links'].append({u'rel': u'next',
u'href': next_link.decode('utf8')})
resource = {u'links': ([{u'rel': u'self',
u'href': self_link.decode('utf8')},
{u'rel': u'next',
u'href': next_link.decode('utf8')}]),
u'elements': truncated_measurement}
truncated_measurement = {u'dimensions': measurement['dimensions'],
u'measurements': (measurement
['measurements'][:limit]),
u'name': measurement['name'],
u'columns': measurement['columns'],
u'id': new_offset}
measurement_elements.append(truncated_measurement)
break
else:
limit -= len(measurement['measurements'])
measurement_elements.append(measurement)
resource[u'elements'] = measurement_elements
else:
resource = {u'links': ([{u'rel': u'self',
u'href': self_link.decode('utf8')}]),
u'elements': measurement}
u'elements': []}
return resource
@@ -541,7 +603,7 @@ def _get_old_query_params_except_offset(new_query_params, parsed_uri):
'utf8'), safe=''))
def paginate_statistics(statistic, uri, limit):
def paginate_statistics(statistics, uri, limit):
parsed_uri = urlparse.urlparse(uri)
self_link = build_base_uri(parsed_uri)
@@ -551,41 +613,49 @@ def paginate_statistics(statistic, uri, limit):
if old_query_params:
self_link += '?' + '&'.join(old_query_params)
if (statistic
and statistic[0]
and statistic[0]['statistics']
and len(statistic[0]['statistics']) > limit):
if statistics:
statistic_elements = []
resource = {u'links': [{u'rel': u'self',
u'href': self_link.decode('utf8')}]}
new_offset = (
statistic[0]['statistics'][limit - 1][0])
for statistic in statistics:
if len(statistic['statistics']) >= limit:
next_link = build_base_uri(parsed_uri)
new_offset = (
statistic['statistics'][limit - 1][0])
new_query_params = [u'offset' + '=' + urlparse.quote(
new_offset.encode('utf8'), safe='')]
next_link = build_base_uri(parsed_uri)
_get_old_query_params_except_offset(new_query_params, parsed_uri)
new_query_params = [u'offset' + '=' + urlparse.quote(
new_offset.encode('utf8'), safe='')]
if new_query_params:
next_link += '?' + '&'.join(new_query_params)
_get_old_query_params_except_offset(new_query_params, parsed_uri)
truncated_statistic = [{u'dimensions': statistic[0]['dimensions'],
u'statistics': (statistic[0]['statistics'][:limit]),
u'name': statistic[0]['name'],
u'columns': statistic[0]['columns'],
u'id': new_offset}]
if new_query_params:
next_link += '?' + '&'.join(new_query_params)
resource = {u'links': ([{u'rel': u'self',
u'href': self_link.decode('utf8')},
{u'rel': u'next',
u'href': next_link.decode('utf8')}]),
u'elements': truncated_statistic}
resource[u'links'].append({u'rel': u'next',
u'href': next_link.decode('utf8')})
truncated_statistic = {u'dimensions': statistic['dimensions'],
u'statistics': (statistic['statistics'][:limit]),
u'name': statistic['name'],
u'columns': statistic['columns'],
u'id': new_offset}
statistic_elements.append(truncated_statistic)
break
else:
limit -= len(statistic['statistics'])
statistic_elements.append(statistic)
resource[u'elements'] = statistic_elements
else:
resource = {u'links': ([{u'rel': u'self',
u'href': self_link.decode('utf8')}]),
u'elements': statistic}
u'elements': []}
return resource

View File

@@ -182,11 +182,13 @@ class MetricsMeasurements(metrics_api_v2.MetricsMeasurementsV2API):
offset = helpers.get_query_param(req, 'offset')
limit = helpers.get_limit(req)
merge_metrics_flag = get_merge_metrics_flag(req)
group_by = helpers.get_query_param(req, "group_by")
result = self._measurement_list(tenant_id, name, dimensions,
start_timestamp, end_timestamp,
req.uri, offset,
limit, merge_metrics_flag)
limit, merge_metrics_flag,
group_by)
res.body = helpers.dumpit_utf8(result)
res.status = falcon.HTTP_200
@@ -194,7 +196,7 @@ class MetricsMeasurements(metrics_api_v2.MetricsMeasurementsV2API):
@resource.resource_try_catch_block
def _measurement_list(self, tenant_id, name, dimensions, start_timestamp,
end_timestamp, req_uri, offset,
limit, merge_metrics_flag):
limit, merge_metrics_flag, group_by):
result = self._metrics_repo.measurement_list(tenant_id,
self._region,
@@ -204,9 +206,10 @@ class MetricsMeasurements(metrics_api_v2.MetricsMeasurementsV2API):
end_timestamp,
offset,
limit,
merge_metrics_flag)
merge_metrics_flag,
group_by)
return helpers.paginate_measurement(result, req_uri, limit)
return helpers.paginate_measurements(result, req_uri, limit)
class MetricsStatistics(metrics_api_v2.MetricsStatisticsV2API):
@@ -240,11 +243,13 @@ class MetricsStatistics(metrics_api_v2.MetricsStatisticsV2API):
offset = helpers.get_query_param(req, 'offset')
limit = helpers.get_limit(req)
merge_metrics_flag = get_merge_metrics_flag(req)
group_by = helpers.get_query_param(req, "group_by")
result = self._metric_statistics(tenant_id, name, dimensions,
start_timestamp, end_timestamp,
statistics, period, req.uri,
offset, limit, merge_metrics_flag)
offset, limit, merge_metrics_flag,
group_by)
res.body = helpers.dumpit_utf8(result)
res.status = falcon.HTTP_200
@@ -252,7 +257,7 @@ class MetricsStatistics(metrics_api_v2.MetricsStatisticsV2API):
@resource.resource_try_catch_block
def _metric_statistics(self, tenant_id, name, dimensions, start_timestamp,
end_timestamp, statistics, period, req_uri,
offset, limit, merge_metrics_flag):
offset, limit, merge_metrics_flag, group_by):
result = self._metrics_repo.metrics_statistics(tenant_id,
self._region,
@@ -263,7 +268,8 @@ class MetricsStatistics(metrics_api_v2.MetricsStatisticsV2API):
statistics, period,
offset,
limit,
merge_metrics_flag)
merge_metrics_flag,
group_by)
return helpers.paginate_statistics(result, req_uri, limit)

View File

@@ -1,4 +1,4 @@
# (C) Copyright 2015-2016 Hewlett Packard Enterprise Development Company LP
# (C) Copyright 2015-2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -258,6 +258,33 @@ class TestMeasurements(base.BaseMonascaTest):
query_parms)
self.assertEqual(200, resp.status)
@test.attr(type="gate")
def test_list_measurements_with_group_by(self):
query_parms = '?name=' + str(self._names_list[1]) + \
'&group_by=*' + \
'&start_time=' + str(self._start_time) + \
'&end_time=' + str(self._end_time)
resp, response_body = self.monasca_client.list_measurements(
query_parms)
self.assertEqual(200, resp.status)
elements = response_body['elements']
self.assertEqual(len(elements), 4)
self._verify_list_measurements_elements(elements, None, None)
@test.attr(type="gate")
def test_list_measurements_with_group_by_and_merge(self):
query_parms = '?name=' + str(self._names_list[1]) + \
'&group_by=*' + \
'&merge_metrics=true' + \
'&start_time=' + str(self._start_time) + \
'&end_time=' + str(self._end_time)
resp, response_body = self.monasca_client.list_measurements(
query_parms)
self.assertEqual(200, resp.status)
elements = response_body['elements']
self.assertEqual(len(elements), 4)
self._verify_list_measurements_elements(elements, None, None)
@test.attr(type="gate")
@test.attr(type=['negative'])
def test_list_measurements_with_name_exceeds_max_length(self):
@@ -320,8 +347,13 @@ class TestMeasurements(base.BaseMonascaTest):
def _verify_list_measurements_elements(self, elements, test_key,
test_value):
if elements:
element = elements[0]
if not elements:
error_msg = "Failed: at least one element is needed. " \
"Number of element = 0."
self.fail(error_msg)
for element in elements:
# element = elements[0]
self.assertEqual(set(element),
set(['columns', 'dimensions', 'id',
'measurements', 'name']))
@@ -335,10 +367,6 @@ class TestMeasurements(base.BaseMonascaTest):
if test_key is not None and test_value is not None:
self.assertEqual(str(element['dimensions'][test_key]),
test_value)
else:
error_msg = "Failed: at least one element is needed. " \
"Number of element = 0."
self.fail(error_msg)
def _verify_list_measurements_meas_len(self, measurements, test_len):
if measurements:

View File

@@ -241,8 +241,7 @@ class TestStatistics(base.BaseMonascaTest):
('start_time', str(start_time)),
('end_time', str(end_time)),
('period', 1),
('limit', limit)
]
('limit', limit)]
offset = None
while True:
num_expected_elements = limit
@@ -271,7 +270,6 @@ class TestStatistics(base.BaseMonascaTest):
# Get the next set
offset = self._get_offset(response_body)
@test.attr(type="gate")
@test.attr(type=['negative'])
def test_list_statistics_with_no_merge_metrics(self):