Add support for multiple metrics in measurements and statistics resources

Precursor to work on a group_by option for measurements and statistics queries

Change-Id: I6569923687511612c1d9ec318d2c5fe25535f973
Ryan Brandt 2016-05-10 10:02:48 -06:00
parent 88b756cf64
commit ae05d04bff
21 changed files with 562 additions and 298 deletions

View File

@ -1,5 +1,7 @@
Andreas Jaeger <aj@suse.com>
Angelo Mendonca <angelomendonca@gmail.com>
Ben Motz <bmotz@cray.com>
Brad Klein <bradley.klein@twcable.com>
Craig Bryant <craig.bryant@hp.com>
Deklan Dieterly <deklan.dieterly@hp.com>
Deklan Dieterly <deklan.dieterly@hpe.com>
@ -15,7 +17,9 @@ Joe Keen <joe.keen@hp.com>
Jonathan Halterman <jhalterman@gmail.com>
Jonathan Halterman <jhalterman@hp.com>
Kaiyan Sheng <kaiyan.sheng@hp.com>
Koji Nakazono <nakazono_0507@jp.fujitsu.com>
LiuNanke <nanke.liu@easystack.cn>
Lukasz Zajaczkowski <Lukasz.Zajaczkowski@ts.fujitsu.com>
Michael Bielinski <michael.bielinski@hp.com>
Michael James Hoppal <michael.jam.hoppal@hp.com>
Michal Zielonka <michal.zielonka@ts.fujitsu.com>

View File

@ -1059,7 +1059,7 @@ Operations for accessing measurements of metrics.
## List measurements
Get measurements for metrics.
Metrics must be fully qualified with name and dimensions so that only measurements are returned for a single metric. If the metric name and dimensions given do not resolve to a single metric, an error will be displayed asking the user to further qualify the metric with a name and additional dimensions.
If `group_by` is not specified, metrics must be fully qualified with name and dimensions so that only measurements are returned for a single metric. If the metric name and dimensions given do not resolve to a single metric, an error will be displayed asking the user to further qualify the metric with a name and additional dimensions.
If users do not wish to see measurements for a single metric, but would prefer to have measurements from multiple metrics combined, a 'merge_metrics' flag can be specified. When 'merge_metrics' is set to true (**merge_metrics=true**), all measurements for all metrics that satisfy the query parameters will be merged into a single list of measurements.
@ -1081,6 +1081,7 @@ None.
* offset (timestamp, optional)
* limit (integer, optional)
* merge_metrics (boolean, optional) - allow multiple metrics to be combined into a single list of measurements.
* group_by (string, optional) - list of columns used to group the metrics that are returned; see the example request below. For now, the only valid value is '*'.
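For example, a request such as the following (the request path, metric name, and time range are illustrative) returns one element per matching metric, each with its own id, dimensions, and measurements, instead of requiring the query to resolve to a single metric:

    GET /v2.0/metrics/measurements?name=cpu.idle_perc&start_time=2016-05-10T00:00:00Z&group_by=*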
#### Request Body
None.
@ -1229,7 +1230,7 @@ ___
# Statistics
Operations for calculating statistics of metrics.
Metrics must be fully qualified with name and dimensions so that only statistics are returned for a single metric. If the metric name and dimensions given do not resolve to a single metric, an error will be displayed asking the user to further qualify the metric with a name and additional dimensions.
If `group_by` is not specified, then metrics must be fully qualified with name and dimensions so that only statistics are returned for a single metric. If the metric name and dimensions given do not resolve to a single metric, an error will be displayed asking the user to further qualify the metric with a name and additional dimensions.
If users do not wish to see statistics for a single metric, but would prefer to have statistics from multiple metrics combined, a 'merge_metrics' flag can be specified. When 'merge_metrics' is set to true (**merge_metrics=true**), all statistics for all metrics that satisfy the query parameters will be merged into a single list of statistics.
@ -1256,6 +1257,7 @@ None.
* offset (timestamp, optional)
* limit (integer, optional)
* merge_metrics (boolean, optional) - allow multiple metrics to be combined into a single list of statistics.
* group_by (string, optional) - list of columns used to group the metrics that are returned; see the example request below. For now, the only valid value is '*'.
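For example, a request such as the following (the request path, metric name, statistic, and time range are illustrative) returns statistics per matching metric instead of requiring the query to resolve to a single metric:

    GET /v2.0/metrics/statistics?name=cpu.idle_perc&statistics=avg&period=300&start_time=2016-05-10T00:00:00Z&group_by=*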
#### Request Body
None.

View File

@ -298,14 +298,6 @@
<threadCount>4</threadCount>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
<configuration>
</configuration>
</execution>
</executions>
</plugin>
<plugin>

View File

@ -189,6 +189,13 @@ public final class Validation {
}
}
public static void validateMetricsGroupBy(String groupBy) {
if (!Strings.isNullOrEmpty(groupBy) && !"*".equals(groupBy)) {
throw Exceptions.unprocessableEntity("Invalid group_by", "Group_by must be '*' if specified");
}
}
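// Illustrative behavior of the check above (values are examples):
// validateMetricsGroupBy(null), validateMetricsGroupBy(""), and
// validateMetricsGroupBy("*") all pass, while validateMetricsGroupBy("hostname")
// throws a 422 unprocessable entity error, since '*' is the only value
// supported so far.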
public static void validateLifecycleState(String lifecycleState) {
if (lifecycleState != null) {
if (lifecycleState.length() > 50) {

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
* Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
@ -28,5 +28,6 @@ public interface MeasurementRepo {
*/
List<Measurements> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime, @Nullable String offset,
int limit, Boolean mergeMetricsFlag) throws Exception;
int limit, Boolean mergeMetricsFlag, String groupBy)
throws Exception;
}

View File

@ -30,7 +30,9 @@ public class Measurements extends AbstractEntity {
private final String[] columns = COLUMNS;
private List<Object[]> measurements;
public Measurements() {}
public Measurements() {
measurements = new LinkedList<>();
}
public Measurements(String name, Map<String, String> dimensions, List<Object[]> measurements) {
this.name = name;

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
* Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
@ -30,6 +30,7 @@ public interface StatisticRepo {
*/
List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime, List<String> statistics,
int period, String offset, int limit, Boolean mergeMetricsFlag)
int period, String offset, int limit, Boolean mergeMetricsFlag,
String groupBy)
throws Exception;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
* Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
@ -49,6 +49,8 @@ public class InfluxV9MeasurementRepo implements MeasurementRepo {
private final InfluxV9MetricDefinitionRepo influxV9MetricDefinitionRepo;
private final ObjectMapper objectMapper = new ObjectMapper();
@Inject
public InfluxV9MeasurementRepo(ApiConfig config,
InfluxV9RepoReader influxV9RepoReader,
@ -65,18 +67,18 @@ public class InfluxV9MeasurementRepo implements MeasurementRepo {
@Override
public List<Measurements> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime,
@Nullable String offset, int limit, Boolean mergeMetricsFlag)
@Nullable String offset, int limit, Boolean mergeMetricsFlag,
String groupBy)
throws Exception {
String q = buildQuery(tenantId, name, dimensions, startTime, endTime,
offset, limit, mergeMetricsFlag);
offset, limit, mergeMetricsFlag, groupBy);
String r = this.influxV9RepoReader.read(q);
Series series = this.objectMapper.readValue(r, Series.class);
List<Measurements> measurementsList = measurementsList(series);
List<Measurements> measurementsList = measurementsList(series, offset, limit);
logger.debug("Found {} metrics matching query", measurementsList.size());
@ -85,42 +87,39 @@ public class InfluxV9MeasurementRepo implements MeasurementRepo {
private String buildQuery(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, DateTime endTime, String offset, int limit,
Boolean mergeMetricsFlag) throws Exception {
Boolean mergeMetricsFlag, String groupBy) throws Exception {
String q;
if (Boolean.TRUE.equals(mergeMetricsFlag)) {
// The time column is automatically included in the results before all other columns.
q = String.format("select value, value_meta %1$s "
+ "where %2$s %3$s %4$s %5$s %6$s %7$s %8$s",
+ "where %2$s %3$s %4$s %5$s %6$s",
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
this.influxV9Utils.privateRegionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.timeOffsetPart(offset),
this.influxV9Utils.limitPart(limit));
this.influxV9Utils.endTimePart(endTime));
} else {
if (!this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
if (!"*".equals(groupBy) &&
!this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
throw new MultipleMetricsException(name, dimensions);
}
// The time column is automatically included in the results before all other columns.
q = String.format("select value, value_meta %1$s "
+ "where %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s slimit 1",
+ "where %2$s %3$s %4$s %5$s %6$s %7$s", //slimit 1
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
this.influxV9Utils.privateRegionPart(this.region),
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.timeOffsetPart(offset),
this.influxV9Utils.groupByPart(),
this.influxV9Utils.limitPart(limit));
this.influxV9Utils.groupByPart());
}
logger.debug("Measurements query: {}", q);
@ -128,24 +127,55 @@ public class InfluxV9MeasurementRepo implements MeasurementRepo {
return q;
}
private List<Measurements> measurementsList(Series series) {
private List<Measurements> measurementsList(Series series, String offsetStr, int limit) {
List<Measurements> measurementsList = new LinkedList<>();
if (!series.isEmpty()) {
int offsetId = 0;
String offsetTimestamp = "1970-01-01T00:00:00.000Z";
if (offsetStr != null) {
List<String> offsets = influxV9Utils.parseMultiOffset(offsetStr);
if (offsets.size() > 1) {
offsetId = Integer.parseInt(offsets.get(0));
offsetTimestamp = offsets.get(1);
} else {
offsetId = 0;
offsetTimestamp = offsets.get(0);
}
}
int remaining_limit = limit;
int index = 0;
for (Serie serie : series.getSeries()) {
if (index < offsetId || remaining_limit <= 0) {
index++;
continue;
}
Measurements measurements =
new Measurements(serie.getName(),
influxV9Utils.filterPrivateTags(serie.getTags()));
measurements.setId(Integer.toString(index));
for (String[] values : serie.getValues()) {
if (remaining_limit <= 0) {
break;
}
final String timestamp = influxV9Utils.threeDigitMillisTimestamp(values[0]);
measurements.addMeasurement(
new Object[]{timestamp, Double.parseDouble(values[1]), getValueMeta(values)});
if (timestamp.compareTo(offsetTimestamp) > 0 || index > offsetId) {
measurements.addMeasurement(
new Object[]{timestamp, Double.parseDouble(values[1]), getValueMeta(values)});
remaining_limit--;
}
}
measurementsList.add(measurements);
if (measurements.getMeasurements().size() > 0) {
measurementsList.add(measurements);
}
index++;
}
}
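A worked example of the paging logic above, assuming a compound offset (all values illustrative):

// offset = "1_2016-05-10T10:00:00.000Z", limit = 2
// - the series at index 0 is skipped entirely (index < offsetId);
// - in series 1 (index == offsetId), only measurements with a timestamp
//   strictly greater than 2016-05-10T10:00:00.000Z count against the limit;
// - in later series every measurement counts, until remaining_limit reaches 0;
// - any series that contributed no measurements is dropped from the result.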

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
* Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
@ -67,16 +67,16 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
public List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime,
List<String> statistics, int period, String offset, int limit,
Boolean mergeMetricsFlag) throws Exception {
Boolean mergeMetricsFlag, String groupBy) throws Exception {
String q = buildQuery(tenantId, name, dimensions, startTime, endTime,
statistics, period, offset, limit, mergeMetricsFlag);
statistics, period, offset, limit, mergeMetricsFlag, groupBy);
String r = this.influxV9RepoReader.read(q);
Series series = this.objectMapper.readValue(r, Series.class);
List<Statistics> statisticsList = statisticslist(series);
List<Statistics> statisticsList = statisticslist(series, offset, limit);
logger.debug("Found {} metric definitions matching query", statisticsList.size());
@ -86,7 +86,8 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
private String buildQuery(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, DateTime endTime, List<String> statistics,
int period, String offset, int limit, Boolean mergeMetricsFlag)
int period, String offset, int limit, Boolean mergeMetricsFlag,
String groupBy)
throws Exception {
String q;
@ -94,7 +95,7 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
if (Boolean.TRUE.equals(mergeMetricsFlag)) {
q = String.format("select %1$s %2$s "
+ "where %3$s %4$s %5$s %6$s %7$s %8$s %9$s %10$s",
+ "where %3$s %4$s %5$s %6$s %7$s %8$s %9$s",
funcPart(statistics),
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
@ -102,19 +103,20 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.timeOffsetPart(offset),
this.influxV9Utils.periodPart(period),
this.influxV9Utils.limitPart(limit));
} else {
if (!this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
if (!"*".equals(groupBy) &&
!this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
throw new MultipleMetricsException(name, dimensions);
}
q = String.format("select %1$s %2$s "
+ "where %3$s %4$s %5$s %6$s %7$s %8$s %9$s %10$s slimit 1",
+ "where %3$s %4$s %5$s %6$s %7$s %8$s",
funcPart(statistics),
this.influxV9Utils.namePart(name, true),
this.influxV9Utils.privateTenantIdPart(tenantId),
@ -122,9 +124,7 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
this.influxV9Utils.startTimePart(startTime),
this.influxV9Utils.dimPart(dimensions),
this.influxV9Utils.endTimePart(endTime),
this.influxV9Utils.timeOffsetPart(offset),
this.influxV9Utils.periodPartWithGroupBy(period),
this.influxV9Utils.limitPart(limit));
this.influxV9Utils.periodPartWithGroupBy(period));
}
logger.debug("Statistics query: {}", q);
@ -132,23 +132,57 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
return q;
}
private List<Statistics> statisticslist(Series series) {
private List<Statistics> statisticslist(Series series, String offsetStr, int limit) {
int offsetId = 0;
String offsetTimestamp = "1970-01-01T00:00:00.000Z";
if (offsetStr != null) {
List<String> offsets = influxV9Utils.parseMultiOffset(offsetStr);
if (offsets.size() > 1) {
offsetId = Integer.parseInt(offsets.get(0));
offsetTimestamp = offsets.get(1);
} else {
offsetId = 0;
offsetTimestamp = offsets.get(0);
}
}
List<Statistics> statisticsList = new LinkedList<>();
if (!series.isEmpty()) {
int remaining_limit = limit;
int index = 0;
for (Serie serie : series.getSeries()) {
if (index < offsetId || remaining_limit <= 0) {
index++;
continue;
}
Statistics statistics = new Statistics(serie.getName(),
this.influxV9Utils.filterPrivateTags(serie.getTags()),
Arrays.asList(translateNames(serie.getColumns())));
statistics.setId(Integer.toString(index));
for (Object[] values : serie.getValues()) {
statistics.addStatistics(buildValsList(values));
for (Object[] valueObjects : serie.getValues()) {
if (remaining_limit <= 0) {
break;
}
List<Object> values = buildValsList(valueObjects);
if (((String) values.get(0)).compareTo(offsetTimestamp) > 0 || index > offsetId) {
statistics.addStatistics(values);
remaining_limit--;
}
}
statisticsList.add(statistics);
if (statistics.getStatistics().size() > 0) {
statisticsList.add(statistics);
}
index++;
}

View File

@ -31,6 +31,10 @@ import monasca.common.util.Conversions;
public class InfluxV9Utils {
private static final Pattern sqlUnsafePattern = Pattern.compile("^.*('|;|\")+.*$");
static final String OFFSET_SEPARATOR = "_";
static final Splitter
offsetSplitter = Splitter.on(OFFSET_SEPARATOR).omitEmptyStrings().trimResults();
public InfluxV9Utils() {
}
@ -283,4 +287,8 @@ public class InfluxV9Utils {
}
return timestamp;
}
public List<String> parseMultiOffset(String offsetStr) {
return offsetSplitter.splitToList(offsetStr);
}
}
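A minimal sketch of the compound offset format that parseMultiOffset assumes (sample values are illustrative): an offset is either a bare ISO-8601 timestamp, or a series index and a timestamp joined by the '_' separator.

InfluxV9Utils utils = new InfluxV9Utils();

// Compound form: an index into the list of series, then the timestamp
// offset within that series.
List<String> parts = utils.parseMultiOffset("3_2016-05-10T10:02:48.000Z");
// parts -> ["3", "2016-05-10T10:02:48.000Z"]

// Plain form: just a timestamp; callers default the series index to 0.
List<String> single = utils.parseMultiOffset("2016-05-10T10:02:48.000Z");
// single -> ["2016-05-10T10:02:48.000Z"]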

View File

@ -1,5 +1,4 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
/* Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
@ -21,12 +20,9 @@ import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -52,18 +48,16 @@ public class MeasurementVerticaRepoImpl implements MeasurementRepo {
public static final DateTimeFormatter DATETIME_FORMATTER =
ISODateTimeFormat.dateTime().withZoneUTC();
public static final ByteBuffer EMPTY_DEF_ID = ByteBuffer.wrap(new byte[0]);
private static final String FIND_BY_METRIC_DEF_SQL =
"select mes.definition_dimensions_id, "
"SELECT to_hex(mes.definition_dimensions_id) as def_dims_id, "
+ "mes.time_stamp, mes.value, mes.value_meta "
+ "from MonMetrics.Measurements mes "
+ "where to_hex(mes.definition_dimensions_id) "
+ "%s " // defdim IN clause here
+ "FROM MonMetrics.Measurements mes "
+ "WHERE to_hex(mes.definition_dimensions_id) %s " // Sub select query
+ "%s " // endtime and offset here
+ "and mes.time_stamp >= :startTime "
+ "order by mes.time_stamp ASC "
+ "limit :limit";
+ "AND mes.time_stamp >= :startTime "
+ "ORDER BY %s" // sort by id if not merging
+ "mes.time_stamp ASC "
+ "LIMIT :limit";
private static final String
DEFDIM_IDS_SELECT =
@ -93,36 +87,16 @@ public class MeasurementVerticaRepoImpl implements MeasurementRepo {
@Nullable DateTime endTime,
@Nullable String offset,
int limit,
Boolean mergeMetricsFlag) throws MultipleMetricsException {
Boolean mergeMetricsFlag,
String groupBy) throws MultipleMetricsException {
try (Handle h = db.open()) {
Map<ByteBuffer, Measurements> results = new LinkedHashMap<>();
Map<String, Measurements> results = findDefIds(h, tenantId, name, dimensions);
Set<byte[]> defDimIdSet = new HashSet<>();
Set<String> defDimsIdSet = results.keySet();
String defDimSql = String.format(
DEFDIM_IDS_SELECT,
MetricQueries.buildMetricDefinitionSubSql(name, dimensions));
Query<Map<String, Object>> query = h.createQuery(defDimSql).bind("tenantId", tenantId);
if (name != null && !name.isEmpty()) {
query.bind("name", name);
}
MetricQueries.bindDimensionsToQuery(query, dimensions);
List<Map<String, Object>> rows = query.list();
for (Map<String, Object> row : rows) {
byte[] defDimId = (byte[]) row.get("id");
defDimIdSet.add(defDimId);
}
if (!Boolean.TRUE.equals(mergeMetricsFlag) && (defDimIdSet.size() > 1)) {
if (!"*".equals(groupBy) && !Boolean.TRUE.equals(mergeMetricsFlag) && (defDimsIdSet.size() > 1)) {
throw new MultipleMetricsException(name, dimensions);
}
@ -131,87 +105,204 @@ public class MeasurementVerticaRepoImpl implements MeasurementRepo {
// we won't have any measurements, let's just bail
// now.
//
if (defDimIdSet.size() == 0) {
if (defDimsIdSet.size() == 0) {
return new ArrayList<>(results.values());
}
String defDimInClause = MetricQueries.createDefDimIdInClause(defDimIdSet);
String defDimInClause = MetricQueries.createDefDimIdInClause(defDimsIdSet);
StringBuilder sb = new StringBuilder();
if (endTime != null) {
sb.append(" and time_stamp <= :endTime");
sb.append(" and mes.time_stamp <= :endTime");
}
if (offset != null && !offset.isEmpty()) {
sb.append(" and time_stamp > :offset");
if (Boolean.TRUE.equals(mergeMetricsFlag)) {
sb.append(" and mes.time_stamp > :offset_timestamp ");
} else {
sb.append(" and (TO_HEX(mes.definition_dimensions_id) > :offset_id "
+ "or (TO_HEX(mes.definition_dimensions_id) = :offset_id and mes.time_stamp > :offset_timestamp)) ");
}
}
String sql = String.format(FIND_BY_METRIC_DEF_SQL, defDimInClause, sb);
String orderById = "";
if (Boolean.FALSE.equals(mergeMetricsFlag)) {
query = h.createQuery(sql)
orderById = "mes.definition_dimensions_id,";
}
String sql = String.format(FIND_BY_METRIC_DEF_SQL, defDimInClause, sb, orderById);
Query<Map<String, Object>> query = h.createQuery(sql)
.bind("startTime", new Timestamp(startTime.getMillis()))
.bind("limit", limit + 1);
if (endTime != null) {
logger.debug("binding endtime: {}", endTime);
query.bind("endTime", new Timestamp(endTime.getMillis()));
}
if (offset != null && !offset.isEmpty()) {
logger.debug("binding offset: {}", offset);
query.bind("offset", new Timestamp(DateTime.parse(offset).getMillis()));
MetricQueries.bindOffsetToQuery(query, offset);
}
rows = query.list();
List<Map<String, Object>> rows = query.list();
for (Map<String, Object> row : rows) {
if (rows.size() == 0) {
return new ArrayList<>();
}
String timestamp = DATETIME_FORMATTER.print(((Timestamp) row.get("time_stamp")).getTime());
if ("*".equals(groupBy)) {
byte[] defdimsIdBytes = (byte[]) row.get("definition_dimensions_id");
ByteBuffer defdimsId = ByteBuffer.wrap(defdimsIdBytes);
for (Map<String, Object> row : rows) {
double value = (double) row.get("value");
String defDimsId = (String) row.get("def_dims_id");
String valueMetaString = (String) row.get("value_meta");
Object[] measurement = parseRow(row);
Map<String, String> valueMetaMap = new HashMap<>();
if (valueMetaString != null && !valueMetaString.isEmpty()) {
try {
valueMetaMap = this.objectMapper.readValue(valueMetaString, VALUE_META_TYPE);
} catch (IOException e) {
logger.error("failed to parse value metadata: {}", valueMetaString);
}
results.get(defDimsId).addMeasurement(measurement);
}
Measurements measurements = (Boolean.TRUE.equals(mergeMetricsFlag)) ? results.get(EMPTY_DEF_ID) : results.get(defdimsId);
} else {
if (measurements == null) {
if (Boolean.TRUE.equals(mergeMetricsFlag)) {
measurements =
new Measurements(name, new HashMap<String, String>(),
new ArrayList<Object[]>());
String firstDefDimsId = (String) rows.get(0).get("def_dims_id");
Measurements firstMeasurement = results.get(firstDefDimsId);
// clear dimensions
firstMeasurement.setDimensions(new HashMap<String, String>());
results.clear();
results.put(firstDefDimsId, firstMeasurement);
for (Map<String, Object> row : rows) {
Object[] measurement = parseRow(row);
results.get(firstDefDimsId).addMeasurement(measurement);
results.put(EMPTY_DEF_ID, measurements);
} else {
measurements =
new Measurements(name, MetricQueries.dimensionsFor(h, (byte[]) defDimIdSet.toArray()[0]),
new ArrayList<Object[]>());
results.put(defdimsId, measurements);
}
}
measurements.addMeasurement(new Object[] {timestamp, value, valueMetaMap});
}
// clean up any empty measurements; iterate over a copy of the key set so
// removing entries does not invalidate the iterator
for (String key : new ArrayList<>(results.keySet())) {
if (results.get(key).getMeasurements().size() == 0) {
results.remove(key);
}
}
return new ArrayList<>(results.values());
}
}
private Object[] parseRow(Map<String, Object> row) {
String timestamp = DATETIME_FORMATTER.print(((Timestamp) row.get("time_stamp")).getTime());
double value = (double) row.get("value");
String valueMetaString = (String) row.get("value_meta");
Map<String, String> valueMetaMap = new HashMap<>();
if (valueMetaString != null && !valueMetaString.isEmpty()) {
try {
valueMetaMap = this.objectMapper.readValue(valueMetaString, VALUE_META_TYPE);
} catch (IOException e) {
logger.error("failed to parse value metadata: {}", valueMetaString);
}
}
return new Object[]{timestamp, value, valueMetaMap};
}
private Map<String, Measurements> findDefIds(Handle h, String tenantId,
String name, Map<String, String> dimensions) {
String defDimSql = String.format(
MetricQueries.FIND_METRIC_DEFS_SQL,
MetricQueries.buildMetricDefinitionSubSql(name, dimensions));
Query<Map<String, Object>> query = h.createQuery(defDimSql).bind("tenantId", tenantId);
MetricQueries.bindDimensionsToQuery(query, dimensions);
if (name != null && !name.isEmpty()) {
query.bind("name", name);
}
List<Map<String, Object>> rows = query.list();
Map<String, Measurements> stringIdMap = new HashMap<>();
String currentDefDimId = null;
Map<String, String> dims = null;
for (Map<String, Object> row : rows) {
String defDimId = (String) row.get("defDimsId");
String defName = (String) row.get("name");
String dimName = (String) row.get("dName");
String dimValue = (String) row.get("dValue");
if (defDimId == null || !defDimId.equals(currentDefDimId)) {
currentDefDimId = defDimId;
dims = new HashMap<>();
if (dimName != null && dimValue != null)
dims.put(dimName, dimValue);
Measurements measurements = new Measurements();
measurements.setId(defDimId);
measurements.setName(defName);
measurements.setDimensions(dims);
stringIdMap.put(currentDefDimId, measurements);
} else {
if (dimName != null && dimValue != null)
dims.put(dimName, dimValue);
}
}
return stringIdMap;
}
}

View File

@ -44,16 +44,6 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
logger =
LoggerFactory.getLogger(MetricDefinitionVerticaRepoImpl.class);
private static final String FIND_METRIC_DEFS_SQL =
"SELECT defDims.id as defDimsId, def.name, dims.name as dName, dims.value AS dValue "
+ "FROM MonMetrics.Definitions def "
+ "JOIN MonMetrics.DefinitionDimensions defDims ON def.id = defDims.definition_id "
// Outer join needed in case there are no dimensions for a definition.
+ "LEFT OUTER JOIN MonMetrics.Dimensions dims ON dims.dimension_set_id = defDims"
+ ".dimension_set_id "
+ "WHERE defDims.id in (%s) "
+ "ORDER BY defDims.id ASC";
private static final String METRIC_DEF_SUB_QUERY =
"SELECT defDimsSub.id "
+ "FROM MonMetrics.Definitions defSub "
@ -84,7 +74,7 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
+ "ORDER BY max_id ASC %s"; // Limit goes here.
private static final String DEFDIM_IDS_SELECT =
"SELECT defDims.id "
"SELECT to_hex(defDims.id) AS id "
+ "FROM MonMetrics.Definitions def, MonMetrics.DefinitionDimensions defDims "
+ "WHERE defDims.definition_id = def.id "
+ "AND def.tenant_id = :tenantId "
@ -204,21 +194,21 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
List<MetricDefinition> metricDefs = new ArrayList<>(rows.size());
byte[] currentDefDimId = null;
String currentDefDimId = null;
Map<String, String> dims = null;
for (Map<String, Object> row : rows) {
byte[] defDimId = (byte[]) row.get("defdimsid");
String defDimId = (String) row.get("defDimsId");
String metricName = (String) row.get("name");
String dimName = (String) row.get("dname");
String dimName = (String) row.get("dName");
String dimValue = (String) row.get("dvalue");
String dimValue = (String) row.get("dValue");
if (defDimId == null || !Arrays.equals(currentDefDimId, defDimId)) {
if (defDimId == null || !defDimId.equals(currentDefDimId)) {
currentDefDimId = defDimId;
@ -231,7 +221,7 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
}
MetricDefinition m = new MetricDefinition(metricName, dims);
m.setId(Hex.encodeHexString(defDimId));
m.setId(defDimId);
metricDefs.add(m);
@ -284,7 +274,7 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
String timeInClause = createTimeInClause(h, startTime, endTime, tenantId, name, dimensions);
String sql =
String.format(FIND_METRIC_DEFS_SQL,
String.format(MetricQueries.FIND_METRIC_DEFS_SQL,
String.format(METRIC_DEF_SUB_QUERY,
namePart,
offsetPart,
@ -344,7 +334,7 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
return "";
}
Set<byte[]> defDimIdSet = new HashSet<>();
Set<String> defDimIdSet = new HashSet<>();
String namePart = "";
@ -368,7 +358,7 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
List<Map<String, Object>> rows = query.list();
for (Map<String, Object> row : rows) {
byte[] defDimId = (byte[]) row.get("id");
String defDimId = (String) row.get("id");
defDimIdSet.add(defDimId);
}

View File

@ -17,22 +17,32 @@ package monasca.api.infrastructure.persistence.vertica;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import java.sql.Timestamp;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.codec.binary.Hex;
import org.skife.jdbi.v2.Handle;
import org.joda.time.DateTime;
import org.skife.jdbi.v2.Query;
import monasca.common.persistence.SqlQueries;
/**
* Vertica utilities for building metric queries.
*/
final class MetricQueries {
private static Splitter BAR_SPLITTER = Splitter.on('|').omitEmptyStrings().trimResults();
static final Splitter BAR_SPLITTER = Splitter.on('|').omitEmptyStrings().trimResults();
static final char OFFSET_SEPARATOR = '_';
static final Splitter offsetSplitter = Splitter.on(OFFSET_SEPARATOR).omitEmptyStrings().trimResults();
static final String FIND_METRIC_DEFS_SQL =
"SELECT TO_HEX(defDims.id) as defDimsId, def.name, dims.name as dName, dims.value AS dValue "
+ "FROM MonMetrics.Definitions def "
+ "JOIN MonMetrics.DefinitionDimensions defDims ON def.id = defDims.definition_id "
// Outer join needed in case there are no dimensions for a definition.
+ "LEFT OUTER JOIN MonMetrics.Dimensions dims ON dims.dimension_set_id = defDims"
+ ".dimension_set_id "
+ "WHERE defDims.id in (%s) "
+ "ORDER BY defDims.id ASC";
static final String METRIC_DEF_SUB_SQL =
"SELECT defDimsSub.id "
@ -119,7 +129,7 @@ final class MetricQueries {
Map.Entry<String, String> entry = it.next();
query.bind("dname" + i, entry.getKey());
if (!Strings.isNullOrEmpty(entry.getValue())) {
List<String> values = Splitter.on('|').splitToList(entry.getValue());
List<String> values = BAR_SPLITTER.splitToList(entry.getValue());
if (values.size() > 1) {
for (int j = 0; j < values.size(); j++) {
query.bind("dvalue" + i + '_' + j, values.get(j));
@ -133,36 +143,37 @@ final class MetricQueries {
}
}
static Map<String, String> dimensionsFor(Handle handle, byte[] dimensionSetId) {
return SqlQueries.keyValuesFor(handle,
"select name, value from MonMetrics.Dimensions as d "
+ "join MonMetrics.DefinitionDimensions as dd "
+ "on d.dimension_set_id = dd.dimension_set_id "
+ "where" + " dd.id = ?", dimensionSetId);
}
static String createDefDimIdInClause(Set<byte[]> defDimIdSet) {
static String createDefDimIdInClause(Set<String> defDimIdSet) {
StringBuilder sb = new StringBuilder("IN ");
sb.append("(");
boolean first = true;
for (byte[] defDimId : defDimIdSet) {
for (String defDimId : defDimIdSet) {
if (first) {
first = false;
} else {
sb.append(",");
}
sb.append("'" + Hex.encodeHexString(defDimId) + "'");
sb.append("'").append(defDimId).append("'");
}
sb.append(") ");
return sb.toString();
}
static void bindOffsetToQuery(Query<Map<String, Object>> query, String offset) {
List<String> offsets = offsetSplitter.splitToList(offset);
if (offsets.size() > 1) {
query.bind("offset_id", offsets.get(0));
query.bind("offset_timestamp",
new Timestamp(DateTime.parse(offsets.get(1)).getMillis()));
} else {
query.bind("offset_timestamp",
new Timestamp(DateTime.parse(offsets.get(0)).getMillis()));
}
}
}
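A sketch of how the compound offset flows into the Vertica keyset-pagination predicate (the hex id is illustrative; the binds and predicate form are taken from the code above):

// Given offset "0afe17b8_2016-05-10T10:02:48.000Z", bindOffsetToQuery binds:
//   :offset_id        -> "0afe17b8"  (hex definition_dimensions_id)
//   :offset_timestamp -> 2016-05-10 10:02:48.0
// The repositories pair these binds with a predicate of the form:
//   AND (TO_HEX(definition_dimensions_id) > :offset_id
//        OR (TO_HEX(definition_dimensions_id) = :offset_id
//            AND time_stamp > :offset_timestamp))
// so a subsequent page resumes after the last (metric, timestamp) pair returned.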

View File

@ -28,7 +28,6 @@ import org.slf4j.LoggerFactory;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@ -46,14 +45,6 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
public static final DateTimeFormatter DATETIME_FORMATTER =
ISODateTimeFormat.dateTime().withZoneUTC();
private static final String FIND_BY_METRIC_DEF_SQL =
"select defdims.id, def.name, d.name as dname, d.value as dvalue "
+ "from MonMetrics.Definitions def, MonMetrics.DefinitionDimensions defdims "
+ "left outer join MonMetrics.Dimensions d on d.dimension_set_id = defdims.dimension_set_id "
+ "where def.id = defdims.definition_id "
+ "and defdims.id in (%s) "
+ "order by defdims.id ASC";
private final DBI db;
@Inject
@ -74,7 +65,8 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
int period,
String offset,
int limit,
Boolean mergeMetricsFlag) throws MultipleMetricsException {
Boolean mergeMetricsFlag,
String groupBy) throws MultipleMetricsException {
List<Statistics> statisticsList = new ArrayList<>();
@ -83,7 +75,7 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
try (Handle h = db.open()) {
Map<byte[], Statistics> byteMap = findDefIds(h, tenantId, name, dimensions);
Map<String, Statistics> byteMap = findDefIds(h, tenantId, name, dimensions);
if (byteMap.isEmpty()) {
@ -91,7 +83,7 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
}
if (!Boolean.TRUE.equals(mergeMetricsFlag) && byteMap.keySet().size() > 1) {
if (!"*".equals(groupBy) && !Boolean.TRUE.equals(mergeMetricsFlag) && byteMap.keySet().size() > 1) {
throw new MultipleMetricsException(name, dimensions);
@ -99,7 +91,8 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
List<List<Object>> statisticsListList = new ArrayList<>();
String sql = createQuery(byteMap.keySet(), period, startTime, endTime, offset, statisticsCols);
String sql = createQuery(byteMap.keySet(), period, startTime, endTime, offset, statisticsCols,
groupBy, mergeMetricsFlag);
logger.debug("vertica sql: {}", sql);
@ -112,35 +105,63 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
if (offset != null && !offset.isEmpty()) {
logger.debug("binding offset: {}", offset);
query.bind("offset", new Timestamp(DateTime.parse(offset).getMillis()));
MetricQueries.bindOffsetToQuery(query, offset);
}
List<Map<String, Object>> rows = query.list();
for (Map<String, Object> row : rows) {
if ("*".equals(groupBy)) {
List<Object> statisticsRow = parseRow(row);
for (Map<String, Object> row : rows) {
statisticsListList.add(statisticsRow);
List<Object> statisticsRow = parseRow(row);
String defDimsId = (String) row.get("id");
byteMap.get(defDimsId).addStatistics(statisticsRow);
}
for (Map.Entry<String, Statistics> entry : byteMap.entrySet()) {
Statistics statistics = entry.getValue();
statistics.setColumns(statisticsColumns);
if (statistics.getStatistics().size() > 0) {
statisticsList.add(statistics);
}
}
} else {
for (Map<String, Object> row : rows) {
List<Object> statisticsRow = parseRow(row);
statisticsListList.add(statisticsRow);
}
// Just use the first entry in the byteMap to get the def name and dimensions.
Statistics statistics = byteMap.entrySet().iterator().next().getValue();
statistics.setColumns(statisticsColumns);
if (Boolean.TRUE.equals(mergeMetricsFlag) && byteMap.keySet().size() > 1) {
// Wipe out the dimensions.
statistics.setDimensions(new HashMap<String, String>());
}
statistics.setStatistics(statisticsListList);
statisticsList.add(statistics);
}
}
return statisticsList;
@ -180,20 +201,18 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
if (sum != null) {
statisticsRow.add(sum);
}
return statisticsRow;
}
private Map<byte[], Statistics> findDefIds(
private Map<String, Statistics> findDefIds(
Handle h,
String tenantId,
String name,
Map<String, String> dimensions) {
List<byte[]> bytes = new ArrayList<>();
String sql = String.format(
FIND_BY_METRIC_DEF_SQL,
MetricQueries.FIND_METRIC_DEFS_SQL,
MetricQueries.buildMetricDefinitionSubSql(name, dimensions));
Query<Map<String, Object>> query =
@ -212,23 +231,23 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
List<Map<String, Object>> rows = query.list();
Map<byte[], Statistics> byteIdMap = new HashMap<>();
Map<String, Statistics> byteIdMap = new HashMap<>();
byte[] currentDefDimId = null;
String currentDefDimId = null;
Map<String, String> dims = null;
for (Map<String, Object> row : rows) {
byte[] defDimId = (byte[]) row.get("id");
String defDimId = (String) row.get("defDimsId");
String defName = (String) row.get("name");
String dimName = (String) row.get("dname");
String dimName = (String) row.get("dName");
String dimValue = (String) row.get("dvalue");
String dimValue = (String) row.get("dValue");
if (defDimId == null || !Arrays.equals(currentDefDimId, defDimId)) {
if (defDimId == null || !defDimId.equals(currentDefDimId)) {
currentDefDimId = defDimId;
@ -238,6 +257,8 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
Statistics statistics = new Statistics();
statistics.setId(defDimId);
statistics.setName(defName);
statistics.setDimensions(dims);
@ -251,8 +272,6 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
}
}
bytes.add(currentDefDimId);
return byteIdMap;
}
@ -270,30 +289,44 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
}
private String createQuery(
Set<byte[]> defDimIdSet,
Set<String> defDimIdSet,
int period,
DateTime startTime,
DateTime endTime,
String offset,
List<String> statistics) {
List<String> statistics,
String groupBy,
Boolean mergeMetricsFlag) {
StringBuilder sb = new StringBuilder();
sb.append("SELECT " + createColumnsStr(statistics));
sb.append("SELECT ");
if (groupBy != null && !groupBy.isEmpty()) {
sb.append(" to_hex(definition_dimensions_id) AS id, ");
}
sb.append(createColumnsStr(statistics));
if (period >= 1) {
sb.append("Time_slice(time_stamp, " + period);
sb.append(", 'SECOND', 'END') AS time_interval");
sb.append("Time_slice(time_stamp, ").append(period);
sb.append(", 'SECOND', 'START') AS time_interval");
}
sb.append(" FROM MonMetrics.Measurements ");
String inClause = MetricQueries.createDefDimIdInClause(defDimIdSet);
sb.append("WHERE to_hex(definition_dimensions_id) " + inClause);
sb.append(createWhereClause(startTime, endTime, offset));
sb.append("WHERE to_hex(definition_dimensions_id) ").append(inClause);
sb.append(createWhereClause(startTime, endTime, offset, mergeMetricsFlag));
if (period >= 1) {
sb.append("group by Time_slice(time_stamp, " + period);
sb.append(", 'SECOND', 'END') order by time_interval");
sb.append(" group by ");
if (groupBy != null && !groupBy.isEmpty()) {
sb.append("definition_dimensions_id,");
}
sb.append("time_interval ");
sb.append(" order by ");
if (groupBy != null && !groupBy.isEmpty()) {
sb.append("to_hex(definition_dimensions_id),");
}
sb.append("time_interval ");
}
sb.append(" limit :limit");
@ -304,7 +337,8 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
private String createWhereClause(
DateTime startTime,
DateTime endTime,
String offset) {
String offset,
Boolean mergeMetricsFlag) {
String s = "";
@ -316,7 +350,12 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
if (offset != null && !offset.isEmpty()) {
s += " and time_stamp > :offset ";
if (Boolean.FALSE.equals(mergeMetricsFlag)) {
s += " AND (TO_HEX(definition_dimensions_id) > :offset_id "
+ "OR (TO_HEX(definition_dimensions_id) = :offset_id AND time_stamp > :offset_timestamp)) ";
} else {
s += " AND time_stamp > :offset_timestamp ";
}
}

View File

@ -258,37 +258,46 @@ public final class Links {
if (elements != null && !elements.isEmpty()) {
Measurements m = elements.get(0);
int remaining_limit = limit;
if (m != null) {
for (int i = 0; i < elements.size(); i++) {
List<Object[]> l = m.getMeasurements();
Measurements m = elements.get(i);
if (l.size() > limit) {
if (m != null) {
String offset = (String) l.get(limit - 1)[0];
List<Object[]> l = m.getMeasurements();
m.setId(offset);
if (l.size() >= remaining_limit) {
paged.links.add(getNextLink(offset, uriInfo));
String offset = m.getId();
// Truncate the list. Normally this will just truncate one extra element.
l = l.subList(0, limit);
m.setMeasurements(l);
if (offset != null) {
offset += '_' + (String) l.get(remaining_limit - 1)[0];
} else {
offset = (String) l.get(remaining_limit - 1)[0];
}
paged.links.add(getNextLink(offset, uriInfo));
// Truncate the measurement list. Normally this will just truncate one extra element.
l = l.subList(0, remaining_limit);
m.setMeasurements(l);
// Truncate the elements list
elements = elements.subList(0, i + 1);
} else {
remaining_limit -= l.size();
}
// Check if there are any elements.
if (l.size() > 0) {
// Set the id to the last date in the list.
m.setId((String) l.get(l.size() - 1)[0]);
}
paged.elements = elements;
} else {
paged.elements = new ArrayList<>();
}
} else {
@ -316,37 +325,46 @@ public final class Links {
if (elements != null && !elements.isEmpty()) {
Statistics s = elements.get(0);
int remaining_limit = limit;
if (s != null) {
for (int i = 0; i < elements.size(); i++) {
List<List<Object>> l = s.getStatistics();
Statistics s = elements.get(i);
if (l.size() > limit) {
if (s != null) {
String offset = (String) l.get(limit - 1).get(0);
List<List<Object>> l = s.getStatistics();
s.setId(offset);
if (l.size() >= remaining_limit) {
paged.links.add(getNextLink(offset, uriInfo));
String offset = s.getId();
// Truncate the list. Normally this will just truncate one extra element.
l = l.subList(0, limit);
s.setStatistics(l);
if (offset != null) {
offset += '_' + (String) l.get(remaining_limit - 1).get(0);
} else {
offset = (String) l.get(remaining_limit - 1).get(0);
}
paged.links.add(getNextLink(offset, uriInfo));
// Truncate the measurement list. Normally this will just truncate one extra element.
l = l.subList(0, remaining_limit);
s.setStatistics(l);
// Truncate the elements list
elements = elements.subList(0, i + 1);
} else {
remaining_limit -= l.size();
}
// Check if there are any elements.
if (l.size() > 0) {
// Set the id to the last date in the list.
s.setId((String) l.get(l.size() - 1).get(0));
}
paged.elements = elements;
} else {
paged.elements = new ArrayList<>();
}
} else {

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
* Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
@ -73,7 +73,8 @@ public class MeasurementResource {
@QueryParam("offset") String offset,
@QueryParam("limit") String limit,
@QueryParam("tenant_id") String crossTenantId,
@QueryParam("merge_metrics") String mergeMetricsFlag) throws Exception {
@QueryParam("merge_metrics") String mergeMetricsFlag,
@QueryParam("group_by") String groupBy) throws Exception {
// Validate query parameters
DateTime startTime = Validation.parseAndValidateDate(startTimeStr, "start_time", true);
@ -85,6 +86,7 @@ public class MeasurementResource {
.parseAndValidateDimensions(dimensionsStr);
MetricNameValidation.validate(name, true);
Boolean mergeMetricsFlagBool = Validation.validateAndParseMergeMetricsFlag(mergeMetricsFlag);
Validation.validateMetricsGroupBy(groupBy);
String queryTenantId = Validation.getQueryProject(roles, crossTenantId, tenantId, admin_role);
@ -96,7 +98,8 @@ public class MeasurementResource {
endTime,
offset,
paging_limit,
mergeMetricsFlagBool
mergeMetricsFlagBool,
groupBy
);
return Links.paginateMeasurements(paging_limit, resources, uriInfo);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
* Copyright (c) 2014,2016 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
@ -79,7 +79,8 @@ public class StatisticResource {
@QueryParam("offset") String offset,
@QueryParam("limit") String limit,
@QueryParam("tenant_id") String crossTenantId,
@QueryParam("merge_metrics") String mergeMetricsFlag) throws Exception {
@QueryParam("merge_metrics") String mergeMetricsFlag,
@QueryParam("group_by") String groupBy) throws Exception {
// Validate query parameters
Validation.validateNotNullOrEmpty(name, "name");
@ -95,6 +96,7 @@ public class StatisticResource {
.parseAndValidateDimensions(dimensionsStr);
MetricNameValidation.validate(name, true);
Boolean mergeMetricsFlagBool = Validation.validateAndParseMergeMetricsFlag(mergeMetricsFlag);
Validation.validateMetricsGroupBy(groupBy);
String queryTenantId = Validation.getQueryProject(roles, crossTenantId, tenantId, admin_role);
@ -102,7 +104,7 @@ public class StatisticResource {
repo.find(queryTenantId, name, dimensions, startTime, endTime,
statistics, period, offset,
this.persistUtils.getLimit(limit),
mergeMetricsFlagBool),
mergeMetricsFlagBool, groupBy),
uriInfo);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
* Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
@ -88,7 +88,7 @@ public class MeasurementVerticaRepositoryImplTest {
public void shouldFindWithoutDimensions() throws Exception {
Collection<Measurements> measurements =
repo.find("bob", "cpu_utilization", null, new DateTime(2014, 1, 1, 0, 0, 0), null, null, 1,
false);
false, null);
assertEquals(measurements.size(), 3);
}
@ -99,12 +99,12 @@ public class MeasurementVerticaRepositoryImplTest {
Collection<Measurements> measurements =
repo.find("bob", "cpu_utilization", dims, new DateTime(2014, 1, 1, 0, 0), null, null, 1,
false);
false, null);
assertEquals(measurements.size(), 2);
dims.put("flavor_id", "2");
measurements = repo.find("bob", "cpu_utilization", dims, new DateTime(2014, 1, 1, 0, 0), null, null, 1,
false);
false, null);
assertEquals(measurements.size(), 1);
}
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
* Copyright (c) 2014,2016 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
@ -58,7 +58,7 @@ public class StatisticResourceTest extends AbstractMonApiResourceTest {
.header("X-Tenant-Id", "abc").get(ClientResponse.class);
verify(statisticRepo).find(anyString(), anyString(), any(Map.class), any(DateTime.class),
any(DateTime.class), any(List.class), anyInt(), any(String.class), anyInt(),
anyBoolean());
anyBoolean(), anyString());
}
public void queryShouldThrowOnInvalidDateFormat() throws Exception {

View File

@ -194,6 +194,7 @@ class TestMeasurements(base.BaseMonascaTest):
self.assertRaises(exceptions.BadRequest,
self.monasca_client.list_measurements, query_parms)
@test.attr(type="gate")
def test_list_measurements_with_offset_limit(self):
query_parms = '?name=' + str(self._names_list[1]) + \
@ -209,20 +210,34 @@ class TestMeasurements(base.BaseMonascaTest):
self._verify_list_measurements_meas_len(measurements=measurements,
test_len=4)
for measurement_index in xrange(len(measurements) - 2):
for measurement_index in xrange(1, len(measurements) - 3):
max_limit = len(measurements) - measurement_index
# Get first offset from api
query_parms = '?name=' + str(self._names_list[1]) + \
'&merge_metrics=true&start_time=' + measurements[measurement_index - 1][0] + \
'&end_time=' + self._end_time + \
'&limit=1'
resp, response_body = self.monasca_client.list_measurements(query_parms)
next_link = None
for link in response_body['links']:
if link['rel'] == 'next':
next_link = link['href']
if not next_link:
self.fail("No next link returned with query parameters: {}".format(query_parms))
offset = helpers.get_query_param(next_link, "offset")
first_index = measurement_index + 1
for limit in xrange(1, max_limit):
first_index = measurement_index + 1
last_index = measurement_index + limit + 1
expected_measurements = measurements[first_index:last_index]
offset_timestamp = measurements[measurement_index][0]
query_parms = '?name=' + str(self._names_list[1]) + \
'&merge_metrics=true&start_time=' + \
self._start_time + '&end_time=' + \
self._end_time + '&offset=' + \
str(offset_timestamp) + '&limit=' + str(limit)
self._end_time + '&limit=' + str(limit) + \
'&offset=' + str(offset)
resp, response_body = self.monasca_client.list_measurements(
query_parms)
self._verify_list_measurements(resp, response_body)

View File

@ -13,6 +13,7 @@
# under the License.
import time
import datetime
from oslo_utils import timeutils
@ -231,21 +232,34 @@ class TestStatistics(base.BaseMonascaTest):
self.assertEqual(4, len(elements))
self.assertEqual(first_element, elements[0])
for index in xrange(1, 5):
max_limit = 5 - index
for index in xrange(1, 4):
max_limit = 4 - index
# Get first offset from api
query_parms = '?name=' + str(name) + \
'&merge_metrics=true&start_time=' + elements[index - 1][0] + \
'&end_time=' + end_time + \
'&limit=1'
resp, response_body = self.monasca_client.list_measurements(query_parms)
next_link = None
for link in response_body['links']:
if link['rel'] == 'next':
next_link = link['href']
if not next_link:
self.fail("No next link returned with query parameters: {}".format(query_parms))
offset = helpers.get_query_param(next_link, "offset")
# The Python API returns an exact timestamp, but the test needs a rounded number
offset_period_index = offset.find('.')
offset = offset[:offset_period_index] + 'Z'
for limit in xrange(1, max_limit):
offset_timestamp = start_timestamp + (1000 * index)
offset = timeutils.iso8601_from_timestamp(offset_timestamp / 1000)
last_index = index + limit
expected_elements = elements[index:last_index]
expected_elements = [elem for elem in elements if elem[0] > offset]
expected_elements = expected_elements[:limit]
query_parms = '?name=' + name + '&merge_metrics=true' + \
'&statistics=avg' + '&start_time=' + \
str(start_time) + '&end_time=' + \
str(end_time) + '&period=1' + '&limit=' + \
str(limit) + '&offset=' + str(offset)
resp, response_body = self.monasca_client.list_statistics(query_parms)
self.assertEqual(200, resp.status)
if not response_body['elements']: