Filter dimension queries by time

Reduce the number of definitions stored in memory
for statistics and measurements queries. This should
alleviate the memory issues.

Change-Id: Iccab1207f4d0dfb4bc2823d0bf8e963adc801bcc
This commit is contained in:
Ryan Brandt 2016-05-12 08:35:21 -06:00
parent f9b2208b13
commit 39a73ce741
10 changed files with 300 additions and 556 deletions

View File

@ -13,6 +13,7 @@
*/
package monasca.api.domain.model.measurement;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@ -23,18 +24,18 @@ import monasca.common.model.domain.common.AbstractEntity;
* Encapsulates a metric's measurements.
*/
public class Measurements extends AbstractEntity {
private static final String[] COLUMNS = new String[] {"timestamp", "value", "value_meta"};
private static final List<String> COLUMNS = Arrays.asList("timestamp", "value", "value_meta");
private String name;
private Map<String, String> dimensions;
private final String[] columns = COLUMNS;
private List<Object[]> measurements;
protected String name;
protected Map<String, String> dimensions;
protected List<String> columns = COLUMNS;
protected List<List<Object>> measurements;
public Measurements() {
measurements = new LinkedList<>();
}
public Measurements(String name, Map<String, String> dimensions, List<Object[]> measurements) {
public Measurements(String name, Map<String, String> dimensions, List<List<Object>> measurements) {
this.name = name;
this.dimensions = dimensions;
this.measurements = measurements;
@ -46,7 +47,7 @@ public class Measurements extends AbstractEntity {
this.measurements = new LinkedList<>();
}
public void addMeasurement(Object[] measurement) {
public void addMeasurement(List<Object> measurement) {
measurements.add(measurement);
}
@ -74,10 +75,15 @@ public class Measurements extends AbstractEntity {
return false;
} else if (!name.equals(other.name))
return false;
if (columns == null) {
if (other.columns != null)
return false;
} else if (!columns.equals(other.columns))
return false;
return true;
}
public String[] getColumns() {
public List<String> getColumns() {
return columns;
}
@ -85,7 +91,7 @@ public class Measurements extends AbstractEntity {
return dimensions;
}
public List<Object[]> getMeasurements() {
public List<List<Object>> getMeasurements() {
return measurements;
}
@ -100,6 +106,7 @@ public class Measurements extends AbstractEntity {
result = prime * result + ((dimensions == null) ? 0 : dimensions.hashCode());
result = prime * result + ((measurements == null) ? 0 : measurements.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((columns == null) ? 0 : columns.hashCode());
return result;
}
@ -107,7 +114,7 @@ public class Measurements extends AbstractEntity {
this.dimensions = dimensions;
}
public void setMeasurements(List<Object[]> measurements) {
public void setMeasurements(List<List<Object>> measurements) {
this.measurements = measurements;
}

View File

@ -13,126 +13,42 @@
*/
package monasca.api.domain.model.statistic;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import monasca.common.model.domain.common.AbstractEntity;
import monasca.api.domain.model.measurement.Measurements;
/**
* Encapsulates a metric's statistics.
*/
public class Statistics extends AbstractEntity {
private String name;
private Map<String, String> dimensions;
private List<String> columns;
private List<List<Object>> statistics;
public class Statistics extends Measurements {
public Statistics() {
statistics = new ArrayList<>();
super();
}
public Statistics(String name, Map<String, String> dimensions, List<String> columns) {
this.name = name;
this.dimensions = dimensions;
super(name, dimensions);
this.columns = columns;
this.statistics = new LinkedList<>();
}
public void addValues(List<Object> value) {
statistics.add(value);
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Statistics other = (Statistics) obj;
if (dimensions == null) {
if (other.dimensions != null)
return false;
} else if (!dimensions.equals(other.dimensions))
return false;
if (columns == null) {
if (other.columns != null)
return false;
} else if (!columns.equals(other.columns))
return false;
if (statistics == null) {
if (other.statistics != null)
return false;
} else if (!statistics.equals(other.statistics))
return false;
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
return true;
}
public void setId(String id) {
this.id = id;
}
public void addStatistics(List<Object> statistics) {
this.statistics.add(statistics);
}
public List<String> getColumns() {
return columns;
}
public Map<String, String> getDimensions() {
return dimensions;
}
public String getName() {
return name;
}
public List<List<Object>> getStatistics() {
return statistics;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((dimensions == null) ? 0 : dimensions.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + ((statistics == null) ? 0 : statistics.hashCode());
result = prime * result + ((columns == null) ? 0 : columns.hashCode());
return result;
@JsonProperty("statistics")
public List<List<Object>> getMeasurements() {
return this.measurements;
}
public void setColumns(List<String> columns) {
this.columns = columns;
}
public void setDimensions(Map<String, String> dimensions) {
this.dimensions = dimensions;
}
public void setName(String name) {
this.name = name;
}
public void setStatistics(List<List<Object>> statistics) {
this.statistics = statistics;
}
@Override
public String toString() {
return String.format("Statistics [name=%s, dimensions=%s,statistics=%s]", name, dimensions,
statistics);
measurements);
}
}

View File

@ -23,6 +23,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@ -166,8 +167,9 @@ public class InfluxV9MeasurementRepo implements MeasurementRepo {
final String timestamp = influxV9Utils.threeDigitMillisTimestamp(values[0]);
if (timestamp.compareTo(offsetTimestamp) > 0 || index > offsetId) {
measurements.addMeasurement(
new Object[]{timestamp, Double.parseDouble(values[1]), getValueMeta(values)});
measurements.addMeasurement(Arrays.asList(timestamp,
Double.parseDouble(values[1]),
getValueMeta(values)));
remaining_limit--;
}
}

View File

@ -31,6 +31,7 @@ import javax.annotation.Nullable;
import monasca.api.ApiConfig;
import monasca.api.domain.exception.MultipleMetricsException;
import monasca.api.domain.model.measurement.Measurements;
import monasca.api.domain.model.statistic.StatisticRepo;
import monasca.api.domain.model.statistic.Statistics;
@ -174,12 +175,12 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
List<Object> values = buildValsList(valueObjects);
if (((String) values.get(0)).compareTo(offsetTimestamp) > 0 || index > offsetId) {
statistics.addStatistics(values);
statistics.addMeasurement(values);
remaining_limit--;
}
}
if (statistics.getStatistics().size() > 0) {
if (statistics.getMeasurements().size() > 0) {
statisticsList.add(statistics);
}
index++;

View File

@ -23,11 +23,10 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import javax.inject.Inject;
@ -54,19 +53,13 @@ public class MeasurementVerticaRepoImpl implements MeasurementRepo {
"SELECT %s to_hex(mes.definition_dimensions_id) as def_dims_id, "
+ "mes.time_stamp, mes.value, mes.value_meta "
+ "FROM MonMetrics.Measurements mes "
+ "WHERE to_hex(mes.definition_dimensions_id) %s " // Sub select query
+ "WHERE mes.time_stamp >= :startTime "
+ "%s " // endtime and offset here
+ "AND mes.time_stamp >= :startTime "
+ "AND TO_HEX(definition_dimensions_id) IN (%s) " // id subquery here
+ "ORDER BY %s" // sort by id if not merging
+ "mes.time_stamp ASC "
+ "LIMIT :limit";
private static final String
DEFDIM_IDS_SELECT =
"SELECT %s defDims.id "
+ "FROM MonMetrics.DefinitionDimensions defDims "
+ "WHERE defDims.id IN (%s)";
private final DBI db;
private final ObjectMapper objectMapper = new ObjectMapper();
@ -97,26 +90,11 @@ public class MeasurementVerticaRepoImpl implements MeasurementRepo {
try (Handle h = db.open()) {
Map<String, Measurements> results = findDefIds(h, tenantId, name, dimensions);
Map<String, Measurements> results = new HashMap<>();
Set<String> defDimsIdSet = results.keySet();
if (!"*".equals(groupBy) && !Boolean.TRUE.equals(mergeMetricsFlag) && (defDimsIdSet.size() > 1)) {
throw new MultipleMetricsException(name, dimensions);
if (!"*".equals(groupBy) && !Boolean.TRUE.equals(mergeMetricsFlag)) {
MetricQueries.checkForMultipleDefinitions(h, tenantId, name, dimensions);
}
//
// If we didn't find any definition dimension ids,
// we won't have any measurements, let's just bail
// now.
//
if (defDimsIdSet.size() == 0) {
return new ArrayList<>(results.values());
}
String defDimInClause = MetricQueries.createDefDimIdInClause(defDimsIdSet);
StringBuilder sb = new StringBuilder();
@ -128,15 +106,15 @@ public class MeasurementVerticaRepoImpl implements MeasurementRepo {
if (offset != null && !offset.isEmpty()) {
if (Boolean.TRUE.equals(mergeMetricsFlag)) {
sb.append(" and mes.time_stamp > :offset_timestamp ");
} else {
if ("*".equals(groupBy)) {
sb.append(" and (TO_HEX(mes.definition_dimensions_id) > :offset_id "
+ "or (TO_HEX(mes.definition_dimensions_id) = :offset_id and mes.time_stamp > :offset_timestamp)) ");
} else {
sb.append(" and mes.time_stamp > :offset_timestamp ");
}
}
@ -148,12 +126,25 @@ public class MeasurementVerticaRepoImpl implements MeasurementRepo {
}
String sql = String.format(FIND_BY_METRIC_DEF_SQL, this.dbHint, defDimInClause, sb, orderById);
String sql = String.format(FIND_BY_METRIC_DEF_SQL,
this.dbHint,
sb,
MetricQueries.buildMetricDefinitionSubSql(name, dimensions,
null, null),
orderById);
Query<Map<String, Object>> query = h.createQuery(sql)
.bind("tenantId", tenantId)
.bind("startTime", new Timestamp(startTime.getMillis()))
.bind("limit", limit + 1);
if (name != null && !name.isEmpty()) {
query.bind("name", name);
}
MetricQueries.bindDimensionsToQuery(query, dimensions);
if (endTime != null) {
logger.debug("binding endtime: {}", endTime);
@ -176,58 +167,60 @@ public class MeasurementVerticaRepoImpl implements MeasurementRepo {
if ("*".equals(groupBy)) {
String currentDefId = null;
for (Map<String, Object> row : rows) {
String defDimsId = (String) row.get("def_dims_id");
Object[] measurement = parseRow(row);
if (defDimsId != null && !defDimsId.equals(currentDefId)) {
currentDefId = defDimsId;
results.put(defDimsId, new Measurements());
}
List<Object> measurement = parseRow(row);
results.get(defDimsId).addMeasurement(measurement);
}
MetricQueries.addDefsToResults(results, h, this.dbHint);
} else {
Measurements firstMeasurement = new Measurements();
firstMeasurement.setName(name);
String firstDefDimsId = (String) rows.get(0).get("def_dims_id");
Measurements firstMeasurement = results.get(firstDefDimsId);
for (Map<String, Object> row : rows) {
if (Boolean.TRUE.equals(mergeMetricsFlag) && defDimsIdSet.size() > 1) {
List<Object> measurement = parseRow(row);
// Wipe out the dimensions.
firstMeasurement.setDimensions(new HashMap<String, String>());
firstMeasurement.addMeasurement(measurement);
}
results.clear();
results.put(firstDefDimsId, firstMeasurement);
for (Map<String, Object> row : rows) {
Object[] measurement = parseRow(row);
results.get(firstDefDimsId).addMeasurement(measurement);
if (!Boolean.TRUE.equals(mergeMetricsFlag)) {
firstMeasurement.setId(firstDefDimsId);
MetricQueries.addDefsToResults(results, h, this.dbHint);
} else {
if (dimensions == null) {
dimensions = new HashMap<>();
}
firstMeasurement.setDimensions(dimensions);
}
}
// clean up any empty measurements
Iterator<Map.Entry<String, Measurements>> it = results.entrySet().iterator();
while (it.hasNext())
{
Map.Entry<String, Measurements> entry = it.next();
if (entry.getValue().getMeasurements().size() == 0) {
it.remove();
}
}
return new ArrayList<>(results.values());
}
}
private Object[] parseRow(Map<String, Object> row) {
private List<Object> parseRow(Map<String, Object> row) {
String timestamp = DATETIME_FORMATTER.print(((Timestamp) row.get("time_stamp")).getTime());
@ -250,72 +243,6 @@ public class MeasurementVerticaRepoImpl implements MeasurementRepo {
}
return new Object[]{timestamp, value, valueMetaMap};
}
private Map<String, Measurements> findDefIds(Handle h, String tenantId,
String name, Map<String, String> dimensions) {
String defDimSql = String.format(
MetricQueries.FIND_METRIC_DEFS_SQL,
this.dbHint,
MetricQueries.buildMetricDefinitionSubSql(name, dimensions));
Query<Map<String, Object>> query = h.createQuery(defDimSql).bind("tenantId", tenantId);
MetricQueries.bindDimensionsToQuery(query, dimensions);
if (name != null && !name.isEmpty()) {
query.bind("name", name);
}
List<Map<String, Object>> rows = query.list();
Map<String, Measurements> stringIdMap = new HashMap<>();
String currentDefDimId = null;
Map<String, String> dims = null;
for (Map<String, Object> row : rows) {
String defDimId = (String) row.get("defDimsId");
String defName = (String) row.get("name");
String dimName = (String) row.get("dName");
String dimValue = (String) row.get("dValue");
if (defDimId == null || !defDimId.equals(currentDefDimId)) {
currentDefDimId = defDimId;
dims = new HashMap<>();
if (dimName != null && dimValue != null)
dims.put(dimName, dimValue);
Measurements measurements = new Measurements();
measurements.setId(defDimId);
measurements.setName(defName);
measurements.setDimensions(dims);
stringIdMap.put(currentDefDimId, measurements);
} else {
if (dimName != null && dimValue != null)
dims.put(dimName, dimValue);
}
}
return stringIdMap;
return Arrays.asList(timestamp, value, valueMetaMap);
}
}

View File

@ -29,12 +29,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Named;
@ -46,7 +43,7 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
LoggerFactory.getLogger(MetricDefinitionVerticaRepoImpl.class);
private static final String METRIC_DEF_SUB_QUERY =
"SELECT defDimsSub.id "
"SELECT TO_HEX(defDimsSub.id) "
+ "FROM MonMetrics.Definitions defSub "
+ "JOIN MonMetrics.DefinitionDimensions defDimsSub ON defSub.id = defDimsSub.definition_id "
+ "WHERE defSub.tenant_id = :tenantId "
@ -74,22 +71,8 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
+ "GROUP BY defSub.name " // This is to reduce the (id, name) sets to only include unique names
+ "ORDER BY max_id ASC %s"; // Limit goes here.
private static final String DEFDIM_IDS_SELECT =
"SELECT %s to_hex(defDims.id) AS id "
+ "FROM MonMetrics.Definitions def, MonMetrics.DefinitionDimensions defDims "
+ "WHERE defDims.definition_id = def.id "
+ "AND def.tenant_id = :tenantId "
+ "%s " // Name clause here
+ "%s;"; // Dimensions and clause goes here
private static final String MEASUREMENT_AND_CLAUSE =
"AND defDimsSub.id IN ("
+ "SELECT definition_dimensions_id FROM MonMetrics.Measurements "
+ "WHERE to_hex(definition_dimensions_id) "
+ "%s " // List of definition dimension ids here
+ "%s ) "; // start or start and end time here
private static final String TABLE_TO_JOIN_DIMENSIONS_ON = "defDimsSub";
private static final String TABLE_TO_JOIN_ON = "defDimsSub";
private final DBI db;
@ -150,7 +133,7 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
String.format(METRIC_NAMES_SUB_SELECT,
offsetPart,
MetricQueries.buildDimensionAndClause(dimensions,
TABLE_TO_JOIN_DIMENSIONS_ON),
TABLE_TO_JOIN_ON),
limitPart);
String sql = String.format(FIND_METRIC_NAMES_SQL, this.dbHint, defSubSelect);
@ -273,9 +256,6 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
try (Handle h = db.open()) {
// If startTime/endTime is specified, create the 'IN' select statement
String timeInClause = createTimeInClause(h, startTime, endTime, tenantId, name, dimensions);
String sql =
String.format(MetricQueries.FIND_METRIC_DEFS_SQL,
this.dbHint,
@ -283,8 +263,9 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
namePart,
offsetPart,
MetricQueries.buildDimensionAndClause(dimensions,
TABLE_TO_JOIN_DIMENSIONS_ON),
timeInClause,
TABLE_TO_JOIN_ON),
MetricQueries.buildTimeAndClause(startTime, endTime,
TABLE_TO_JOIN_ON),
limitPart)
);
@ -296,11 +277,11 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
}
if (startTime != null) {
query.bind("start_time", startTime);
query.bind("startTime", startTime);
}
if (endTime != null) {
query.bind("end_time", endTime);
query.bind("endTime", endTime);
}
if (offset != null && !offset.isEmpty()) {
@ -324,67 +305,4 @@ public class MetricDefinitionVerticaRepoImpl implements MetricDefinitionRepo {
}
}
private String createTimeInClause(
Handle dbHandle,
DateTime startTime,
DateTime endTime,
String tenantId,
String metricName,
Map<String, String> dimensions)
{
if (startTime == null) {
return "";
}
Set<String> defDimIdSet = new HashSet<>();
String namePart = "";
if (metricName != null && !metricName.isEmpty()) {
namePart = "AND def.name = :name ";
}
String defDimSql = String.format(
DEFDIM_IDS_SELECT,
this.dbHint,
namePart,
MetricQueries.buildDimensionAndClause(dimensions, "defDims"));
Query<Map<String, Object>> query = dbHandle.createQuery(defDimSql).bind("tenantId", tenantId);
MetricQueries.bindDimensionsToQuery(query, dimensions);
if (metricName != null && !metricName.isEmpty()) {
query.bind("name", metricName);
}
List<Map<String, Object>> rows = query.list();
for (Map<String, Object> row : rows) {
String defDimId = (String) row.get("id");
defDimIdSet.add(defDimId);
}
//
// If we didn't find any definition dimension ids,
// we won't add the time clause.
//
if (defDimIdSet.size() == 0) {
return "";
}
String timeAndClause = "";
if (endTime != null) {
timeAndClause = "AND time_stamp >= :start_time AND time_stamp <= :end_time ";
} else {
timeAndClause = "AND time_stamp >= :start_time ";
}
String defDimInClause = MetricQueries.createDefDimIdInClause(defDimIdSet);
return String.format(MEASUREMENT_AND_CLAUSE, defDimInClause, timeAndClause);
}
}

View File

@ -18,21 +18,25 @@ import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.joda.time.DateTime;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import monasca.api.domain.exception.MultipleMetricsException;
import monasca.api.domain.model.measurement.Measurements;
/**
* Vertica utilities for building metric queries.
*/
final class MetricQueries {
static final Splitter BAR_SPLITTER = Splitter.on('|').omitEmptyStrings().trimResults();
static final char OFFSET_SEPARATOR = '_';
static final Splitter offsetSplitter = Splitter.on(OFFSET_SEPARATOR).omitEmptyStrings().trimResults();
private static final Splitter BAR_SPLITTER = Splitter.on('|').omitEmptyStrings().trimResults();
private static final char OFFSET_SEPARATOR = '_';
private static final Splitter offsetSplitter = Splitter.on(OFFSET_SEPARATOR).omitEmptyStrings().trimResults();
static final String FIND_METRIC_DEFS_SQL =
"SELECT %s TO_HEX(defDims.id) as defDimsId, def.name, dims.name as dName, dims.value AS dValue "
@ -41,23 +45,29 @@ final class MetricQueries {
// Outer join needed in case there are no dimensions for a definition.
+ "LEFT OUTER JOIN MonMetrics.Dimensions dims ON dims.dimension_set_id = defDims"
+ ".dimension_set_id "
+ "WHERE defDims.id in (%s) "
+ "WHERE TO_HEX(defDims.id) in (%s) "
+ "ORDER BY defDims.id ASC";
static final String METRIC_DEF_SUB_SQL =
"SELECT defDimsSub.id "
"SELECT TO_HEX(defDimsSub.id) as id "
+ "FROM MonMetrics.Definitions as defSub "
+ "JOIN MonMetrics.DefinitionDimensions as defDimsSub ON defDimsSub.definition_id = defSub.id "
+ "WHERE defSub.tenant_id = :tenantId "
+ "%s " // metric name here
+ "%s " // dimension and clause here
+ "%s " // time and clause here
+ "GROUP BY defDimsSub.id";
private static final String TABLE_TO_JOIN_DIMENSIONS_ON = "defDimsSub";
private static final String MEASUREMENT_AND_CLAUSE =
"SELECT definition_dimensions_id FROM MonMetrics.Measurements "
+ "WHERE time_stamp >= :startTime "; // start or start and end time here
private static final String TABLE_TO_JOIN_ON = "defDimsSub";
private MetricQueries() {}
static String buildMetricDefinitionSubSql(String name, Map<String, String> dimensions) {
static String buildMetricDefinitionSubSql(String name, Map<String, String> dimensions,
DateTime startTime, DateTime endTime) {
String namePart = "";
@ -68,7 +78,9 @@ final class MetricQueries {
return String.format(METRIC_DEF_SUB_SQL,
namePart,
buildDimensionAndClause(dimensions,
TABLE_TO_JOIN_DIMENSIONS_ON));
TABLE_TO_JOIN_ON),
buildTimeAndClause(startTime, endTime,
TABLE_TO_JOIN_ON));
}
static String buildDimensionAndClause(Map<String, String> dimensions,
@ -122,6 +134,30 @@ final class MetricQueries {
return sb.toString();
}
static String buildTimeAndClause(
DateTime startTime,
DateTime endTime,
String tableToJoin)
{
if (startTime == null) {
return "";
}
StringBuilder timeAndClause = new StringBuilder();
timeAndClause.append("AND ").append(tableToJoin).append(".id IN (");
timeAndClause.append(MEASUREMENT_AND_CLAUSE);
if (endTime != null) {
timeAndClause.append("AND time_stamp <= :endTime ");
}
timeAndClause.append(")");
return timeAndClause.toString();
}
static void bindDimensionsToQuery(Query<?> query, Map<String, String> dimensions) {
if (dimensions != null) {
int i = 0;
@ -143,28 +179,6 @@ final class MetricQueries {
}
}
static String createDefDimIdInClause(Set<String> defDimIdSet) {
StringBuilder sb = new StringBuilder("IN ");
sb.append("(");
boolean first = true;
for (String defDimId : defDimIdSet) {
if (first) {
first = false;
} else {
sb.append(",");
}
sb.append("'").append(defDimId).append("'");
}
sb.append(") ");
return sb.toString();
}
static void bindOffsetToQuery(Query<Map<String, Object>> query, String offset) {
List<String> offsets = offsetSplitter.splitToList(offset);
if (offsets.size() > 1) {
@ -176,4 +190,95 @@ final class MetricQueries {
new Timestamp(DateTime.parse(offsets.get(0)).getMillis()));
}
}
static void checkForMultipleDefinitions(Handle h, String tenantId, String name, Map<String, String> dimensions)
throws MultipleMetricsException {
String namePart = "";
if (name != null && !name.isEmpty()) {
namePart = "AND name = :name ";
}
String sql = String.format(METRIC_DEF_SUB_SQL,
namePart,
buildDimensionAndClause(dimensions,
TABLE_TO_JOIN_ON),
"") + " limit 2";
Query<Map<String, Object>> query = h.createQuery(sql);
query.bind("tenantId", tenantId);
if (name != null) {
query.bind("name", name);
}
bindDimensionsToQuery(query, dimensions);
List<Map<String, Object>> rows = query.list();
if (rows.size() > 1) {
throw new MultipleMetricsException(name, dimensions);
}
}
static void addDefsToResults(Map<String, ? extends Measurements> results, Handle h, String dbHint) {
StringBuilder sb = new StringBuilder();
boolean first = true;
for (String id : results.keySet()) {
if (first) {
sb.append("'").append(id).append("'");
first = false;
} else {
sb.append(',').append("'").append(id).append("'");
}
}
String defDimSql = String.format(MetricQueries.FIND_METRIC_DEFS_SQL,
dbHint,
sb.toString());
Query<Map<String, Object>> query = h.createQuery(defDimSql);
List<Map<String, Object>> rows = query.list();
String currentDefDimId = null;
Map<String, String> dims = null;
for (Map<String, Object> row : rows) {
String defDimId = (String) row.get("defDimsId");
String defName = (String) row.get("name");
String dimName = (String) row.get("dName");
String dimValue = (String) row.get("dValue");
if (defDimId != null && !defDimId.equals(currentDefDimId)) {
currentDefDimId = defDimId;
dims = new HashMap<>();
if (dimName != null && dimValue != null)
dims.put(dimName, dimValue);
results.get(defDimId).setId(defDimId);
results.get(defDimId).setName(defName);
results.get(defDimId).setDimensions(dims);
} else {
if (dimName != null && dimValue != null)
dims.put(dimName, dimValue);
}
}
}
}

View File

@ -33,7 +33,6 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Named;
@ -71,41 +70,38 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
Boolean mergeMetricsFlag,
String groupBy) throws MultipleMetricsException {
List<Statistics> statisticsList = new ArrayList<>();
Map<String, Statistics> statisticsMap = new HashMap<>();
// Sort the column names so that they match the order of the statistics in the results.
List<String> statisticsColumns = createColumnsList(statisticsCols);
try (Handle h = db.open()) {
Map<String, Statistics> byteMap = findDefIds(h, tenantId, name, dimensions);
if (!"*".equals(groupBy) && !Boolean.TRUE.equals(mergeMetricsFlag)) {
if (byteMap.isEmpty()) {
return statisticsList;
MetricQueries.checkForMultipleDefinitions(h, tenantId, name, dimensions);
}
if (!"*".equals(groupBy) && !Boolean.TRUE.equals(mergeMetricsFlag) && byteMap.keySet().size() > 1) {
throw new MultipleMetricsException(name, dimensions);
}
List<List<Object>> statisticsListList = new ArrayList<>();
String sql = createQuery(byteMap.keySet(), period, startTime, endTime, offset, statisticsCols,
groupBy, mergeMetricsFlag);
String sql = createQuery(name, dimensions, period, startTime, endTime, offset,
statisticsCols, mergeMetricsFlag);
logger.debug("vertica sql: {}", sql);
Query<Map<String, Object>>
query =
h.createQuery(sql)
.bind("tenantId", tenantId)
.bind("start_time", startTime)
.bind("end_time", endTime)
.bind("limit", limit + 1);
if (name != null && !name.isEmpty()) {
query.bind("name", name);
}
MetricQueries.bindDimensionsToQuery(query, dimensions);
if (offset != null && !offset.isEmpty()) {
logger.debug("binding offset: {}", offset);
@ -114,60 +110,70 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
List<Map<String, Object>> rows = query.list();
if (rows.size() == 0) {
return new ArrayList<>();
}
if ("*".equals(groupBy)) {
String currentDefId = null;
for (Map<String, Object> row : rows) {
List<Object> statisticsRow = parseRow(row);
String defDimsId = (String) row.get("id");
byteMap.get(defDimsId).addStatistics(statisticsRow);
if (defDimsId != null && !defDimsId.equals(currentDefId)) {
Statistics newStats = new Statistics();
newStats.setColumns(statisticsColumns);
}
for (Map.Entry<String, Statistics> entry : byteMap.entrySet()) {
Statistics statistics = entry.getValue();
statistics.setColumns(statisticsColumns);
if (statistics.getStatistics().size() > 0) {
statisticsList.add(statistics);
statisticsMap.put(defDimsId, newStats);
currentDefId = defDimsId;
}
statisticsMap.get(defDimsId).addMeasurement(statisticsRow);
}
MetricQueries.addDefsToResults(statisticsMap, h, this.dbHint);
} else {
Statistics statistics = new Statistics();
statistics.setId("");
statistics.setName(name);
statistics.setColumns(statisticsColumns);
String firstDefId = (String) rows.get(0).get("id");
for (Map<String, Object> row : rows) {
List<Object> statisticsRow = parseRow(row);
statisticsListList.add(statisticsRow);
statistics.addMeasurement(statisticsRow);
}
// Just use the first entry in the byteMap to get the def name and dimensions.
Statistics statistics = byteMap.entrySet().iterator().next().getValue();
statistics.setColumns(statisticsColumns);
if (Boolean.TRUE.equals(mergeMetricsFlag) && byteMap.keySet().size() > 1) {
// Wipe out the dimensions.
statistics.setDimensions(new HashMap<String, String>());
statisticsMap.put(firstDefId, statistics);
if (!Boolean.TRUE.equals(mergeMetricsFlag)) {
statistics.setId(firstDefId);
MetricQueries.addDefsToResults(statisticsMap, h, this.dbHint);
} else {
if (dimensions == null) {
dimensions = new HashMap<>();
}
statistics.setDimensions(dimensions);
}
statistics.setStatistics(statisticsListList);
statisticsList.add(statistics);
}
}
return statisticsList;
return new ArrayList<>(statisticsMap.values());
}
private List<Object> parseRow(Map<String, Object> row) {
@ -208,77 +214,6 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
return statisticsRow;
}
private Map<String, Statistics> findDefIds(
Handle h,
String tenantId,
String name,
Map<String, String> dimensions) {
String sql = String.format(
MetricQueries.FIND_METRIC_DEFS_SQL,
this.dbHint,
MetricQueries.buildMetricDefinitionSubSql(name, dimensions));
Query<Map<String, Object>> query =
h.createQuery(sql)
.bind("tenantId", tenantId);
if (name != null && !name.isEmpty()) {
logger.debug("binding name: {}", name);
query.bind("name", name);
}
MetricQueries.bindDimensionsToQuery(query, dimensions);
List<Map<String, Object>> rows = query.list();
Map<String, Statistics> byteIdMap = new HashMap<>();
String currentDefDimId = null;
Map<String, String> dims = null;
for (Map<String, Object> row : rows) {
String defDimId = (String) row.get("defDimsId");
String defName = (String) row.get("name");
String dimName = (String) row.get("dName");
String dimValue = (String) row.get("dValue");
if (defDimId == null || !defDimId.equals(currentDefDimId)) {
currentDefDimId = defDimId;
dims = new HashMap<>();
dims.put(dimName, dimValue);
Statistics statistics = new Statistics();
statistics.setId(defDimId);
statistics.setName(defName);
statistics.setDimensions(dims);
byteIdMap.put(currentDefDimId, statistics);
} else {
dims.put(dimName, dimValue);
}
}
return byteIdMap;
}
List<String> createColumnsList(
List<String> list) {
@ -293,21 +228,19 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
}
private String createQuery(
Set<String> defDimIdSet,
String name,
Map<String, String> dimensions,
int period,
DateTime startTime,
DateTime endTime,
String offset,
List<String> statistics,
String groupBy,
Boolean mergeMetricsFlag) {
StringBuilder sb = new StringBuilder();
sb.append("SELECT " + this.dbHint + " ");
if (groupBy != null && !groupBy.isEmpty()) {
sb.append(" to_hex(definition_dimensions_id) AS id, ");
}
sb.append("SELECT ").append(this.dbHint).append(" ");
sb.append(" max(to_hex(definition_dimensions_id)) AS id, ");
sb.append(createColumnsStr(statistics));
if (period >= 1) {
@ -316,18 +249,19 @@ public class StatisticVerticaRepoImpl implements StatisticRepo {
}
sb.append(" FROM MonMetrics.Measurements ");
String inClause = MetricQueries.createDefDimIdInClause(defDimIdSet);
sb.append("WHERE to_hex(definition_dimensions_id) ").append(inClause);
sb.append("WHERE TO_HEX(definition_dimensions_id) IN (")
.append(MetricQueries.buildMetricDefinitionSubSql(name, dimensions, null, null))
.append(") ");
sb.append(createWhereClause(startTime, endTime, offset, mergeMetricsFlag));
if (period >= 1) {
sb.append(" group by ");
if (groupBy != null && !groupBy.isEmpty()) {
sb.append("definition_dimensions_id,");
if (Boolean.FALSE.equals(mergeMetricsFlag)) {
sb.append("definition_dimensions_id, ");
}
sb.append("time_interval ");
sb.append(" order by ");
if (groupBy != null && !groupBy.isEmpty()) {
if (Boolean.FALSE.equals(mergeMetricsFlag)) {
sb.append("to_hex(definition_dimensions_id),");
}
sb.append("time_interval ");

View File

@ -27,7 +27,6 @@ import monasca.api.ApiConfig;
import monasca.api.domain.model.alarm.AlarmCount;
import monasca.api.domain.model.common.Paged;
import monasca.api.domain.model.measurement.Measurements;
import monasca.api.domain.model.statistic.Statistics;
import monasca.common.model.domain.common.AbstractEntity;
import monasca.api.domain.model.common.Link;
import monasca.api.domain.model.common.Linked;
@ -242,7 +241,7 @@ public final class Links {
}
public static Object paginateMeasurements(int limit, List<Measurements> elements, UriInfo uriInfo)
public static Object paginateMeasurements(int limit, List<? extends Measurements> elements, UriInfo uriInfo)
throws UnsupportedEncodingException {
// Check for paging turned off. Happens if maxQueryLimit is not set or is set to zero.
@ -262,78 +261,11 @@ public final class Links {
for (int i = 0; i < elements.size(); i++) {
Measurements m = elements.get(i);
if (m != null) {
List<Object[]> l = m.getMeasurements();
if (l.size() >= remaining_limit) {
String offset = m.getId();
if (offset != null) {
offset += '_' + (String) l.get(remaining_limit - 1)[0];
} else {
offset = (String) l.get(remaining_limit - 1)[0];
}
paged.links.add(getNextLink(offset, uriInfo));
// Truncate the measurement list. Normally this will just truncate one extra element.
l = l.subList(0, remaining_limit);
m.setMeasurements(l);
// Truncate the elements list
elements = elements.subList(0, i + 1);
} else {
remaining_limit -= l.size();
}
paged.elements = elements;
} else {
paged.elements = new ArrayList<>();
}
}
} else {
paged.elements = new ArrayList<>();
}
return paged;
}
public static Object paginateStatistics(int limit, List<Statistics> elements, UriInfo uriInfo)
throws UnsupportedEncodingException {
// Check for paging turned off. Happens if maxQueryLimit is not set or is set to zero.
if (limit == 0) {
Paged paged = new Paged();
paged.elements = elements != null ? elements : new ArrayList<>();
return paged;
}
Paged paged = new Paged();
paged.links.add(getSelfLink(uriInfo));
if (elements != null && !elements.isEmpty()) {
int remaining_limit = limit;
for (int i = 0; i < elements.size(); i++) {
Statistics s = elements.get(i);
Measurements s = elements.get(i);
if (s != null) {
List<List<Object>> l = s.getStatistics();
List<List<Object>> l = s.getMeasurements();
if (l.size() >= remaining_limit) {
@ -349,7 +281,7 @@ public final class Links {
// Truncate the measurement list. Normally this will just truncate one extra element.
l = l.subList(0, remaining_limit);
s.setStatistics(l);
s.setMeasurements(l);
// Truncate the elements list
elements = elements.subList(0, i + 1);
@ -376,6 +308,8 @@ public final class Links {
}
private static Link getSelfLink(UriInfo uriInfo) {
Link selfLink = new Link();

View File

@ -100,12 +100,12 @@ public class StatisticResource {
String queryTenantId = Validation.getQueryProject(roles, crossTenantId, tenantId, admin_role);
return Links.paginateStatistics(this.persistUtils.getLimit(limit),
repo.find(queryTenantId, name, dimensions, startTime, endTime,
statistics, period, offset,
this.persistUtils.getLimit(limit),
mergeMetricsFlagBool, groupBy),
uriInfo);
return Links.paginateMeasurements(this.persistUtils.getLimit(limit),
repo.find(queryTenantId, name, dimensions, startTime, endTime,
statistics, period, offset,
this.persistUtils.getLimit(limit),
mergeMetricsFlagBool, groupBy),
uriInfo);
}
}