Refactor code to remove duplication.

Add unit tests.
This commit is contained in:
Deklan Dieterly 2014-06-23 11:10:41 -06:00
parent 6f5898c58b
commit 68030d77d4
20 changed files with 587 additions and 556 deletions

View File

@ -186,6 +186,13 @@
<version>${dropwizard.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>1.9.5</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -25,7 +25,6 @@ import com.hpcloud.mon.domain.exception.EntityNotFoundException;
import com.hpcloud.mon.domain.model.alarm.Alarm;
import com.hpcloud.mon.domain.model.alarm.AlarmRepository;
import com.hpcloud.persistence.BeanMapper;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
@ -33,7 +32,6 @@ import org.skife.jdbi.v2.util.StringMapper;
import javax.inject.Inject;
import javax.inject.Named;
import java.util.*;
/**
@ -118,7 +116,7 @@ public class AlarmRepositoryImpl implements AlarmRepository {
sbWhere.append(" and alarm.state = :state");
}
String sql = String.format(query, SubAlarmQueries.buildJoinClauseFor(dimensions), sbWhere);
String sql = String.format(query, Utils.SubAlarmQueries.buildJoinClauseFor(dimensions), sbWhere);
Query<?> q = h.createQuery(sql).bind("tenantId", tenantId);
if (state != null) {
@ -126,7 +124,7 @@ public class AlarmRepositoryImpl implements AlarmRepository {
}
q = q.map(new BeanMapper<Alarm>(Alarm.class));
DimensionQueries.bindDimensionsToQuery(q, dimensions);
Utils.DimensionQueries.bindDimensionsToQuery(q, dimensions);
List<Alarm> alarms = (List<Alarm>) q.list();

View File

@ -40,123 +40,116 @@ import java.util.concurrent.TimeUnit;
public class AlarmStateHistoryInfluxDBRepositoryImpl implements AlarmStateHistoryRepository {
private static final Logger logger = LoggerFactory.getLogger(AlarmStateHistoryInfluxDBRepositoryImpl.class);
private static final Logger logger = LoggerFactory.getLogger
(AlarmStateHistoryInfluxDBRepositoryImpl.class);
private final MonApiConfiguration config;
private final InfluxDB influxDB;
private final DBI mysql;
private final MonApiConfiguration config;
private final InfluxDB influxDB;
private final DBI mysql;
private static final String FIND_ALARMS_SQL = "select distinct a.id from alarm as a "
+ "join sub_alarm sa on a.id = sa.alarm_id "
+ "left outer join sub_alarm_dimension dim on sa.id = dim.sub_alarm_id%s "
+ "where a.tenant_id = :tenantId and a.deleted_at is NULL";
private static final String FIND_ALARMS_SQL = "select distinct a.id from alarm as a " + "join" +
" sub_alarm sa on a.id = sa.alarm_id " + "left outer join sub_alarm_dimension dim on " +
"sa.id = dim.sub_alarm_id%s " + "where a.tenant_id = :tenantId and a.deleted_at is " +
"NULL";
@Inject
public AlarmStateHistoryInfluxDBRepositoryImpl(@Named("mysql") DBI mysql, MonApiConfiguration config) {
this.mysql = mysql;
this.config = config;
@Inject
public AlarmStateHistoryInfluxDBRepositoryImpl(@Named("mysql") DBI mysql,
MonApiConfiguration config) {
this.mysql = mysql;
this.config = config;
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(), this.config.influxDB.getUser(),
this.config.influxDB.getPassword());
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(),
this.config.influxDB.getUser(), this.config.influxDB.getPassword());
}
@Override
public List<AlarmStateHistory> findById(String tenantId, String alarmId) throws Exception {
// InfluxDB orders queries by time stamp desc by default.
String query = String.format("select alarm_id, old_state, new_state, reason, reason_data " +
"from alarm_state_history " +
"where tenant_id = '%1$s' and alarm_id = '%2$s'", Utils.SQLSanitizer.sanitize(tenantId),
Utils.SQLSanitizer.sanitize(alarmId));
return queryInfluxDBForAlarmStateHistory(query);
}
@Override
public Collection<AlarmStateHistory> find(String tenantId, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime)
throws Exception {
List<String> alarmIds = null;
// Find alarm Ids for dimensions
try (Handle h = mysql.open()) {
String sql = String.format(FIND_ALARMS_SQL, Utils.SubAlarmQueries.buildJoinClauseFor(dimensions));
Query<Map<String, Object>> query = h.createQuery(sql).bind("tenantId", tenantId);
Utils.DimensionQueries.bindDimensionsToQuery(query, dimensions);
alarmIds = query.map(StringMapper.FIRST).list();
}
@Override
public List<AlarmStateHistory> findById(String tenantId, String alarmId) throws Exception {
// InfluxDB orders queries by time stamp desc by default.
String query = String.format("select alarm_id, old_state, new_state, reason, reason_data " +
"from alarm_state_history " +
"where tenant_id = '%1$s' and alarm_id = '%2$s'", SQLSanitizer.sanitize(tenantId), SQLSanitizer.sanitize(alarmId));
return queryInfluxDBForAlarmStateHistory(query);
if (alarmIds == null || alarmIds.isEmpty()) {
return Collections.emptyList();
}
@Override
public Collection<AlarmStateHistory> find(String tenantId, Map<String, String> dimensions, DateTime startTime, @Nullable DateTime endTime) throws Exception {
String timePart = Utils.WhereClauseBuilder.buildTimePart(startTime, endTime);
String alarmsPart = buildAlarmsPart(alarmIds);
List<String> alarmIds = null;
// Find alarm Ids for dimensions
try (Handle h = mysql.open()) {
String sql = String.format(FIND_ALARMS_SQL, SubAlarmQueries.buildJoinClauseFor(dimensions));
Query<Map<String, Object>> query = h.createQuery(sql).bind("tenantId", tenantId);
DimensionQueries.bindDimensionsToQuery(query, dimensions);
alarmIds = query.map(StringMapper.FIRST).list();
}
String query = String.format("select alarm_id, old_state, new_state, reason, reason_data " +
"from alarm_state_history " +
"where tenant_id = '%1$s' %2$s %3$s", Utils.SQLSanitizer.sanitize(tenantId), timePart,
alarmsPart);
if (alarmIds == null || alarmIds.isEmpty()) {
return Collections.emptyList();
}
return queryInfluxDBForAlarmStateHistory(query);
String timePart = buildTimePart(startTime, endTime);
String alarmsPart = buildAlarmsPart(alarmIds);
}
String query = String.format("select alarm_id, old_state, new_state, reason, reason_data " +
"from alarm_state_history " +
"where tenant_id = '%1$s' %2$s %3$s", SQLSanitizer.sanitize(tenantId), timePart, alarmsPart);
return queryInfluxDBForAlarmStateHistory(query);
private String buildAlarmsPart(List<String> alarmIds) {
StringBuilder sb = new StringBuilder();
for (String alarmId : alarmIds) {
if (sb.length() > 0) {
sb.append(" or ");
}
sb.append(String.format(" alarm_id = '%1$s' ", alarmId));
}
private String buildAlarmsPart(List<String> alarmIds) {
if (sb.length() > 0) {
sb.insert(0, " and (");
sb.insert(sb.length(), ")");
}
return sb.toString();
}
StringBuilder sb = new StringBuilder();
for (String alarmId : alarmIds) {
if (sb.length() > 0) {
sb.append(" or ");
}
sb.append(String.format(" alarm_id = '%1$s' ", alarmId));
}
private List<AlarmStateHistory> queryInfluxDBForAlarmStateHistory(String query) {
if (sb.length() > 0) {
sb.insert(0, " and (");
sb.insert(sb.length(), ")");
}
return sb.toString();
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query,
TimeUnit.SECONDS);
List<AlarmStateHistory> alarmStateHistoryList = new LinkedList<>();
// Should only be one serie -- alarm_state_history.
for (Serie serie : result) {
Object[][] valObjArryArry = serie.getPoints();
for (int i = 0; i < valObjArryArry.length; i++) {
AlarmStateHistory alarmStateHistory = new AlarmStateHistory();
// Time is always in position 0.
alarmStateHistory.setTimestamp(new DateTime(new Long((Integer) valObjArryArry[i][0]) *
1000, DateTimeZone.UTC));
// Sequence_number is always in position 1.
alarmStateHistory.setAlarmId((String) valObjArryArry[i][2]);
alarmStateHistory.setNewState(AlarmState.valueOf((String) valObjArryArry[i][3]));
alarmStateHistory.setOldState(AlarmState.valueOf((String) valObjArryArry[i][4]));
alarmStateHistory.setReason((String) valObjArryArry[i][5]);
alarmStateHistory.setReasonData((String) valObjArryArry[i][6]);
alarmStateHistoryList.add(alarmStateHistory);
}
}
private String buildTimePart(DateTime startTime, DateTime endTime) {
String s = "";
if (startTime != null) {
s += String.format(" and time > %1$ds", startTime.getMillis() / 1000);
}
if (endTime != null) {
s += String.format(" and time < %1$ds", endTime.getMillis() / 1000);
}
return s;
}
private List<AlarmStateHistory> queryInfluxDBForAlarmStateHistory(String query) {
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.SECONDS);
List<AlarmStateHistory> alarmStateHistoryList = new LinkedList<>();
// Should only be one serie -- alarm_state_history.
for (Serie serie : result) {
Object[][] valObjArryArry = serie.getPoints();
for (int i = 0; i < valObjArryArry.length; i++) {
AlarmStateHistory alarmStateHistory = new AlarmStateHistory();
// Time is always in position 0.
alarmStateHistory.setTimestamp(new DateTime(new Long((Integer) valObjArryArry[i][0]) * 1000, DateTimeZone.UTC));
// Sequence_number is always in position 1.
alarmStateHistory.setAlarmId((String) valObjArryArry[i][2]);
alarmStateHistory.setNewState(AlarmState.valueOf((String) valObjArryArry[i][3]));
alarmStateHistory.setOldState(AlarmState.valueOf((String) valObjArryArry[i][4]));
alarmStateHistory.setReason((String) valObjArryArry[i][5]);
alarmStateHistory.setReasonData((String) valObjArryArry[i][6]);
alarmStateHistoryList.add(alarmStateHistory);
}
}
return alarmStateHistoryList;
}
return alarmStateHistoryList;
}
}

View File

@ -77,9 +77,9 @@ public class AlarmStateHistoryVerticaRepositoryImpl implements AlarmStateHistory
// Find alarm Ids for dimensions
try (Handle h = mysql.open()) {
String sql = String.format(FIND_ALARMS_SQL, SubAlarmQueries.buildJoinClauseFor(dimensions));
String sql = String.format(FIND_ALARMS_SQL, Utils.SubAlarmQueries.buildJoinClauseFor(dimensions));
Query<Map<String, Object>> query = h.createQuery(sql).bind("tenantId", tenantId);
DimensionQueries.bindDimensionsToQuery(query, dimensions);
Utils.DimensionQueries.bindDimensionsToQuery(query, dimensions);
alarmIds = query.map(StringMapper.FIRST).list();
}
@ -109,7 +109,7 @@ public class AlarmStateHistoryVerticaRepositoryImpl implements AlarmStateHistory
query.bind("startTime", new Timestamp(startTime.getMillis()));
if (endTime != null)
query.bind("endTime", new Timestamp(endTime.getMillis()));
DimensionQueries.bindDimensionsToQuery(query, dimensions);
Utils.DimensionQueries.bindDimensionsToQuery(query, dimensions);
return query.map(new BeanMapper<>(AlarmStateHistory.class)).list();
}
}

View File

@ -1,41 +0,0 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import org.skife.jdbi.v2.Query;
import java.util.Iterator;
import java.util.Map;
/**
 * Utilities for binding metric-dimension filters onto JDBI queries.
 */
public final class DimensionQueries {
  // Static-only utility class; never instantiated.
  private DimensionQueries() {
  }

  /**
   * Binds each dimension name/value pair to {@code query} as the named
   * parameters {@code dname0}/{@code dvalue0}, {@code dname1}/{@code dvalue1}, ...
   * in map-iteration order — these names must match the {@code :dname<i>} /
   * {@code :dvalue<i>} placeholders emitted by the join-clause builders.
   * A {@code null} dimensions map is a no-op.
   */
  static void bindDimensionsToQuery(Query<?> query, Map<String, String> dimensions) {
    if (dimensions != null) {
      int i = 0;
      // Manual iterator so the counter i advances in lock-step with the entries.
      for (Iterator<Map.Entry<String, String>> it = dimensions.entrySet().iterator(); it.hasNext(); i++) {
        Map.Entry<String, String> entry = it.next();
        query.bind("dname" + i, entry.getKey());
        query.bind("dvalue" + i, entry.getValue());
      }
    }
  }
}

View File

@ -35,80 +35,62 @@ import java.util.concurrent.TimeUnit;
public class MeasurementInfluxDBRepositoryImpl implements MeasurementRepository {
private static final Logger logger = LoggerFactory.getLogger(MeasurementInfluxDBRepositoryImpl.class);
private static final Logger logger = LoggerFactory.getLogger(MeasurementInfluxDBRepositoryImpl
.class);
private final MonApiConfiguration config;
private final InfluxDB influxDB;
private final MonApiConfiguration config;
private final InfluxDB influxDB;
public static final DateTimeFormatter DATETIME_FORMATTER = ISODateTimeFormat.dateTimeNoMillis();
public static final DateTimeFormatter DATETIME_FORMATTER = ISODateTimeFormat.dateTimeNoMillis();
@Inject
public MeasurementInfluxDBRepositoryImpl(MonApiConfiguration config) {
this.config = config;
@Inject
public MeasurementInfluxDBRepositoryImpl(MonApiConfiguration config) {
this.config = config;
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(), this.config.influxDB.getUser(),
this.config.influxDB.getPassword());
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(),
this.config.influxDB.getUser(), this.config.influxDB.getPassword());
}
@Override
public Collection<Measurements> find(String tenantId, String name, Map<String,
String> dimensions, DateTime startTime, @Nullable DateTime endTime) throws Exception {
String dimsPart = Utils.WhereClauseBuilder.buildDimsPart(dimensions);
String timePart = Utils.WhereClauseBuilder.buildTimePart(startTime, endTime);
String query = String.format("select value " +
"from %1$s " +
"where tenant_id = '%2$s' %3$s %4$s", Utils.SQLSanitizer.sanitize(name),
Utils.SQLSanitizer.sanitize(tenantId), timePart, dimsPart);
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query,
TimeUnit.MILLISECONDS);
Measurements measurements = new Measurements();
measurements.setName(name);
measurements.setDimensions(dimensions);
List<Object[]> valObjArryList = new LinkedList<>();
for (Serie serie : result) {
Object[][] valObjArry = serie.getPoints();
for (int i = 0; i < valObjArry.length; i++) {
Object[] objArry = new Object[3];
// sequence_number
objArry[0] = valObjArry[i][1];
// time
objArry[1] = DATETIME_FORMATTER.print((long) valObjArry[i][0]);
// value
objArry[2] = valObjArry[i][2];
valObjArryList.add(objArry);
}
}
@Override
public Collection<Measurements> find(String tenantId, String name, Map<String, String> dimensions, DateTime startTime, @Nullable DateTime endTime) throws Exception {
measurements.setMeasurements(valObjArryList);
String dimWhereClause = "";
if (dimensions != null) {
for (String colName : dimensions.keySet()) {
dimWhereClause += String.format(" and %1$s = '%2$s'", SQLSanitizer.sanitize(colName), SQLSanitizer.sanitize(dimensions.get(colName)));
return Arrays.asList(measurements);
}
}
}
String timePart = buildTimePart(startTime, endTime);
String query = String.format("select value " +
"from %1$s " +
"where tenant_id = '%2$s' %3$s %4$s",
SQLSanitizer.sanitize(name), SQLSanitizer.sanitize(tenantId), timePart, dimWhereClause);
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.MILLISECONDS);
Measurements measurements = new Measurements();
measurements.setName(name);
measurements.setDimensions(dimensions);
List<Object[]> valObjArryList = new LinkedList<>();
for (Serie serie : result) {
Object[][] valObjArry = serie.getPoints();
for (int i = 0; i < valObjArry.length; i++) {
Object[] objArry = new Object[3];
// sequence_number
objArry[0] = valObjArry[i][1];
// time
objArry[1] = DATETIME_FORMATTER.print((long) valObjArry[i][0]);
// value
objArry[2] = valObjArry[i][2];
valObjArryList.add(objArry);
}
}
measurements.setMeasurements(valObjArryList);
return Arrays.asList(measurements);
}
private String buildTimePart(DateTime startTime, DateTime endTime) {
String s = "";
if (startTime != null) {
s += String.format(" and time > %1$ds", startTime.getMillis() / 1000);
}
if (endTime != null) {
s += String.format(" and time < %1$ds", endTime.getMillis() / 1000);
}
return s;
}
}

View File

@ -61,7 +61,7 @@ public class MeasurementVerticaRepositoryImpl implements MeasurementRepository {
if (endTime != null)
sbWhere.append(" and m.time_stamp <= :endTime");
String sql = String.format(FIND_BY_METRIC_DEF_SQL,
MetricQueries.buildJoinClauseFor(dimensions), sbWhere);
Utils.MetricQueries.buildJoinClauseFor(dimensions), sbWhere);
// Build query
Query<Map<String, Object>> query = h.createQuery(sql)
@ -71,7 +71,7 @@ public class MeasurementVerticaRepositoryImpl implements MeasurementRepository {
query.bind("name", name);
if (endTime != null)
query.bind("endTime", new Timestamp(endTime.getMillis()));
DimensionQueries.bindDimensionsToQuery(query, dimensions);
Utils.DimensionQueries.bindDimensionsToQuery(query, dimensions);
// Execute query
List<Map<String, Object>> rows = query.list();
@ -89,7 +89,8 @@ public class MeasurementVerticaRepositoryImpl implements MeasurementRepository {
Measurements measurements = results.get(defId);
if (measurements == null) {
measurements = new Measurements(metricName, MetricQueries.dimensionsFor(h, dimSetIdBytes),
measurements = new Measurements(metricName, Utils.MetricQueries.dimensionsFor(h,
dimSetIdBytes),
new ArrayList<Object[]>());
results.put(defId, measurements);
}

View File

@ -33,60 +33,46 @@ import java.util.Map;
import java.util.concurrent.TimeUnit;
public class MetricDefinitionInfluxDBRepositoryImpl implements MetricDefinitionRepository {
private static final Logger logger = LoggerFactory.getLogger(AlarmStateHistoryInfluxDBRepositoryImpl.class);
private static final Logger logger = LoggerFactory.getLogger
(AlarmStateHistoryInfluxDBRepositoryImpl.class);
private final MonApiConfiguration config;
private final InfluxDB influxDB;
private final MonApiConfiguration config;
private final InfluxDB influxDB;
@Inject
public MetricDefinitionInfluxDBRepositoryImpl(MonApiConfiguration config) {
this.config = config;
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(), this.config.influxDB.getUser(),
this.config.influxDB.getPassword());
@Inject
public MetricDefinitionInfluxDBRepositoryImpl(MonApiConfiguration config) {
this.config = config;
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(),
this.config.influxDB.getUser(), this.config.influxDB.getPassword());
}
@Override
public List<MetricDefinition> find(String tenantId, String name, Map<String,
String> dimensions) throws Exception {
String dimsPart = Utils.WhereClauseBuilder.buildDimsPart(dimensions);
// name is not used in the query.
String query = String.format("select first(value) from /.*/ where tenant_id = '%1$s' %2$s",
Utils.SQLSanitizer.sanitize(tenantId), dimsPart);
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query,
TimeUnit.SECONDS);
List<MetricDefinition> metricDefinitionList = new ArrayList<>();
for (Serie serie : result) {
MetricDefinition metricDefinition = new MetricDefinition();
metricDefinition.name = serie.getName();
metricDefinition.setDimensions(dimensions == null ? new HashMap<String,
String>() : dimensions);
metricDefinitionList.add(metricDefinition);
}
@Override
public List<MetricDefinition> find(String tenantId, String name, Map<String, String> dimensions) throws Exception {
return metricDefinitionList;
}
String dimWhereClause = buildDimWherePart(dimensions);
// name is not used in the query.
String query = String.format("select first(value) from /.*/ where tenant_id = '%1$s' %2$s", SQLSanitizer.sanitize(tenantId), dimWhereClause);
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.SECONDS);
List<MetricDefinition> metricDefinitionList = new ArrayList<>();
for (Serie serie : result) {
MetricDefinition metricDefinition = new MetricDefinition();
metricDefinition.name = serie.getName();
metricDefinition.setDimensions(dimensions == null ? new HashMap<String, String>() : dimensions);
metricDefinitionList.add(metricDefinition);
}
return metricDefinitionList;
}
private String buildDimWherePart(Map<String, String> dimensions) throws Exception {
String dimWhereClause = "";
boolean first = true;
if (dimensions != null) {
for (String colName : dimensions.keySet()) {
if (first) {
first = false;
} else {
dimWhereClause += " and";
}
dimWhereClause += String.format(" %1$s = '%2$s'", SQLSanitizer.sanitize(colName), SQLSanitizer.sanitize(dimensions.get(colName)));
}
if (dimWhereClause.length() > 0) {
dimWhereClause = String.format(" and %1$s", dimWhereClause);
}
}
return dimWhereClause;
}
}

View File

@ -50,13 +50,13 @@ public class MetricDefinitionVerticaRepositoryImpl implements MetricDefinitionRe
if (name != null)
sbWhere.append(" and def.name = :name");
String sql = String.format(FIND_BY_METRIC_DEF_SQL,
MetricQueries.buildJoinClauseFor(dimensions), sbWhere);
Utils.MetricQueries.buildJoinClauseFor(dimensions), sbWhere);
// Build query
Query<Map<String, Object>> query = h.createQuery(sql).bind("tenantId", tenantId);
if (name != null)
query.bind("name", name);
DimensionQueries.bindDimensionsToQuery(query, dimensions);
Utils.DimensionQueries.bindDimensionsToQuery(query, dimensions);
// Execute query
List<Map<String, Object>> rows = query.list();

View File

@ -1,59 +0,0 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import java.util.Map;
import org.skife.jdbi.v2.Handle;
import com.hpcloud.persistence.SqlQueries;
/**
 * Utilities for building metric queries against the Vertica schema.
 */
final class MetricQueries {
  // Static-only utility class; never instantiated.
  private MetricQueries() {
  }

  /**
   * Builds an inner-join clause joining {@code MonMetrics.Dimensions} once per
   * dimension, aliased {@code d0, d1, ...}, each constrained by the named
   * placeholders {@code :dname<i>} / {@code :dvalue<i>} (bound later via
   * DimensionQueries.bindDimensionsToQuery). Returns the empty string when
   * {@code dimensions} is {@code null}.
   */
  static String buildJoinClauseFor(Map<String, String> dimensions) {
    StringBuilder sbJoin = null;
    if (dimensions != null) {
      sbJoin = new StringBuilder();
      // One join per dimension; only the count matters here — the actual
      // name/value pairs are supplied when the query parameters are bound.
      for (int i = 0; i < dimensions.size(); i++)
        sbJoin.append(" inner join MonMetrics.Dimensions d")
            .append(i)
            .append(" on d")
            .append(i)
            .append(".name = :dname")
            .append(i)
            .append(" and d")
            .append(i)
            .append(".value = :dvalue")
            .append(i)
            .append(" and dd.dimension_set_id = d")
            .append(i)
            .append(".dimension_set_id");
    }
    return sbJoin == null ? "" : sbJoin.toString();
  }

  /**
   * Looks up the name/value dimension pairs stored for the given
   * dimension-set id.
   */
  static Map<String, String> dimensionsFor(Handle handle, byte[] dimensionSetId) {
    return SqlQueries.keyValuesFor(handle,
        "select name, value from MonMetrics.Dimensions where dimension_set_id = ?", dimensionSetId);
  }
}

View File

@ -1,35 +0,0 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Guards against SQL injection for values interpolated into
 * string-formatted queries.
 */
public class SQLSanitizer {
  // Whitelist: one or more of word chars [a-zA-Z_0-9], dash, or dot — nothing else.
  private static final Pattern p = Pattern.compile("^(\\w|-|\\.)+$");

  /**
   * Validates that {@code taintedString} contains only whitelisted characters
   * and returns it unchanged; throws {@link Exception} describing the bad
   * input otherwise.
   */
  static String sanitize(String taintedString) throws Exception {
    Matcher m = p.matcher(taintedString);
    if (!m.matches()) {
      throw new Exception(String.format("Input from user contains non-word chars[ %1$s ]. Only word chars [a-zA-Z_0-9], dash [-], and dot [.] allowed. ", taintedString));
    }
    return taintedString;
  }
}

View File

@ -38,122 +38,93 @@ import java.util.concurrent.TimeUnit;
public class StatisticInfluxDBRepositoryImpl implements StatisticRepository {
private static final Logger logger = LoggerFactory.getLogger(StatisticInfluxDBRepositoryImpl.class);
private static final Logger logger = LoggerFactory.getLogger(StatisticInfluxDBRepositoryImpl
.class);
private final MonApiConfiguration config;
private final InfluxDB influxDB;
private final MonApiConfiguration config;
private final InfluxDB influxDB;
public static final DateTimeFormatter DATETIME_FORMATTER = ISODateTimeFormat.dateTimeNoMillis();
public static final DateTimeFormatter DATETIME_FORMATTER = ISODateTimeFormat.dateTimeNoMillis();
@Inject
public StatisticInfluxDBRepositoryImpl(MonApiConfiguration config) {
this.config = config;
@Inject
public StatisticInfluxDBRepositoryImpl(MonApiConfiguration config) {
this.config = config;
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(), this.config.influxDB.getUser(),
this.config.influxDB.getPassword());
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(),
this.config.influxDB.getUser(), this.config.influxDB.getPassword());
}
@Override
public List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime,
List<String> statistics, int period) throws Exception {
String statsPart = buildStatsPart(statistics);
String timePart = Utils.WhereClauseBuilder.buildTimePart(startTime, endTime);
String dimsPart = Utils.WhereClauseBuilder.buildDimsPart(dimensions);
String periodPart = buildPeriodPart(period);
String query = String.format("select time %1$s from %2$s where tenant_id = '%3$s' %4$s %5$s " +
"%6$s", statsPart, Utils.SQLSanitizer.sanitize(name), Utils.SQLSanitizer.sanitize
(tenantId), timePart, dimsPart, periodPart);
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query,
TimeUnit.MILLISECONDS);
List<Statistics> statisticsList = new LinkedList<Statistics>();
// Should only be one serie -- name.
for (Serie serie : result) {
Statistics stat = new Statistics();
stat.setName(serie.getName());
List<String> colNamesList = new LinkedList<>(statistics);
colNamesList.add(0, "timestamp");
stat.setColumns(colNamesList);
stat.setDimensions(dimensions);
List<List<Object>> valObjArryArry = new LinkedList<List<Object>>();
stat.setStatistics(valObjArryArry);
Object[][] pointsArryArry = serie.getPoints();
for (int i = 0; i < pointsArryArry.length; i++) {
List<Object> valObjArry = new ArrayList<>();
// First column is always time.
valObjArry.add(DATETIME_FORMATTER.print((long) pointsArryArry[i][0]));
for (int j = 1; j < statistics.size() + 1; j++) {
valObjArry.add(pointsArryArry[i][j]);
}
valObjArryArry.add(valObjArry);
}
statisticsList.add(stat);
}
@Override
public List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime,
List<String> statistics, int period) throws Exception {
return statisticsList;
}
String statsPart = buildStatsPart(statistics);
String timePart = buildTimePart(startTime, endTime);
String dimsPart = buildDimPart(dimensions);
String periodPart = buildPeriodPart(period);
private String buildPeriodPart(int period) {
String query = String.format("select time %1$s from %2$s where tenant_id = '%3$s' %4$s %5$s %6$s",
statsPart, SQLSanitizer.sanitize(name), SQLSanitizer.sanitize(tenantId), timePart, dimsPart, periodPart);
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.MILLISECONDS);
List<Statistics> statisticsList = new LinkedList<Statistics>();
// Should only be one serie -- name.
for (Serie serie : result) {
Statistics stat = new Statistics();
stat.setName(serie.getName());
List<String> colNamesList = new LinkedList<>(statistics);
colNamesList.add(0, "timestamp");
stat.setColumns(colNamesList);
stat.setDimensions(dimensions);
List<List<Object>> valObjArryArry = new LinkedList<List<Object>>();
stat.setStatistics(valObjArryArry);
Object[][] pointsArryArry = serie.getPoints();
for (int i = 0; i < pointsArryArry.length; i++) {
List<Object> valObjArry = new ArrayList<>();
// First column is always time.
valObjArry.add(DATETIME_FORMATTER.print((long) pointsArryArry[i][0]));
for (int j = 1; j < statistics.size() + 1; j++) {
valObjArry.add(pointsArryArry[i][j]);
}
valObjArryArry.add(valObjArry);
}
statisticsList.add(stat);
}
return statisticsList;
String s = "";
if (period >= 1) {
s += String.format("group by time(%1$ds)", period);
}
private String buildPeriodPart(int period) {
return s;
}
String s = "";
if (period >= 1) {
s += String.format("group by time(%1$ds)", period);
}
private String buildStatsPart(List<String> statistics) {
return s;
String s = "";
for (String statistic : statistics) {
s += ",";
if (statistic.trim().toLowerCase().equals("avg")) {
s += " mean(value)";
} else {
s += " " + statistic + "(value)";
}
}
private String buildDimPart(Map<String, String> dims) throws Exception {
String s = "";
if (dims != null) {
for (String colName : dims.keySet()) {
if (s.length() > 0) {
s += " and";
}
s += String.format(" %1$s = '%2$s'", SQLSanitizer.sanitize(colName), SQLSanitizer.sanitize(dims.get(colName)));
}
if (s.length() > 0) {
s = " and " + s;
}
}
return s;
}
private String buildTimePart(DateTime startTime, DateTime endTime) {
String s = "";
if (startTime != null) {
s += String.format(" and time > %1$ds", startTime.getMillis() / 1000);
}
if (endTime != null) {
s += String.format(" and time < %1$ds", endTime.getMillis() / 1000);
}
return s;
}
private String buildStatsPart(List<String> statistics) {
String s = "";
for (String statistic : statistics) {
s += ",";
if (statistic.trim().toLowerCase().equals("avg")) {
s += " mean(value)";
} else {
s += " " + statistic + "(value)";
}
}
return s;
}
return s;
}
}

View File

@ -118,7 +118,7 @@ public class StatisticVerticaRepositoryImpl implements StatisticRepository {
sbWhere.append(" and def.name = :name");
String sql = String.format(FIND_BY_METRIC_DEF_SQL,
MetricQueries.buildJoinClauseFor(dimensions), sbWhere);
Utils.MetricQueries.buildJoinClauseFor(dimensions), sbWhere);
Query<Map<String, Object>> query = h.createQuery(sql)
.bind("tenantId", tenantId)
@ -132,7 +132,7 @@ public class StatisticVerticaRepositoryImpl implements StatisticRepository {
query.bind("endTime", new Timestamp(endTime.getMillis()));
}
DimensionQueries.bindDimensionsToQuery(query, dimensions);
Utils.DimensionQueries.bindDimensionsToQuery(query, dimensions);
// Execute
List<Map<String, Object>> rows = query.list();

View File

@ -1,51 +0,0 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import java.util.Map;
/**
 * Utilities for building sub alarm queries.
 */
final class SubAlarmQueries {
  // Static-only utility class; never instantiated.
  private SubAlarmQueries() {
  }

  /**
   * Builds an inner-join clause joining {@code sub_alarm_dimension} once per
   * dimension, aliased {@code d0, d1, ...}, each constrained by the named
   * placeholders {@code :dname<i>} / {@code :dvalue<i>} (bound later via
   * DimensionQueries.bindDimensionsToQuery). Returns the empty string when
   * {@code dimensions} is {@code null}.
   */
  static String buildJoinClauseFor(Map<String, String> dimensions) {
    StringBuilder sbJoin = null;
    if (dimensions != null) {
      sbJoin = new StringBuilder();
      // One join per dimension; name/value pairs are supplied at bind time.
      for (int i = 0; i < dimensions.size(); i++) {
        sbJoin.append(" inner join sub_alarm_dimension d")
            .append(i)
            .append(" on d")
            .append(i)
            .append(".dimension_name = :dname")
            .append(i)
            .append(" and d")
            .append(i)
            .append(".value = :dvalue")
            .append(i)
            .append(" and dim.sub_alarm_id = d")
            .append(i)
            .append(".sub_alarm_id");
      }
    }
    return sbJoin == null ? "" : sbJoin.toString();
  }
}

View File

@ -0,0 +1,148 @@
package com.hpcloud.mon.infrastructure.persistence;
import com.hpcloud.persistence.SqlQueries;
import org.joda.time.DateTime;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import java.util.Iterator;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Shared persistence helpers: SQL-injection sanitizing, where-clause building, and join-clause
 * building used by the InfluxDB and Vertica repository implementations.
 */
final class Utils {

  private Utils() {
  }

  /**
   * InfluxDB Utilities for protecting against SQL injection attacks.
   */
  static final class SQLSanitizer {

    // Accepts only word chars [a-zA-Z_0-9], dash and dot; anything else is rejected.
    private static final Pattern SAFE_CHARS_PATTERN = Pattern.compile("^(\\w|-|\\.)+$");

    private SQLSanitizer() {
    }

    /**
     * Returns {@code taintedString} unchanged after verifying it is safe to interpolate
     * into a query.
     *
     * @param taintedString user-supplied input destined for a query string
     * @return the input, verified safe
     * @throws Exception if the input contains any character outside [a-zA-Z_0-9], '-' or '.'
     */
    static String sanitize(String taintedString) throws Exception {
      Matcher m = SAFE_CHARS_PATTERN.matcher(taintedString);
      if (!m.matches()) {
        throw new Exception(String.format("Input from user contains non-word chars[ %1$s ]. Only " +
            "word chars [a-zA-Z_0-9], dash [-], and dot [.] allowed. ", taintedString));
      }
      return taintedString;
    }
  }

  /**
   * InfluxDB Utilities for building parts of where clauses.
   */
  static final class WhereClauseBuilder {

    private WhereClauseBuilder() {
    }

    /**
     * Builds the time-range part of a where clause; times are emitted at second resolution.
     *
     * @param startTime lower bound (exclusive), or null for none
     * @param endTime upper bound (exclusive), or null for none
     * @return clause fragment, possibly empty
     */
    static String buildTimePart(DateTime startTime, DateTime endTime) {
      StringBuilder sb = new StringBuilder();
      if (startTime != null) {
        sb.append(String.format(" and time > %1$ds", startTime.getMillis() / 1000));
      }
      if (endTime != null) {
        sb.append(String.format(" and time < %1$ds", endTime.getMillis() / 1000));
      }
      return sb.toString();
    }

    /**
     * Builds equality predicates for each dimension; both names and values are sanitized
     * before being interpolated.
     *
     * @param dims dimension name/value pairs; may be null or empty
     * @return clause fragment, possibly empty
     * @throws Exception if any dimension name or value fails sanitizing
     */
    static String buildDimsPart(Map<String, String> dims) throws Exception {
      StringBuilder sb = new StringBuilder();
      if (dims != null) {
        // Iterate entries rather than keySet()+get() to avoid a second lookup per dimension.
        for (Map.Entry<String, String> entry : dims.entrySet()) {
          sb.append(String.format(" and %1$s = '%2$s'", SQLSanitizer.sanitize(entry.getKey()),
              SQLSanitizer.sanitize(entry.getValue())));
        }
      }
      return sb.toString();
    }
  }

  /**
   * Vertica utilities for building metric queries.
   */
  static final class MetricQueries {

    private MetricQueries() {
    }

    /**
     * Builds an inner-join clause with one MonMetrics.Dimensions join per dimension; each
     * join i references the named bind parameters :dname&lt;i&gt; and :dvalue&lt;i&gt;.
     *
     * @param dimensions dimension name/value pairs; may be null or empty
     * @return the join clause, or the empty string when there are no dimensions
     */
    static String buildJoinClauseFor(Map<String, String> dimensions) {
      if (dimensions == null) {
        return "";
      }
      StringBuilder sbJoin = new StringBuilder();
      for (int i = 0; i < dimensions.size(); i++) {
        sbJoin.append(" inner join MonMetrics.Dimensions d").append(i)
            .append(" on d").append(i).append(".name = :dname").append(i)
            .append(" and d").append(i).append(".value = :dvalue").append(i)
            .append(" and dd.dimension_set_id = d").append(i).append(".dimension_set_id");
      }
      return sbJoin.toString();
    }

    /**
     * Loads the name/value pairs for the given dimension set id.
     */
    static Map<String, String> dimensionsFor(Handle handle, byte[] dimensionSetId) {
      return SqlQueries.keyValuesFor(handle,
          "select name, value from MonMetrics.Dimensions where dimension_set_id = ?",
          dimensionSetId);
    }
  }

  /**
   * Vertica Utilities for building sub alarm queries.
   */
  static final class SubAlarmQueries {

    private SubAlarmQueries() {
    }

    /**
     * Builds an inner-join clause with one sub_alarm_dimension join per dimension; each
     * join i references the named bind parameters :dname&lt;i&gt; and :dvalue&lt;i&gt;.
     *
     * @param dimensions dimension name/value pairs; may be null or empty
     * @return the join clause, or the empty string when there are no dimensions
     */
    static String buildJoinClauseFor(Map<String, String> dimensions) {
      if (dimensions == null) {
        return "";
      }
      StringBuilder sbJoin = new StringBuilder();
      for (int i = 0; i < dimensions.size(); i++) {
        sbJoin.append(" inner join sub_alarm_dimension d").append(i)
            .append(" on d").append(i).append(".dimension_name = :dname").append(i)
            .append(" and d").append(i).append(".value = :dvalue").append(i)
            .append(" and dim.sub_alarm_id = d").append(i).append(".sub_alarm_id");
      }
      return sbJoin.toString();
    }
  }

  /**
   * Vertica Utilities for querying dimensions.
   *
   * This class has issues with testing with mockito because bind method on Query class
   * is final.
   */
  public static final class DimensionQueries {

    private DimensionQueries() {
    }

    /**
     * Binds each dimension to the query's :dname&lt;i&gt;/:dvalue&lt;i&gt; parameters, in
     * the map's iteration order (matching the join clause built for the same map).
     */
    static void bindDimensionsToQuery(Query<?> query, Map<String, String> dimensions) {
      if (dimensions != null) {
        int i = 0;
        for (Map.Entry<String, String> entry : dimensions.entrySet()) {
          query.bind("dname" + i, entry.getKey());
          query.bind("dvalue" + i, entry.getValue());
          i++;
        }
      }
    }
  }
}

View File

@ -1,26 +1,5 @@
package com.hpcloud.mon.infrastructure.persistence;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.util.StringMapper;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Resources;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
@ -31,6 +10,18 @@ import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.exception.EntityNotFoundException;
import com.hpcloud.mon.domain.model.alarm.Alarm;
import com.hpcloud.mon.domain.model.alarm.AlarmRepository;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.util.StringMapper;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.nio.charset.Charset;
import java.util.*;
import static org.testng.Assert.*;
@Test
public class AlarmRepositoryImplTest {
@ -115,7 +106,7 @@ public class AlarmRepositoryImplTest {
@Test(groups = "database")
public void shouldUpdate() {
db = new DBI("jdbc:mysql://localhost/mon", "root", "");
db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
handle = db.open();
repo = new AlarmRepositoryImpl(db);
beforeMethod();
@ -162,7 +153,7 @@ public class AlarmRepositoryImplTest {
@Test(groups = "database")
public void shouldFindSubAlarmMetricDefinitions() {
db = new DBI("jdbc:mysql://localhost/mon", "root", "");
db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
handle = db.open();
repo = new AlarmRepositoryImpl(db);
beforeMethod();
@ -189,7 +180,7 @@ public class AlarmRepositoryImplTest {
@Test(groups = "database")
public void shouldFindSubExpressions() {
db = new DBI("jdbc:mysql://localhost/mon", "root", "");
db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
handle = db.open();
repo = new AlarmRepositoryImpl(db);
beforeMethod();

View File

@ -23,7 +23,7 @@ public class AlarmStateHistoryVerticaRepositoryImplTest {
@BeforeClass
protected void setupClass() throws Exception {
Class.forName("com.vertica.jdbc.Driver");
db = new DBI("jdbc:vertica://192.168.10.8/mon", "dbadmin", "password");
db = new DBI("jdbc:vertica://192.168.10.4/mon", "dbadmin", "password");
handle = db.open();
repo = new AlarmStateHistoryVerticaRepositoryImpl(null, db);
}
@ -38,19 +38,19 @@ public class AlarmStateHistoryVerticaRepositoryImplTest {
handle.execute("truncate table MonAlarms.StateHistory");
}
public void create(String tenantId, String alarmId, AlarmState oldState, AlarmState newState,
String reason, String reasonData, DateTime timestamp) {
private void create(String tenantId, String alarmId, AlarmState oldState, AlarmState newState,
String reason, String reasonData, DateTime timestamp) {
try (Handle h = db.open()) {
h.insert(
"insert into MonAlarms.StateHistory (tenant_id, alarm_id, old_state, new_state, reason, reason_data, time_stamp) values (?, ?, ?, ?, ?, ?, ?)",
tenantId, alarmId, oldState.name(), newState.name(), reason, reasonData, new Timestamp(
timestamp.getMillis()));
h.insert("insert into MonAlarms.StateHistory (tenant_id, alarm_id, old_state, new_state, " +
"reason, reason_data, time_stamp) values (?, ?, ?, ?, ?, ?, ?)", tenantId, alarmId,
oldState.name(), newState.name(), reason, reasonData, new Timestamp(timestamp.getMillis
()));
}
}
@Test
public void shouldCreateAndFind() throws Exception {
create("bob", "123", AlarmState.UNDETERMINED, AlarmState.ALARM, "foo", "bar",
new DateTime());
create("bob", "123", AlarmState.UNDETERMINED, AlarmState.ALARM, "foo", "bar", new DateTime());
assertEquals(repo.findById("bob", "123").size(), 1);
}
}

View File

@ -25,7 +25,7 @@ public class MeasurementVerticaRepositoryImplTest {
@BeforeClass
protected void setupClass() throws Exception {
Class.forName("com.vertica.jdbc.Driver");
db = new DBI("jdbc:vertica://192.168.10.8/mon", "dbadmin", "password");
db = new DBI("jdbc:vertica://192.168.10.4/mon", "dbadmin", "password");
handle = db.open();
repo = new MeasurementVerticaRepositoryImpl(db);
}

View File

@ -24,7 +24,7 @@ public class MetricDefinitionVerticaRepositoryImplTest {
@BeforeClass
protected void setupClass() throws Exception {
Class.forName("com.vertica.jdbc.Driver");
db = new DBI("jdbc:vertica://192.168.10.8/mon", "dbadmin", "password");
db = new DBI("jdbc:vertica://192.168.10.4/mon", "dbadmin", "password");
handle = db.open();
repo = new MetricDefinitionVerticaRepositoryImpl(db);
}

View File

@ -0,0 +1,140 @@
package com.hpcloud.mon.infrastructure.persistence;
import org.joda.time.DateTime;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.Map;
public class UtilsTest {
@Test
public void SQLSanitizerSanitizeGoodDataTest() throws Exception {
String goodString = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + "-_.";
assert (goodString.equals(Utils.SQLSanitizer.sanitize(goodString)));
}
@Test(expectedExceptions = {Exception.class})
public void SQLSanitizerSanitizeBadDataTest1() throws Exception {
String badString = "';abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + "-_.";
assert (badString.equals(Utils.SQLSanitizer.sanitize(badString)));
}
@Test(expectedExceptions = {Exception.class})
public void SQLSanitizerSanitizeBadDataTest2() throws Exception {
String badStrng = "'a'bcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + "-_.";
assert (badStrng.equals(Utils.SQLSanitizer.sanitize(badStrng)));
}
@Test
public void whereClauseBuilderBuildTimePartTest() {
String expectedResult = " and time > 1388563261s and time < 1388563262s";
DateTime startTime = new DateTime(2014, 01, 01, 01, 01, 01);
DateTime endTime = new DateTime(2014, 01, 01, 01, 01, 02);
assert (expectedResult.equals(Utils.WhereClauseBuilder.buildTimePart(startTime, endTime)));
}
@Test
public void whereClauseBuilderBuildDimsPartTest1() throws Exception {
String expectedResult = "";
Map<String, String> dimsMap = new HashMap<>();
assert (expectedResult.equals(Utils.WhereClauseBuilder.buildDimsPart(dimsMap)));
}
@Test
public void whereClauseBuilderBuildDimsPartTest2() throws Exception {
String expectedResult = " and foo = 'bar'";
Map<String, String> dimsMap = new HashMap<>();
dimsMap.put("foo", "bar");
assert (expectedResult.equals(Utils.WhereClauseBuilder.buildDimsPart(dimsMap)));
}
@Test
public void whereClauseBuilderBuildDimsPartTest3() throws Exception {
String expectedResult = " and foo = 'bar' and biz = 'baz'";
Map<String, String> dimsMap = new HashMap<>();
dimsMap.put("foo", "bar");
dimsMap.put("biz", "baz");
assert (expectedResult.equals(Utils.WhereClauseBuilder.buildDimsPart(dimsMap)));
}
@Test
public void metricQueriesBuildJoinClauseForTest1() {
String expectedResult = " inner join MonMetrics.Dimensions d0 on d0.name = :dname0 and d0" +
".value " + "= :dvalue0 and dd.dimension_set_id = d0.dimension_set_id inner join " +
"MonMetrics.Dimensions d1 on d1.name = :dname1 and d1.value = :dvalue1 and dd" +
".dimension_set_id = d1.dimension_set_id";
Map<String, String> dimsMap = new HashMap<>();
dimsMap.put("foo", "bar");
dimsMap.put("biz", "baz");
assert (expectedResult.equals(Utils.MetricQueries.buildJoinClauseFor(dimsMap)));
}
@Test
public void metricQueriesBuildJoinClauseForTest2() {
String expectedResult = "";
Map<String, String> dimsMap = new HashMap<>();
assert (expectedResult.equals(Utils.MetricQueries.buildJoinClauseFor(dimsMap)));
}
@Test
public void metricQueriesBuildJoinClauseForTest3() {
String expectedResult = "";
Map<String, String> dimsMap = null;
assert (expectedResult.equals(Utils.MetricQueries.buildJoinClauseFor(dimsMap)));
}
@Test
public void metricQueriesSubAlarmQueriesTest1() {
String expectedResult = " inner join sub_alarm_dimension d0 on d0.dimension_name = :dname0 " +
"and d0.value = :dvalue0 and dim.sub_alarm_id = d0.sub_alarm_id inner join " +
"sub_alarm_dimension d1 on d1.dimension_name = :dname1 and d1.value = :dvalue1 and dim" +
".sub_alarm_id = d1.sub_alarm_id";
Map<String, String> dimsMap = new HashMap<>();
dimsMap.put("foo", "bar");
dimsMap.put("biz", "baz");
assert (expectedResult.equals(Utils.SubAlarmQueries.buildJoinClauseFor(dimsMap)));
}
@Test
public void metricQueriesSubAlarmQueriesTest2() {
String expectedResult = "";
Map<String, String> dimsMap = new HashMap<>();
assert (expectedResult.equals(Utils.SubAlarmQueries.buildJoinClauseFor(dimsMap)));
}
@Test
public void metricQueriesSubAlarmQueriesTest3() {
String expectedResult = "";
Map<String, String> dimsMap = null;
assert (expectedResult.equals(Utils.SubAlarmQueries.buildJoinClauseFor(dimsMap)));
}
}