This commit is contained in:
Derrick Johnson 2014-07-09 12:02:36 -07:00
commit 652b87918d
10 changed files with 1407 additions and 0 deletions

View File

@ -0,0 +1,166 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.hpcloud.mon.infrastructure.persistence.influxdb;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import javax.inject.Named;
import org.influxdb.InfluxDB;
import org.influxdb.dto.Serie;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import org.skife.jdbi.v2.util.StringMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.hpcloud.mon.MonApiConfiguration;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.domain.model.alarmstatehistory.AlarmStateHistory;
import com.hpcloud.mon.domain.model.alarmstatehistory.AlarmStateHistoryRepository;
import com.hpcloud.mon.infrastructure.persistence.DimensionQueries;
import com.hpcloud.mon.infrastructure.persistence.SubAlarmQueries;
/**
 * Alarm state history repository that reads history rows from InfluxDB and uses MySQL
 * to resolve a dimension filter into the set of matching alarm ids.
 */
public class AlarmStateHistoryInfluxDbRepositoryImpl implements AlarmStateHistoryRepository {

  private static final Logger logger = LoggerFactory
      .getLogger(AlarmStateHistoryInfluxDbRepositoryImpl.class);

  private final MonApiConfiguration config;
  private final InfluxDB influxDB;
  private final DBI mysql;

  // The %s placeholder receives an optional join clause built from the requested
  // dimensions (see SubAlarmQueries.buildJoinClauseFor). Soft-deleted alarms are excluded.
  private static final String FIND_ALARMS_SQL = "select distinct a.id from alarm as a " + "join"
      + " sub_alarm sa on a.id = sa.alarm_id " + "left outer join sub_alarm_dimension dim on "
      + "sa.id = dim.sub_alarm_id%s " + "where a.tenant_id = :tenantId and a.deleted_at is "
      + "NULL";

  /**
   * @param mysql JDBI handle factory for the MySQL configuration database
   * @param config API configuration; supplies the InfluxDB database name
   * @param influxDB InfluxDB client used to execute the history queries
   */
  @Inject
  public AlarmStateHistoryInfluxDbRepositoryImpl(@Named("mysql") DBI mysql,
      MonApiConfiguration config, InfluxDB influxDB) {
    this.mysql = mysql;
    this.config = config;
    this.influxDB = influxDB;
  }

  /** Returns the state-transition history for a single alarm of the given tenant. */
  @Override
  public List<AlarmStateHistory> findById(String tenantId, String alarmId) throws Exception {
    // InfluxDB orders queries by time stamp desc by default.
    String query = buildQueryForFindById(tenantId, alarmId);
    return queryInfluxDBForAlarmStateHistory(query);
  }

  // Builds the single-alarm history query. Both interpolated values are passed through
  // the sanitizer to guard against query injection (it may throw, hence the checked
  // Exception on the signature).
  String buildQueryForFindById(String tenantId, String alarmId) throws Exception {
    return String.format("select alarm_id, old_state, new_state, reason, reason_data "
        + "from alarm_state_history " + "where tenant_id = '%1$s' and alarm_id = '%2$s'",
        Utils.SQLSanitizer.sanitize(tenantId), Utils.SQLSanitizer.sanitize(alarmId));
  }

  /**
   * Returns state history for all alarms of the tenant matching the given dimensions,
   * restricted to the [startTime, endTime] window (endTime may be null).
   */
  @Override
  public Collection<AlarmStateHistory> find(String tenantId, Map<String, String> dimensions,
      DateTime startTime, @Nullable DateTime endTime) throws Exception {

    List<String> alarmIds = null;
    // Find alarm Ids for dimensions
    try (Handle h = mysql.open()) {
      String sql = String.format(FIND_ALARMS_SQL, SubAlarmQueries.buildJoinClauseFor(dimensions));
      Query<Map<String, Object>> query = h.createQuery(sql).bind("tenantId", tenantId);
      DimensionQueries.bindDimensionsToQuery(query, dimensions);
      alarmIds = query.map(StringMapper.FIRST).list();
    }

    // No alarms match the filter -> nothing to look up in InfluxDB.
    if (alarmIds == null || alarmIds.isEmpty()) {
      return Collections.emptyList();
    }

    String timePart = buildTimePart(startTime, endTime);
    String alarmsPart = buildAlarmsPart(alarmIds);
    String query = buildQueryForFind(tenantId, timePart, alarmsPart);

    return queryInfluxDBForAlarmStateHistory(query);
  }

  String buildTimePart(DateTime startTime, DateTime endTime) {
    return Utils.WhereClauseBuilder.buildTimePart(startTime, endTime);
  }

  String buildQueryForFind(String tenantId, String timePart, String alarmsPart) throws Exception {
    return String.format("select alarm_id, old_state, new_state, reason, reason_data "
        + "from alarm_state_history " + "where tenant_id = '%1$s' %2$s %3$s",
        Utils.SQLSanitizer.sanitize(tenantId), timePart, alarmsPart);
  }

  // Renders " and ( alarm_id = 'a'  or  alarm_id = 'b' ...)"; an empty list yields "".
  // The ids come from the MySQL query above and are interpolated without sanitizing.
  String buildAlarmsPart(List<String> alarmIds) {
    StringBuilder sb = new StringBuilder();
    for (String alarmId : alarmIds) {
      if (sb.length() > 0) {
        sb.append(" or ");
      }
      sb.append(String.format(" alarm_id = '%1$s' ", alarmId));
    }
    if (sb.length() > 0) {
      sb.insert(0, " and (");
      sb.insert(sb.length(), ")");
    }
    return sb.toString();
  }

  // Runs the query and converts each returned point into an AlarmStateHistory.
  // Column layout relied on here: 0 = time (ms), 1 = sequence number (skipped),
  // then the selected columns starting at index 2.
  private List<AlarmStateHistory> queryInfluxDBForAlarmStateHistory(String query) {
    logger.debug("Query string: {}", query);

    List<Serie> result =
        this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.MILLISECONDS);

    List<AlarmStateHistory> alarmStateHistoryList = new LinkedList<>();
    // Should only be one serie -- alarm_state_history.
    for (Serie serie : result) {
      Object[][] valObjArryArry = serie.getPoints();
      for (int i = 0; i < valObjArryArry.length; i++) {
        AlarmStateHistory alarmStateHistory = new AlarmStateHistory();
        // Time is always in position 0.
        Double timeDouble = (Double) valObjArryArry[i][0];
        alarmStateHistory.setTimestamp(new DateTime(timeDouble.longValue(), DateTimeZone.UTC));
        // Sequence_number is always in position 1, so selected columns start at index 2.
        alarmStateHistory.setAlarmId((String) valObjArryArry[i][2]);
        alarmStateHistory.setNewState(AlarmState.valueOf((String) valObjArryArry[i][3]));
        alarmStateHistory.setOldState(AlarmState.valueOf((String) valObjArryArry[i][4]));
        alarmStateHistory.setReason((String) valObjArryArry[i][5]);
        alarmStateHistory.setReasonData((String) valObjArryArry[i][6]);
        alarmStateHistoryList.add(alarmStateHistory);
      }
    }
    return alarmStateHistoryList;
  }
}

View File

@ -0,0 +1,55 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.hpcloud.mon.infrastructure.persistence.influxdb;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Jackson-bound configuration for the InfluxDB connection: database name,
 * replication factor, endpoint URL, and credentials.
 */
public class InfluxDbConfig {

  /** Name of the InfluxDB database to query. */
  @JsonProperty
  String name;

  /** Replication factor configured for the database. */
  @JsonProperty
  int replicationFactor;

  /** Base URL of the InfluxDB endpoint. */
  @JsonProperty
  String url;

  /** User name used to authenticate against InfluxDB. */
  @JsonProperty
  String user;

  /** Password used to authenticate against InfluxDB. */
  @JsonProperty
  String password;

  public String getName() {
    return this.name;
  }

  public int getReplicationFactor() {
    return this.replicationFactor;
  }

  public String getUrl() {
    return this.url;
  }

  public String getUser() {
    return this.user;
  }

  public String getPassword() {
    return this.password;
  }
}

View File

@ -0,0 +1,100 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.hpcloud.mon.infrastructure.persistence.influxdb;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import org.influxdb.InfluxDB;
import org.influxdb.dto.Serie;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.hpcloud.mon.MonApiConfiguration;
import com.hpcloud.mon.domain.model.measurement.MeasurementRepository;
import com.hpcloud.mon.domain.model.measurement.Measurements;
/**
 * Measurement repository backed by InfluxDB: fetches raw values for one metric series
 * and returns them as a single Measurements entry.
 */
public class MeasurementInfluxDbRepositoryImpl implements MeasurementRepository {

  private static final Logger logger = LoggerFactory
      .getLogger(MeasurementInfluxDbRepositoryImpl.class);

  /** ISO-8601 (no millis) formatter fixed to UTC, used to render point timestamps. */
  public static final DateTimeFormatter DATETIME_FORMATTER = ISODateTimeFormat.dateTimeNoMillis()
      .withZoneUTC();

  private final MonApiConfiguration config;
  private final InfluxDB influxDB;

  @Inject
  public MeasurementInfluxDbRepositoryImpl(MonApiConfiguration config, InfluxDB influxDB) {
    this.config = config;
    this.influxDB = influxDB;
  }

  /**
   * Returns the measurements recorded for the named metric within the time window,
   * optionally filtered by dimensions. Each returned row is
   * [sequence number, ISO timestamp, value].
   */
  @Override
  public Collection<Measurements> find(String tenantId, String name,
      Map<String, String> dimensions, DateTime startTime, @Nullable DateTime endTime)
      throws Exception {

    // Assemble the where-clause fragments, then the full query; name and tenant id are
    // sanitized before interpolation.
    final String dimsPart = Utils.WhereClauseBuilder.buildDimsPart(dimensions);
    final String timePart = Utils.WhereClauseBuilder.buildTimePart(startTime, endTime);
    final String query =
        String.format("select value " + "from %1$s " + "where tenant_id = '%2$s' %3$s %4$s",
            Utils.SQLSanitizer.sanitize(name), Utils.SQLSanitizer.sanitize(tenantId), timePart,
            dimsPart);
    logger.debug("Query string: {}", query);

    final List<Serie> series =
        this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.MILLISECONDS);

    final Measurements measurements = new Measurements();
    measurements.setName(name);
    measurements.setDimensions(dimensions == null ? new HashMap<String, String>() : dimensions);

    final List<Object[]> rows = new LinkedList<>();
    for (final Serie serie : series) {
      for (final Object[] point : serie.getPoints()) {
        final Object[] row = new Object[3];
        // Point layout: position 0 = time (ms), position 1 = sequence number,
        // position 2 = the selected value.
        row[0] = ((Double) point[1]).longValue();
        row[1] = DATETIME_FORMATTER.print(((Double) point[0]).longValue());
        row[2] = (Double) point[2];
        rows.add(row);
      }
    }
    measurements.setMeasurements(rows);

    return Arrays.asList(measurements);
  }
}

View File

@ -0,0 +1,73 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.hpcloud.mon.infrastructure.persistence.influxdb;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.influxdb.InfluxDB;
import org.influxdb.dto.Serie;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.hpcloud.mon.MonApiConfiguration;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.metric.MetricDefinitionRepository;
/**
 * Metric definition repository backed by InfluxDB: lists the metric (series) names
 * visible to a tenant, optionally filtered by dimensions.
 */
public class MetricDefinitionInfluxDbRepositoryImpl implements MetricDefinitionRepository {

  // Fixed: was bound to AlarmStateHistoryInfluxDbRepositoryImpl.class (copy/paste error),
  // which mis-attributed this repository's log output.
  private static final Logger logger = LoggerFactory
      .getLogger(MetricDefinitionInfluxDbRepositoryImpl.class);

  private final MonApiConfiguration config;
  private final InfluxDB influxDB;

  @Inject
  public MetricDefinitionInfluxDbRepositoryImpl(MonApiConfiguration config, InfluxDB influxDB) {
    this.config = config;
    this.influxDB = influxDB;
  }

  /**
   * Returns one MetricDefinition per series that has at least one point for the tenant
   * (and matching dimensions, when given).
   *
   * @param tenantId tenant scoping the query; sanitized before interpolation
   * @param name unused; every series is scanned (see comment below)
   * @param dimensions optional dimension filter, echoed back onto each result
   */
  @Override
  public List<MetricDefinition> find(String tenantId, String name, Map<String, String> dimensions)
      throws Exception {
    String dimsPart = Utils.WhereClauseBuilder.buildDimsPart(dimensions);

    // name is not used in the query: /.*/ scans all series, and each matching
    // series' name becomes the metric name.
    String query =
        String.format("select first(value) from /.*/ where tenant_id = '%1$s' %2$s",
            Utils.SQLSanitizer.sanitize(tenantId), dimsPart);

    logger.debug("Query string: {}", query);

    // NOTE(review): the sibling InfluxDB repositories query with TimeUnit.MILLISECONDS;
    // confirm SECONDS is intentional here (time values are not read from the result).
    List<Serie> result =
        this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.SECONDS);

    List<MetricDefinition> metricDefinitionList = new ArrayList<>(result.size());
    for (Serie serie : result) {
      MetricDefinition metricDefinition = new MetricDefinition();
      metricDefinition.name = serie.getName();
      // The requested filter is echoed back; dimensions are not read from the results.
      metricDefinition.setDimensions(dimensions == null ? new HashMap<String, String>()
          : dimensions);
      metricDefinitionList.add(metricDefinition);
    }
    return metricDefinitionList;
  }
}

View File

@ -0,0 +1,125 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.hpcloud.mon.infrastructure.persistence.influxdb;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import org.influxdb.InfluxDB;
import org.influxdb.dto.Serie;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.hpcloud.mon.MonApiConfiguration;
import com.hpcloud.mon.domain.model.statistic.StatisticRepository;
import com.hpcloud.mon.domain.model.statistic.Statistics;
/**
 * Statistics repository backed by InfluxDB: computes aggregate statistics over a metric
 * series, optionally grouped into fixed time periods.
 */
public class StatisticInfluxDbRepositoryImpl implements StatisticRepository {

  private static final Logger logger = LoggerFactory
      .getLogger(StatisticInfluxDbRepositoryImpl.class);

  private final MonApiConfiguration config;
  private final InfluxDB influxDB;

  /** ISO-8601 (no millis) formatter fixed to UTC, used to render point timestamps. */
  public static final DateTimeFormatter DATETIME_FORMATTER = ISODateTimeFormat.dateTimeNoMillis()
      .withZoneUTC();

  @Inject
  public StatisticInfluxDbRepositoryImpl(MonApiConfiguration config, InfluxDB influxDB) {
    this.config = config;
    this.influxDB = influxDB;
  }

  /**
   * Computes the requested statistics for a metric over [startTime, endTime].
   *
   * @param tenantId scopes the query; sanitized before interpolation
   * @param name metric (series) name; sanitized before interpolation
   * @param dimensions optional dimension filter, echoed back onto each result
   * @param startTime inclusive start of the window
   * @param endTime optional end of the window
   * @param statistics statistic names; "avg" maps to InfluxDB's mean(), each entry
   *        becomes one result column after the timestamp
   * @param period grouping period in seconds; values below 1 disable grouping
   */
  @Override
  public List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
      DateTime startTime, @Nullable DateTime endTime, List<String> statistics, int period)
      throws Exception {

    String statsPart = buildStatsPart(statistics);
    String timePart = Utils.WhereClauseBuilder.buildTimePart(startTime, endTime);
    String dimsPart = Utils.WhereClauseBuilder.buildDimsPart(dimensions);
    String periodPart = buildPeriodPart(period);

    String query =
        String.format("select time %1$s from %2$s where tenant_id = '%3$s' %4$s %5$s " + "%6$s",
            statsPart, Utils.SQLSanitizer.sanitize(name), Utils.SQLSanitizer.sanitize(tenantId),
            timePart, dimsPart, periodPart);

    logger.debug("Query string: {}", query);

    List<Serie> result =
        this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.MILLISECONDS);

    List<Statistics> statisticsList = new LinkedList<Statistics>();
    // Should only be one serie -- name.
    for (Serie serie : result) {
      Statistics stat = new Statistics();
      stat.setName(serie.getName());
      // Column headers: "timestamp" followed by the requested statistic names,
      // matching the per-row layout built below.
      List<String> colNamesList = new LinkedList<>(statistics);
      colNamesList.add(0, "timestamp");
      stat.setColumns(colNamesList);
      stat.setDimensions(dimensions == null ? new HashMap<String, String>() : dimensions);
      List<List<Object>> valObjArryArry = new LinkedList<List<Object>>();
      stat.setStatistics(valObjArryArry);
      Object[][] pointsArryArry = serie.getPoints();
      for (int i = 0; i < pointsArryArry.length; i++) {
        List<Object> valObjArry = new ArrayList<>();
        // First column is always time.
        Double timeDouble = (Double) pointsArryArry[i][0];
        valObjArry.add(DATETIME_FORMATTER.print(timeDouble.longValue()));
        // Remaining columns are the aggregates, one per requested statistic.
        for (int j = 1; j < statistics.size() + 1; j++) {
          valObjArry.add(pointsArryArry[i][j]);
        }
        valObjArryArry.add(valObjArry);
      }
      statisticsList.add(stat);
    }
    return statisticsList;
  }

  // Renders "group by time(Ns)" for period >= 1, "" otherwise (no grouping).
  private String buildPeriodPart(int period) {
    return period >= 1 ? String.format("group by time(%1$ds)", period) : "";
  }

  // Renders ", stat1(value), stat2(value), ..." with "avg" mapped to mean().
  // Fixed: uses a StringBuilder instead of repeated String concatenation, and a
  // locale-independent equalsIgnoreCase instead of toLowerCase().equals("avg").
  // NOTE(review): statistic names are interpolated into the query without sanitizing --
  // confirm callers validate them against an allowed-statistics list.
  private String buildStatsPart(List<String> statistics) {
    StringBuilder sb = new StringBuilder();
    for (String statistic : statistics) {
      sb.append(',');
      if ("avg".equalsIgnoreCase(statistic.trim())) {
        sb.append(" mean(value)");
      } else {
        sb.append(' ').append(statistic).append("(value)");
      }
    }
    return sb.toString();
  }
}

View File

@ -0,0 +1,323 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.hpcloud.mon.infrastructure.persistence.mysql;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.inject.Named;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import org.skife.jdbi.v2.util.StringMapper;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.exception.EntityNotFoundException;
import com.hpcloud.mon.domain.model.alarm.Alarm;
import com.hpcloud.mon.domain.model.alarm.AlarmRepository;
import com.hpcloud.mon.infrastructure.persistence.DimensionQueries;
import com.hpcloud.mon.infrastructure.persistence.SubAlarmQueries;
import com.hpcloud.persistence.BeanMapper;
/**
 * Alarm repository implementation backed by MySQL via JDBI. Creates and updates run in
 * explicit transactions; deletes are soft (deleted_at timestamp).
 */
public class AlarmMySqlRepositoryImpl implements AlarmRepository {

  // Fetches all sub-alarms for one alarm, with each sub-alarm's dimensions flattened by
  // group_concat into a single comma-separated "name=value" string (see dimensionsFor).
  private static final String SUB_ALARM_SQL =
      "select sa.*, sad.dimensions from sub_alarm as sa "
          + "left join (select sub_alarm_id, group_concat(dimension_name, '=', value) as dimensions from sub_alarm_dimension group by sub_alarm_id ) as sad "
          + "on sad.sub_alarm_id = sa.id where sa.alarm_id = :alarmId";

  private final DBI db;

  @Inject
  public AlarmMySqlRepositoryImpl(@Named("mysql") DBI db) {
    this.db = db;
  }

  /**
   * Creates an alarm together with its sub-alarms and per-state actions in a single
   * transaction. New alarms start in state UNDETERMINED with actions enabled.
   */
  @Override
  public Alarm create(String tenantId, String id, String name, String description, String severity,
      String expression, Map<String, AlarmSubExpression> subExpressions, List<String> alarmActions,
      List<String> okActions, List<String> undeterminedActions) {
    Handle h = db.open();
    try {
      h.begin();
      h.insert(
          "insert into alarm (id, tenant_id, name, description, severity, expression, state, actions_enabled, created_at, updated_at, deleted_at) values (?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW(), NULL)",
          id, tenantId, name, description, severity, expression,
          AlarmState.UNDETERMINED.toString(), true);
      // Persist sub-alarms
      createSubExpressions(h, id, subExpressions);
      // Persist actions
      persistActions(h, id, AlarmState.ALARM, alarmActions);
      persistActions(h, id, AlarmState.OK, okActions);
      persistActions(h, id, AlarmState.UNDETERMINED, undeterminedActions);
      h.commit();
      return new Alarm(id, name, description, severity, expression, AlarmState.UNDETERMINED, true,
          alarmActions, okActions == null ? Collections.<String>emptyList() : okActions,
          undeterminedActions == null ? Collections.<String>emptyList() : undeterminedActions);
    } catch (RuntimeException e) {
      // Roll back the whole transaction so no partial alarm is left behind.
      h.rollback();
      throw e;
    } finally {
      h.close();
    }
  }

  /** Soft-deletes by stamping deleted_at; already-deleted alarms are treated as missing. */
  @Override
  public void deleteById(String tenantId, String alarmId) {
    try (Handle h = db.open()) {
      if (h
          .update(
              "update alarm set deleted_at = NOW() where tenant_id = ? and id = ? and deleted_at is NULL",
              tenantId, alarmId) == 0)
        throw new EntityNotFoundException("No alarm exists for %s", alarmId);
    }
  }

  /** Returns whether a non-deleted alarm with this name exists for the tenant. */
  @Override
  public boolean exists(String tenantId, String name) {
    try (Handle h = db.open()) {
      return h
          .createQuery(
              "select exists(select 1 from alarm where tenant_id = :tenantId and name = :name and deleted_at is NULL)")
          .bind("tenantId", tenantId).bind("name", name).mapTo(Boolean.TYPE).first();
    }
  }

  /**
   * Finds non-deleted alarms for a tenant, optionally filtered by name, dimensions and
   * state; each result is hydrated with its per-state action lists.
   */
  @Override
  @SuppressWarnings("unchecked")
  public List<Alarm> find(String tenantId, String name, Map<String, String> dimensions, String state) {
    try (Handle h = db.open()) {
      // First %s receives the dimension join clause; second the optional name/state filters.
      String query =
          "select distinct alarm.id, alarm.description, alarm.tenant_id, alarm.severity, alarm.expression, alarm.state, alarm.name, alarm.actions_enabled, alarm.created_at, alarm.updated_at, alarm.deleted_at "
              + "from alarm join sub_alarm sub on alarm.id = sub.alarm_id "
              + "left outer join sub_alarm_dimension dim on sub.id = dim.sub_alarm_id%s "
              + "where tenant_id = :tenantId and deleted_at is NULL %s";
      StringBuilder sbWhere = new StringBuilder();
      if (name != null) {
        sbWhere.append(" and alarm.name = :name");
      }
      if (state != null) {
        sbWhere.append(" and alarm.state = :state");
      }
      String sql = String.format(query, SubAlarmQueries.buildJoinClauseFor(dimensions), sbWhere);
      Query<?> q = h.createQuery(sql).bind("tenantId", tenantId);
      // Bind only the parameters that were actually appended above.
      if (name != null) {
        q.bind("name", name);
      }
      if (state != null) {
        q.bind("state", state);
      }
      q = q.map(new BeanMapper<Alarm>(Alarm.class));
      DimensionQueries.bindDimensionsToQuery(q, dimensions);
      List<Alarm> alarms = (List<Alarm>) q.list();
      for (Alarm alarm : alarms)
        hydrateRelationships(h, alarm);
      return alarms;
    }
  }

  /**
   * Returns the non-deleted alarm with the given id, hydrated with its action lists.
   *
   * @throws EntityNotFoundException if no such alarm exists for the tenant
   */
  @Override
  public Alarm findById(String tenantId, String alarmId) {
    try (Handle h = db.open()) {
      Alarm alarm =
          h.createQuery(
              "select * from alarm where tenant_id = :tenantId and id = :id and deleted_at is NULL")
              .bind("tenantId", tenantId).bind("id", alarmId)
              .map(new BeanMapper<Alarm>(Alarm.class)).first();
      if (alarm == null)
        throw new EntityNotFoundException("No alarm exists for %s", alarmId);
      hydrateRelationships(h, alarm);
      return alarm;
    }
  }

  /** Maps each sub-alarm id of the alarm to its metric definition (name + dimensions). */
  @Override
  public Map<String, MetricDefinition> findSubAlarmMetricDefinitions(String alarmId) {
    try (Handle h = db.open()) {
      List<Map<String, Object>> rows = h.createQuery(SUB_ALARM_SQL).bind("alarmId", alarmId).list();
      Map<String, MetricDefinition> subAlarmMetricDefs = new HashMap<>();
      for (Map<String, Object> row : rows) {
        String id = (String) row.get("id");
        String metricName = (String) row.get("metric_name");
        Map<String, String> dimensions = dimensionsFor((String) row.get("dimensions"));
        subAlarmMetricDefs.put(id, new MetricDefinition(metricName, dimensions));
      }
      return subAlarmMetricDefs;
    }
  }

  /** Maps each sub-alarm id of the alarm to its full sub-expression (function, operator, ...). */
  @Override
  public Map<String, AlarmSubExpression> findSubExpressions(String alarmId) {
    try (Handle h = db.open()) {
      List<Map<String, Object>> rows = h.createQuery(SUB_ALARM_SQL).bind("alarmId", alarmId).list();
      Map<String, AlarmSubExpression> subExpressions = new HashMap<>();
      for (Map<String, Object> row : rows) {
        String id = (String) row.get("id");
        AggregateFunction function = AggregateFunction.fromJson((String) row.get("function"));
        String metricName = (String) row.get("metric_name");
        AlarmOperator operator = AlarmOperator.fromJson((String) row.get("operator"));
        Double threshold = (Double) row.get("threshold");
        Integer period = (Integer) row.get("period");
        Integer periods = (Integer) row.get("periods");
        Map<String, String> dimensions = dimensionsFor((String) row.get("dimensions"));
        subExpressions.put(id, new AlarmSubExpression(function, new MetricDefinition(metricName,
            dimensions), operator, threshold, period, periods));
      }
      return subExpressions;
    }
  }

  /**
   * Updates an alarm and reconciles its sub-alarms and actions in a single transaction.
   * With patch=true only the provided action lists are replaced; otherwise all existing
   * actions are deleted and re-inserted.
   */
  @Override
  public void update(String tenantId, String id, boolean patch, String name, String description,
      String expression, String severity, AlarmState state, boolean actionsEnabled,
      Collection<String> oldSubAlarmIds, Map<String, AlarmSubExpression> changedSubAlarms,
      Map<String, AlarmSubExpression> newSubAlarms, List<String> alarmActions,
      List<String> okActions, List<String> undeterminedActions) {
    Handle h = db.open();
    try {
      h.begin();
      h.insert(
          "update alarm set name = ?, description = ?, expression = ?, severity = ?, state = ?, actions_enabled = ?, updated_at = NOW() where tenant_id = ? and id = ?",
          name, description, expression, severity, state.name(), actionsEnabled, tenantId, id);
      // Delete old sub-alarms
      if (oldSubAlarmIds != null)
        for (String oldSubAlarmId : oldSubAlarmIds)
          h.execute("delete from sub_alarm where id = ?", oldSubAlarmId);
      // Update changed sub-alarms
      if (changedSubAlarms != null)
        for (Map.Entry<String, AlarmSubExpression> entry : changedSubAlarms.entrySet()) {
          AlarmSubExpression sa = entry.getValue();
          h.execute(
              "update sub_alarm set operator = ?, threshold = ?, updated_at = NOW() where id = ?",
              sa.getOperator().name(), sa.getThreshold(), entry.getKey());
        }
      // Insert new sub-alarms
      createSubExpressions(h, id, newSubAlarms);
      // Delete old actions
      if (patch) {
        deleteActions(h, id, AlarmState.ALARM, alarmActions);
        deleteActions(h, id, AlarmState.OK, okActions);
        deleteActions(h, id, AlarmState.UNDETERMINED, undeterminedActions);
      } else
        h.execute("delete from alarm_action where alarm_id = ?", id);
      // Insert new actions
      persistActions(h, id, AlarmState.ALARM, alarmActions);
      persistActions(h, id, AlarmState.OK, okActions);
      persistActions(h, id, AlarmState.UNDETERMINED, undeterminedActions);
      h.commit();
    } catch (RuntimeException e) {
      h.rollback();
      throw e;
    } finally {
      h.close();
    }
  }

  // Clears the actions of one state, but only when a replacement list was supplied
  // (null means "leave this state's actions untouched" in patch mode).
  private void deleteActions(Handle handle, String id, AlarmState alarmState, List<String> actions) {
    if (actions != null)
      handle.execute("delete from alarm_action where alarm_id = ? and alarm_state = ?", id,
          alarmState.name());
  }

  // Parses the group_concat output ("k1=v1,k2=v2,...") back into a map.
  // Returns null for a null input (sub-alarm without dimensions); entries without
  // a '=' separator are skipped.
  private Map<String, String> dimensionsFor(String dimensionSet) {
    Map<String, String> dimensions = null;
    if (dimensionSet != null) {
      dimensions = new HashMap<String, String>();
      for (String kvStr : dimensionSet.split(",")) {
        String[] kv = kvStr.split("=");
        if (kv.length > 1)
          dimensions.put(kv[0], kv[1]);
      }
    }
    return dimensions;
  }

  private List<String> findActionsById(Handle handle, String alarmId, AlarmState state) {
    return handle
        .createQuery(
            "select action_id from alarm_action where alarm_id = :alarmId and alarm_state = :alarmState")
        .bind("alarmId", alarmId).bind("alarmState", state.name()).map(StringMapper.FIRST).list();
  }

  // Inserts one alarm_action row per action; a null list is a no-op.
  private void persistActions(Handle handle, String id, AlarmState alarmState, List<String> actions) {
    if (actions != null)
      for (String action : actions)
        handle.insert("insert into alarm_action values (?, ?, ?)", id, alarmState.name(), action);
  }

  // Populates the alarm's three per-state action lists from alarm_action.
  private void hydrateRelationships(Handle handle, Alarm alarm) {
    alarm.setAlarmActions(findActionsById(handle, alarm.getId(), AlarmState.ALARM));
    alarm.setOkActions(findActionsById(handle, alarm.getId(), AlarmState.OK));
    alarm.setUndeterminedActions(findActionsById(handle, alarm.getId(), AlarmState.UNDETERMINED));
  }

  // Inserts each sub-expression as a sub_alarm row (initial state UNDETERMINED) plus one
  // sub_alarm_dimension row per metric dimension. Null map is a no-op.
  private void createSubExpressions(Handle handle, String id,
      Map<String, AlarmSubExpression> alarmSubExpressions) {
    if (alarmSubExpressions != null)
      for (Map.Entry<String, AlarmSubExpression> subEntry : alarmSubExpressions.entrySet()) {
        String subAlarmId = subEntry.getKey();
        AlarmSubExpression subExpr = subEntry.getValue();
        MetricDefinition metricDef = subExpr.getMetricDefinition();
        // Persist sub-alarm
        handle
            .insert(
                "insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
                    + "values (?, ?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW())", subAlarmId, id, subExpr
                    .getFunction().name(), metricDef.name, subExpr.getOperator().name(), subExpr
                    .getThreshold(), subExpr.getPeriod(), subExpr.getPeriods(),
                AlarmState.UNDETERMINED.toString());
        // Persist sub-alarm dimensions
        if (metricDef.dimensions != null && !metricDef.dimensions.isEmpty())
          for (Map.Entry<String, String> dimEntry : metricDef.dimensions.entrySet())
            handle.insert("insert into sub_alarm_dimension values (?, ?, ?)", subAlarmId,
                dimEntry.getKey(), dimEntry.getValue());
      }
  }
}

View File

@ -0,0 +1,134 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.hpcloud.mon.infrastructure.persistence.mysql;
import java.util.List;
import java.util.UUID;
import javax.inject.Inject;
import javax.inject.Named;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hpcloud.mon.domain.exception.EntityExistsException;
import com.hpcloud.mon.domain.exception.EntityNotFoundException;
import com.hpcloud.mon.domain.model.notificationmethod.NotificationMethod;
import com.hpcloud.mon.domain.model.notificationmethod.NotificationMethodRepository;
import com.hpcloud.mon.domain.model.notificationmethod.NotificationMethodType;
import com.hpcloud.persistence.BeanMapper;
/**
 * MySQL-backed repository for notification methods (create, look up, update, delete).
 */
public class NotificationMethodMySqlRepositoryImpl implements NotificationMethodRepository {

  private static final Logger LOG = LoggerFactory
      .getLogger(NotificationMethodMySqlRepositoryImpl.class);

  private final DBI db;

  @Inject
  public NotificationMethodMySqlRepositoryImpl(@Named("mysql") DBI db) {
    this.db = db;
  }

  /**
   * Creates a notification method with a generated UUID id.
   * NOTE(review): duplicate detection is check-then-insert and not atomic across
   * concurrent requests — confirm a unique constraint backs it at the schema level.
   */
  @Override
  public NotificationMethod create(String tenantId, String name, NotificationMethodType type,
      String address) {
    if (exists(tenantId, name, type, address)) {
      throw new EntityExistsException("Notification method %s \"%s\" %s \"%s\" already exists.",
          tenantId, name, type, address);
    }
    try (Handle handle = db.open()) {
      final String id = UUID.randomUUID().toString();
      handle.insert(
          "insert into notification_method (id, tenant_id, name, type, address, created_at, updated_at) values (?, ?, ?, ?, ?, NOW(), NOW())",
          id, tenantId, name, type.toString(), address);
      LOG.debug("Creating notification method {} for {}", name, tenantId);
      return new NotificationMethod(id, name, type, address);
    }
  }

  /** Deletes the method; a zero row count means it never existed for this tenant. */
  @Override
  public void deleteById(String tenantId, String notificationMethodId) {
    try (Handle handle = db.open()) {
      final int deleted =
          handle.update("delete from notification_method where tenant_id = ? and id = ?",
              tenantId, notificationMethodId);
      if (deleted == 0) {
        throw new EntityNotFoundException("No notification method exists for %s",
            notificationMethodId);
      }
    }
  }

  @Override
  public boolean exists(String tenantId, String notificationMethodId) {
    try (Handle handle = db.open()) {
      return handle
          .createQuery(
              "select exists(select 1 from notification_method where tenant_id = :tenantId and id = :notificationMethodId)")
          .bind("tenantId", tenantId).bind("notificationMethodId", notificationMethodId)
          .mapTo(Boolean.TYPE).first();
    }
  }

  /** Returns whether a method with this exact (name, type, address) tuple already exists. */
  public boolean exists(String tenantId, String name, NotificationMethodType type, String address) {
    try (Handle handle = db.open()) {
      return handle
          .createQuery(
              "select exists(select 1 from notification_method where tenant_id = :tenantId and name = :name and type = :type and address = :address)")
          .bind("tenantId", tenantId).bind("name", name).bind("type", type.toString())
          .bind("address", address).mapTo(Boolean.TYPE).first();
    }
  }

  @Override
  public List<NotificationMethod> find(String tenantId) {
    try (Handle handle = db.open()) {
      return handle.createQuery("select * from notification_method where tenant_id = :tenantId")
          .bind("tenantId", tenantId)
          .map(new BeanMapper<NotificationMethod>(NotificationMethod.class)).list();
    }
  }

  @Override
  public NotificationMethod findById(String tenantId, String notificationMethodId) {
    try (Handle handle = db.open()) {
      final NotificationMethod found =
          handle.createQuery(
              "select * from notification_method where tenant_id = :tenantId and id = :id")
              .bind("tenantId", tenantId).bind("id", notificationMethodId)
              .map(new BeanMapper<NotificationMethod>(NotificationMethod.class)).first();
      if (found == null) {
        throw new EntityNotFoundException("No notification method exists for %s",
            notificationMethodId);
      }
      return found;
    }
  }

  /** Replaces name, type and address in place; zero rows updated means not found. */
  @Override
  public NotificationMethod update(String tenantId, String notificationMethodId, String name,
      NotificationMethodType type, String address) {
    try (Handle handle = db.open()) {
      final int updated =
          handle.update(
              "update notification_method set name = ?, type = ?, address = ? where tenant_id = ? and id = ?",
              name, type.name(), address, tenantId, notificationMethodId);
      if (updated == 0) {
        throw new EntityNotFoundException("No notification method exists for %s",
            notificationMethodId);
      }
      return new NotificationMethod(notificationMethodId, name, type, address);
    }
  }
}

View File

@ -0,0 +1,81 @@
package com.hpcloud.mon.infrastructure.persistence.influxdb;
import static org.testng.Assert.assertEquals;

import com.hpcloud.mon.MonApiConfiguration;
import com.hpcloud.mon.infrastructure.persistence.influxdb.AlarmStateHistoryInfluxDbRepositoryImpl;
import org.influxdb.InfluxDB;
import org.joda.time.DateTime;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.skife.jdbi.v2.DBI;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import scala.actors.threadpool.Arrays;
/**
 * Unit tests for the InfluxDB query-building helpers of
 * {@link AlarmStateHistoryInfluxDbRepositoryImpl}. Collaborators are mocked; no database is hit.
 *
 * <p>Fix: the original methods used bare {@code assert} statements, which are silently skipped
 * unless the JVM runs with {@code -ea}, so the tests could never fail in a default run. They now
 * use TestNG's {@code assertEquals}. The raw-typed {@code scala.actors.threadpool.Arrays} helper
 * is replaced by {@code java.util.Arrays}, which also removes the unchecked-warning suppression.
 */
@Test
public class AlarmStateHistoryInfluxDbRepositoryImplTest {
  @Mock(name = "mysql")
  private DBI mysql;
  @Mock
  private MonApiConfiguration monApiConfiguration;
  @Mock
  private InfluxDB influxDB;
  @InjectMocks
  private AlarmStateHistoryInfluxDbRepositoryImpl alarmStateHistoryInfluxDBRepository;

  @BeforeMethod(alwaysRun = true)
  public void initMocks() {
    MockitoAnnotations.initMocks(this);
  }

  public void buildQueryForFindByIdTest() throws Exception {
    String expected =
        "select alarm_id, old_state, new_state, reason, "
            + "reason_data from alarm_state_history where tenant_id = 'tenant-id' and alarm_id = "
            + "'alarm-id'";
    String actual =
        this.alarmStateHistoryInfluxDBRepository.buildQueryForFindById("tenant-id", "alarm-id");
    // TestNG argument order is (actual, expected).
    assertEquals(actual, expected);
  }

  public void buildTimePartTest() {
    // 1388559600 is 2014-01-01T00:00:00 in the default zone used by the original test data.
    String expected = " and time > 1388559600s and time < 1388559601s";
    String actual =
        this.alarmStateHistoryInfluxDBRepository.buildTimePart(new DateTime(2014, 1, 1, 0, 0, 0),
            new DateTime(2014, 1, 1, 0, 0, 1));
    assertEquals(actual, expected);
  }

  public void buildAlarmsPartTest() {
    String expected = " and ( alarm_id = 'id-1' or alarm_id = 'id-2' )";
    // Fully-qualified to avoid clashing with the legacy scala Arrays import.
    String actual =
        this.alarmStateHistoryInfluxDBRepository.buildAlarmsPart(java.util.Arrays.asList("id-1",
            "id-2"));
    assertEquals(actual, expected);
  }

  public void buildQueryForFindTest() throws Exception {
    String expected =
        "select alarm_id, old_state, new_state, reason, "
            + "reason_data from alarm_state_history where tenant_id = 'tenant-id' and time > "
            + "1388559600s and time < 1388559601s and ( alarm_id = 'id-1' or alarm_id = 'id-2' )";
    String actual =
        this.alarmStateHistoryInfluxDBRepository.buildQueryForFind("tenant-id",
            " and time > 1388559600s and time < 1388559601s", " and ( alarm_id = 'id-1' or "
                + "alarm_id" + " = 'id-2' )");
    assertEquals(actual, expected);
  }
}

View File

@ -0,0 +1,254 @@
package com.hpcloud.mon.infrastructure.persistence.mysql;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.util.StringMapper;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Resources;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.exception.EntityNotFoundException;
import com.hpcloud.mon.domain.model.alarm.Alarm;
import com.hpcloud.mon.domain.model.alarm.AlarmRepository;
/**
 * Tests for {@link AlarmMySqlRepositoryImpl}. Most tests run against an in-memory H2 database in
 * MySQL compatibility mode; tests in the "database" group rebind to a live MySQL instance.
 *
 * <p>Fix: {@code shouldFindSubExpressions} asserted on {@code findSubAlarmMetricDefinitions} for
 * the unknown-id case (copy-paste from the previous test); it now exercises
 * {@code findSubExpressions}. Reversed {@code assertEquals} arguments in {@code shouldUpdate} were
 * also put in TestNG's (actual, expected) order.
 */
@Test
public class AlarmMySqlRepositoryImplTest {
  private DBI db;
  private Handle handle;
  private AlarmRepository repo;
  private List<String> alarmActions;

  @BeforeClass
  protected void setupClass() throws Exception {
    db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
    handle = db.open();
    handle
        .execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
    repo = new AlarmMySqlRepositoryImpl(db);
    alarmActions = new ArrayList<String>();
    alarmActions.add("29387234");
    alarmActions.add("77778687");
  }

  @AfterClass
  protected void afterClass() {
    handle.close();
  }

  @BeforeMethod
  protected void beforeMethod() {
    // Reset to a known fixture: alarm 123 (one sub-alarm) and alarm 234 (two sub-alarms),
    // both owned by tenant 'bob'. FK checks are disabled so truncation order doesn't matter.
    handle.execute("SET foreign_key_checks = 0;");
    handle.execute("truncate table sub_alarm");
    handle.execute("truncate table alarm_action");
    handle.execute("truncate table sub_alarm_dimension");
    handle.execute("truncate table alarm");
    handle
        .execute("insert into alarm (id, tenant_id, name, severity, expression, state, actions_enabled, created_at, updated_at, deleted_at) "
            + "values ('123', 'bob', '90% CPU', 'LOW', 'avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu, device=1}) > 10', 'UNDETERMINED', 1, NOW(), NOW(), NULL)");
    handle
        .execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
            + "values ('111', '123', 'avg', 'hpcs.compute', 'GT', 10, 60, 1, 'UNDETERMINED', NOW(), NOW())");
    handle.execute("insert into sub_alarm_dimension values ('111', 'flavor_id', '777')");
    handle.execute("insert into sub_alarm_dimension values ('111', 'image_id', '888')");
    handle.execute("insert into sub_alarm_dimension values ('111', 'metric_name', 'cpu')");
    handle.execute("insert into sub_alarm_dimension values ('111', 'device', '1')");
    handle.execute("insert into alarm_action values ('123', 'ALARM', '29387234')");
    handle.execute("insert into alarm_action values ('123', 'ALARM', '77778687')");
    handle
        .execute("insert into alarm (id, tenant_id, name, severity, expression, state, actions_enabled, created_at, updated_at, deleted_at) "
            + "values ('234', 'bob', '50% CPU', 'LOW', 'avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 100', 'UNDETERMINED', 1, NOW(), NOW(), NULL)");
    handle
        .execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
            + "values ('222', '234', 'avg', 'hpcs.compute', 'GT', 20, 60, 1, 'UNDETERMINED', NOW(), NOW())");
    handle
        .execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
            + "values ('223', '234', 'avg', 'hpcs.compute', 'LT', 100, 60, 1, 'UNDETERMINED', NOW(), NOW())");
    handle.execute("insert into sub_alarm_dimension values ('222', 'flavor_id', '777')");
    handle.execute("insert into sub_alarm_dimension values ('222', 'image_id', '888')");
    handle.execute("insert into sub_alarm_dimension values ('222', 'metric_name', 'mem')");
    handle.execute("insert into alarm_action values ('234', 'ALARM', '29387234')");
    handle.execute("insert into alarm_action values ('234', 'ALARM', '77778687')");
  }

  public void shouldCreate() {
    Map<String, AlarmSubExpression> subExpressions =
        ImmutableMap
            .<String, AlarmSubExpression>builder()
            .put(
                "4433",
                AlarmSubExpression
                    .of("avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu}) > 10"))
            .build();
    Alarm alarmA =
        repo.create("555", "2345", "90% CPU", null, "LOW",
            "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu}) > 10", subExpressions,
            alarmActions, null, null);
    Alarm alarmB = repo.findById("555", alarmA.getId());
    assertEquals(alarmA, alarmB);
    // Assert that sub-alarm and sub-alarm-dimensions made it to the db
    assertEquals(
        handle.createQuery("select count(*) from sub_alarm where id = 4433")
            .map(StringMapper.FIRST).first(), "1");
    assertEquals(
        handle.createQuery("select count(*) from sub_alarm_dimension where sub_alarm_id = 4433")
            .map(StringMapper.FIRST).first(), "3");
  }

  @Test(groups = "database")
  public void shouldUpdate() {
    // Integration variant: rebinds to a live MySQL instance rather than in-memory H2.
    db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
    handle = db.open();
    repo = new AlarmMySqlRepositoryImpl(db);
    beforeMethod();
    List<String> oldSubAlarmIds = Arrays.asList("222");
    AlarmSubExpression changedSubExpression = AlarmSubExpression.of("avg(hpcs.compute) <= 200");
    Map<String, AlarmSubExpression> changedSubExpressions =
        ImmutableMap.<String, AlarmSubExpression>builder().put("223", changedSubExpression).build();
    AlarmSubExpression newSubExpression = AlarmSubExpression.of("avg(foo{flavor_id=777}) > 333");
    Map<String, AlarmSubExpression> newSubExpressions =
        ImmutableMap.<String, AlarmSubExpression>builder().put("555", newSubExpression).build();
    repo.update("bob", "234", false, "90% CPU", null,
        "avg(foo{flavor_id=777}) > 333 and avg(hpcs.compute) <= 200", "LOW", AlarmState.ALARM,
        false, oldSubAlarmIds, changedSubExpressions, newSubExpressions, alarmActions, null, null);
    Alarm alarm = repo.findById("bob", "234");
    Alarm expected =
        new Alarm("234", "90% CPU", null, "LOW",
            "avg(foo{flavor_id=777}) > 333 and avg(hpcs.compute) <= 200", AlarmState.ALARM, false,
            alarmActions, Collections.<String>emptyList(), Collections.<String>emptyList());
    // TestNG argument order is (actual, expected).
    assertEquals(alarm, expected);
    Map<String, AlarmSubExpression> subExpressions = repo.findSubExpressions("234");
    assertEquals(subExpressions.get("223"), changedSubExpression);
    assertEquals(subExpressions.get("555"), newSubExpression);
  }

  public void shouldFindById() {
    Alarm alarm = repo.findById("bob", "123");
    assertEquals(alarm.getId(), "123");
    assertEquals(alarm.getName(), "90% CPU");
    assertEquals(alarm.getSeverity(), "LOW");
    assertEquals(alarm.getExpression(),
        "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu, device=1}) > 10");
    assertEquals(alarm.getState(), AlarmState.UNDETERMINED);
    assertEquals(alarm.isActionsEnabled(), true);
    assertEquals(alarm.getAlarmActions(), alarmActions);
  }

  @Test(groups = "database")
  public void shouldFindSubAlarmMetricDefinitions() {
    db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
    handle = db.open();
    repo = new AlarmMySqlRepositoryImpl(db);
    beforeMethod();
    assertEquals(
        repo.findSubAlarmMetricDefinitions("123").get("111"),
        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
            .put("flavor_id", "777").put("image_id", "888").put("metric_name", "cpu")
            .put("device", "1").build()));
    assertEquals(
        repo.findSubAlarmMetricDefinitions("234").get("222"),
        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
            .put("flavor_id", "777").put("image_id", "888").put("metric_name", "mem").build()));
    assertTrue(repo.findSubAlarmMetricDefinitions("asdfasdf").isEmpty());
  }

  @Test(groups = "database")
  public void shouldFindSubExpressions() {
    db = new DBI("jdbc:mysql://192.168.10.4/mon", "monapi", "password");
    handle = db.open();
    repo = new AlarmMySqlRepositoryImpl(db);
    beforeMethod();
    assertEquals(
        repo.findSubExpressions("123").get("111"),
        new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute",
            ImmutableMap.<String, String>builder().put("flavor_id", "777").put("image_id", "888")
                .put("metric_name", "cpu").put("device", "1").build()), AlarmOperator.GT, 10, 60, 1));
    assertEquals(repo.findSubExpressions("234").get("223"), new AlarmSubExpression(
        AggregateFunction.AVG, new MetricDefinition("hpcs.compute", null), AlarmOperator.LT, 100,
        60, 1));
    // Was findSubAlarmMetricDefinitions (copy-paste from the previous test); this test is about
    // findSubExpressions, so exercise the unknown-id path of that method.
    assertTrue(repo.findSubExpressions("asdfasdf").isEmpty());
  }

  public void testExists() {
    assertTrue(repo.exists("bob", "90% CPU"));
    // Negative
    assertFalse(repo.exists("bob", "999% CPU"));
  }

  public void shouldFind() {
    List<Alarm> alarms = repo.find("bob", null, null, null);
    assertEquals(
        alarms,
        Arrays.asList(
            new Alarm("123", "90% CPU", null, "LOW",
                "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu, device=1}) > 10",
                AlarmState.UNDETERMINED, true, Arrays.asList("29387234", "77778687"), Collections
                    .<String>emptyList(), Collections.<String>emptyList()),
            new Alarm(
                "234",
                "50% CPU",
                null,
                "LOW",
                "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 100",
                AlarmState.UNDETERMINED, true, Arrays.asList("29387234", "77778687"), Collections
                    .<String>emptyList(), Collections.<String>emptyList())));
  }

  public void shouldFindByName() {
    List<Alarm> alarms = repo.find("bob", "90% CPU", null, null);
    assertEquals(alarms, Arrays.asList(new Alarm("123", "90% CPU", null, "LOW",
        "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu, device=1}) > 10",
        AlarmState.UNDETERMINED, true, Arrays.asList("29387234", "77778687"), Collections
            .<String>emptyList(), Collections.<String>emptyList())));
  }

  public void shouldDeleteById() {
    repo.deleteById("bob", "123");
    try {
      assertNull(repo.findById("bob", "123"));
      fail();
    } catch (EntityNotFoundException expected) {
    }
  }
}

View File

@ -0,0 +1,96 @@
package com.hpcloud.mon.infrastructure.persistence.mysql;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.List;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.io.Resources;
import com.hpcloud.mon.domain.exception.EntityNotFoundException;
import com.hpcloud.mon.domain.model.notificationmethod.NotificationMethod;
import com.hpcloud.mon.domain.model.notificationmethod.NotificationMethodType;
import com.hpcloud.mon.infrastructure.persistence.mysql.NotificationMethodMySqlRepositoryImpl;
/**
 * Tests for {@link NotificationMethodMySqlRepositoryImpl} against an in-memory H2 database in
 * MySQL compatibility mode. Each test starts from a single seeded row: method '123' ('MySMS',
 * SMS, '8675309') owned by tenant '444'.
 */
@Test
public class NotificationMethodMySqlRepositoryImplTest {
  private DBI db;
  private Handle handle;
  private NotificationMethodMySqlRepositoryImpl repo;

  @BeforeClass
  protected void beforeClass() throws Exception {
    db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
    handle = db.open();
    // Create the schema from the bundled DDL resource.
    handle.execute(Resources.toString(getClass().getResource("notification_method.sql"),
        Charset.defaultCharset()));
    repo = new NotificationMethodMySqlRepositoryImpl(db);
  }

  @AfterClass
  protected void afterClass() {
    handle.close();
  }

  @BeforeMethod
  protected void beforeMethod() {
    // Reset the table to the single known fixture row before every test.
    handle.execute("truncate table notification_method");
    handle
        .execute("insert into notification_method (id, tenant_id, name, type, address, created_at, updated_at) values ('123', '444', 'MySMS', 'SMS', '8675309', NOW(), NOW())");
  }

  public void shouldCreate() {
    NotificationMethod created = repo.create("555", "MySMS", NotificationMethodType.SMS, "5555555");
    NotificationMethod fetched = repo.findById("555", created.getId());
    assertEquals(created, fetched);
  }

  public void shouldExistForTenantAndNotificationMethod() {
    // Only the exact (tenant, id) pair should match.
    assertTrue(repo.exists("444", "123"));
    assertFalse(repo.exists("444", "1234"));
    assertFalse(repo.exists("333", "123"));
  }

  public void shouldFindById() {
    NotificationMethod method = repo.findById("444", "123");
    assertEquals(method.getId(), "123");
    assertEquals(method.getType(), NotificationMethodType.SMS);
    assertEquals(method.getAddress(), "8675309");
  }

  public void shouldFind() {
    List<NotificationMethod> methods = repo.find("444");
    assertEquals(methods, Arrays.asList(new NotificationMethod("123", "MySMS",
        NotificationMethodType.SMS, "8675309")));
  }

  public void shouldUpdate() {
    repo.update("444", "123", "Foo", NotificationMethodType.EMAIL, "abc");
    NotificationMethod updated = repo.findById("444", "123");
    assertEquals(updated, new NotificationMethod("123", "Foo", NotificationMethodType.EMAIL, "abc"));
  }

  public void shouldDeleteById() {
    repo.deleteById("444", "123");
    // Once deleted, lookups for the same id must fail.
    try {
      repo.findById("444", "123");
      fail();
    } catch (EntityNotFoundException expected) {
    }
  }
}