Changes to match the new style guidelines

Craig Bryant 2014-07-07 16:19:53 -06:00
parent 797c60f567
commit 44851750e6
50 changed files with 3148 additions and 2716 deletions

@ -14,9 +14,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
public class EventSpoutConfig extends KafkaSpoutConfig {
private static final long serialVersionUID = -8129774848323598123L;
private static final long serialVersionUID = -8129774848323598123L;
}

@ -1,16 +1,34 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import com.hpcloud.configuration.KafkaConsumerConfiguration;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.hpcloud.configuration.KafkaConsumerConfiguration;
public class KafkaSpoutConfig implements Serializable {
private static final long serialVersionUID = -6477042435089264571L;
private static final long serialVersionUID = -6477042435089264571L;
@JsonProperty
public Integer maxWaitTime = 100;
@JsonProperty
public Integer maxWaitTime = 100;
public KafkaConsumerConfiguration kafkaConsumerConfiguration;
public KafkaConsumerConfiguration kafkaConsumerConfiguration;
}

@ -14,9 +14,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
public class MetricSpoutConfig extends KafkaSpoutConfig {
private static final long serialVersionUID = -4285448019855024921L;
private static final long serialVersionUID = -4285448019855024921L;
}

@ -14,19 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.infrastructure.thresholding.DataSourceFactory;
import org.hibernate.validator.constraints.NotEmpty;
import java.util.Set;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import org.hibernate.validator.constraints.NotEmpty;
import com.hpcloud.mon.infrastructure.thresholding.DataSourceFactory;
/**
* Thresholding configuration.
*/

@ -14,16 +14,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.util.StatusPrinter;
import com.hpcloud.util.Injector;
import com.hpcloud.util.config.ConfigurationFactory;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.StormSubmitter;
import backtype.storm.generated.StormTopology;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.core.util.StatusPrinter;
import com.hpcloud.util.Injector;
import com.hpcloud.util.config.ConfigurationFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -33,60 +37,60 @@ import java.io.File;
* Alarm thresholding engine.
*/
public class ThresholdingEngine {
private static final Logger LOG = LoggerFactory.getLogger(ThresholdingEngine.class);
private static final Logger logger = LoggerFactory.getLogger(ThresholdingEngine.class);
private final ThresholdingConfiguration threshConfig;
private final String topologyName;
private final boolean local;
private final ThresholdingConfiguration threshConfig;
private final String topologyName;
private final boolean local;
public ThresholdingEngine(ThresholdingConfiguration threshConfig, String topologyName,
boolean local) {
this.threshConfig = threshConfig;
this.topologyName = topologyName;
this.local = local;
LOG.info("local set to {}", local);
public ThresholdingEngine(ThresholdingConfiguration threshConfig, String topologyName,
boolean local) {
this.threshConfig = threshConfig;
this.topologyName = topologyName;
this.local = local;
logger.info("local set to {}", local);
}
public static final ThresholdingConfiguration configFor(String configFileName) throws Exception {
return ConfigurationFactory
.<ThresholdingConfiguration>forClass(ThresholdingConfiguration.class).build(
new File(configFileName));
}
public static void main(String... args) throws Exception {
// Let's show the logging status.
StatusPrinter.print((LoggerContext) LoggerFactory.getILoggerFactory());
if (args.length < 2) {
logger.error("Expected configuration file name and topology name arguments");
System.exit(1);
}
public static final ThresholdingConfiguration configFor(String configFileName) throws Exception {
return ConfigurationFactory.<ThresholdingConfiguration>forClass(ThresholdingConfiguration.class)
.build(new File(configFileName));
}
public static void main(String... args) throws Exception {
// Let's show the logging status.
StatusPrinter.print((LoggerContext) LoggerFactory.getILoggerFactory());
if (args.length < 2) {
LOG.error("Expected configuration file name and topology name arguments");
System.exit(1);
}
LOG.info("Instantiating ThresholdingEngine with config file: {}, topology: {}",
args[0], args[1]);
ThresholdingEngine engine = new ThresholdingEngine(configFor(args[0]), args[1],
args.length > 2 ? true : false);
engine.configure();
engine.run();
}
protected void configure() {
Injector.registerModules(new TopologyModule(threshConfig));
}
protected void run() throws Exception {
Config config = Injector.getInstance(Config.class);
StormTopology topology = Injector.getInstance(StormTopology.class);
config.registerSerialization(com.hpcloud.mon.domain.model.SubAlarm.class);
if (local) {
LOG.info("submitting topology {} to local storm cluster", topologyName);
new LocalCluster().submitTopology(topologyName, config, topology);
} else {
LOG.info("submitting topology {} to non-local storm cluster", topologyName);
StormSubmitter.submitTopology(topologyName, config, topology);
}
logger.info("Instantiating ThresholdingEngine with config file: {}, topology: {}", args[0],
args[1]);
ThresholdingEngine engine =
new ThresholdingEngine(configFor(args[0]), args[1], args.length > 2 ? true : false);
engine.configure();
engine.run();
}
protected void configure() {
Injector.registerModules(new TopologyModule(threshConfig));
}
protected void run() throws Exception {
Config config = Injector.getInstance(Config.class);
StormTopology topology = Injector.getInstance(StormTopology.class);
config.registerSerialization(com.hpcloud.mon.domain.model.SubAlarm.class);
if (local) {
logger.info("submitting topology {} to local storm cluster", topologyName);
new LocalCluster().submitTopology(topologyName, config, topology);
} else {
logger.info("submitting topology {} to non-local storm cluster", topologyName);
StormSubmitter.submitTopology(topologyName, config, topology);
}
}
}
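
The main() shown above reads its configuration file from args[0], its topology name from args[1], and switches to a local in-process Storm cluster whenever a third argument is present. A minimal, hypothetical launcher that exercises that argument contract (the file path and topology name are placeholders, not values from this repository):

// Hypothetical launcher; ThresholdingEngine.main() treats args[0] as the config file,
// args[1] as the topology name, and any third argument as a request for LocalCluster mode.
public class ThresholdingEngineLaunchSketch {
  public static void main(String[] args) throws Exception {
    com.hpcloud.mon.ThresholdingEngine.main("thresh-config.yml", "metrics-thresh", "local");
  }
}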

@ -14,9 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import javax.inject.Named;
import com.hpcloud.mon.infrastructure.thresholding.AlarmThresholdingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventProcessingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventSpout;
import com.hpcloud.mon.infrastructure.thresholding.MetricAggregationBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricFilteringBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricSpout;
import com.hpcloud.mon.infrastructure.thresholding.deserializer.EventDeserializer;
import com.hpcloud.util.Injector;
import backtype.storm.Config;
import backtype.storm.generated.StormTopology;
@ -26,14 +34,8 @@ import backtype.storm.tuple.Fields;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.hpcloud.mon.infrastructure.thresholding.AlarmThresholdingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventProcessingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventSpout;
import com.hpcloud.mon.infrastructure.thresholding.MetricAggregationBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricFilteringBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricSpout;
import com.hpcloud.mon.infrastructure.thresholding.deserializer.EventDeserializer;
import com.hpcloud.util.Injector;
import javax.inject.Named;
/**
* Configures types for the thresholding topology.
@ -57,8 +59,7 @@ public class TopologyModule extends AbstractModule {
}
@Override
protected void configure() {
}
protected void configure() {}
@Provides
Config stormConfig() {
@ -100,21 +101,21 @@ public class TopologyModule extends AbstractModule {
// MaaS Event -> Events
builder.setBolt("event-bolt", new EventProcessingBolt(), config.eventBoltThreads)
.shuffleGrouping("event-spout")
.setNumTasks(config.eventBoltTasks);
.shuffleGrouping("event-spout").setNumTasks(config.eventBoltTasks);
// Metrics / Event -> Filtering
builder.setBolt("filtering-bolt", new MetricFilteringBolt(config.database),
config.filteringBoltThreads)
.shuffleGrouping("metrics-spout")
builder
.setBolt("filtering-bolt", new MetricFilteringBolt(config.database),
config.filteringBoltThreads).shuffleGrouping("metrics-spout")
.allGrouping("event-bolt", EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID)
.allGrouping("event-bolt", EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID)
.setNumTasks(config.filteringBoltTasks);
// Filtering / Event -> Aggregation
builder.setBolt("aggregation-bolt",
new MetricAggregationBolt(config.database, config.sporadicMetricNamespaces),
config.aggregationBoltThreads)
builder
.setBolt("aggregation-bolt",
new MetricAggregationBolt(config.database, config.sporadicMetricNamespaces),
config.aggregationBoltThreads)
.fieldsGrouping("filtering-bolt", new Fields(MetricFilteringBolt.FIELDS[0]))
.allGrouping("filtering-bolt", MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM)
.fieldsGrouping("event-bolt", EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID,
@ -124,9 +125,10 @@ public class TopologyModule extends AbstractModule {
.setNumTasks(config.aggregationBoltTasks);
// Aggregation / Event -> Thresholding
builder.setBolt("thresholding-bolt",
new AlarmThresholdingBolt(config.database, config.kafkaProducerConfig),
config.thresholdingBoltThreads)
builder
.setBolt("thresholding-bolt",
new AlarmThresholdingBolt(config.database, config.kafkaProducerConfig),
config.thresholdingBoltThreads)
.fieldsGrouping("aggregation-bolt", new Fields(MetricAggregationBolt.FIELDS[0]))
.fieldsGrouping("event-bolt", EventProcessingBolt.ALARM_EVENT_STREAM_ID,
new Fields(EventProcessingBolt.ALARM_EVENT_STREAM_FIELDS[1]))

@ -14,19 +14,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.domain.common.AbstractEntity;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.domain.common.AbstractEntity;
/**
* An alarm comprised of sub-alarms.
*/
@ -43,8 +44,8 @@ public class Alarm extends AbstractEntity {
public Alarm() {
}
public Alarm(String id, String tenantId, String name, String description, AlarmExpression expression,
List<SubAlarm> subAlarms, AlarmState state, boolean actionsEnabled) {
public Alarm(String id, String tenantId, String name, String description,
AlarmExpression expression, List<SubAlarm> subAlarms, AlarmState state, boolean actionsEnabled) {
this.id = id;
this.tenantId = tenantId;
this.name = name;
@ -56,47 +57,59 @@ public class Alarm extends AbstractEntity {
}
static String buildStateChangeReason(AlarmState alarmState, List<String> subAlarmExpressions) {
if (AlarmState.UNDETERMINED.equals(alarmState))
if (AlarmState.UNDETERMINED.equals(alarmState)) {
return String.format("No data was present for the sub-alarms: %s", subAlarmExpressions);
else if (AlarmState.ALARM.equals(alarmState))
} else if (AlarmState.ALARM.equals(alarmState)) {
return String.format("Thresholds were exceeded for the sub-alarms: %s", subAlarmExpressions);
else
} else {
return "The alarm threshold(s) have not been exceeded";
}
}
@Override
public boolean equals(Object obj) {
if (this == obj)
if (this == obj) {
return true;
if (!super.equals(obj))
}
if (!super.equals(obj)) {
return false;
if (getClass() != obj.getClass())
}
if (getClass() != obj.getClass()) {
return false;
}
Alarm other = (Alarm) obj;
if (!compareObjects(expression, other.expression))
if (!compareObjects(expression, other.expression)) {
return false;
if (!compareObjects(name, other.name))
}
if (!compareObjects(name, other.name)) {
return false;
if (!compareObjects(description, other.description))
}
if (!compareObjects(description, other.description)) {
return false;
if (state != other.state)
}
if (state != other.state) {
return false;
if (actionsEnabled != other.actionsEnabled)
return false;
if (!compareObjects(subAlarms, other.subAlarms))
}
if (actionsEnabled != other.actionsEnabled) {
return false;
if (!compareObjects(tenantId, other.tenantId))
}
if (!compareObjects(subAlarms, other.subAlarms)) {
return false;
}
if (!compareObjects(tenantId, other.tenantId)) {
return false;
}
return true;
}
private boolean compareObjects(final Object o1,
final Object o2) {
private boolean compareObjects(final Object o1, final Object o2) {
if (o1 == null) {
if (o2 != null)
return false;
} else if (!o1.equals(o2))
if (o2 != null) {
return false;
}
} else if (!o1.equals(o2)) {
return false;
}
return true;
}
@ -115,35 +128,42 @@ public class Alarm extends AbstractEntity {
// Handle UNDETERMINED state
if (!unitializedSubAlarms.isEmpty()) {
if (AlarmState.UNDETERMINED.equals(initialState))
if (AlarmState.UNDETERMINED.equals(initialState)) {
return false;
}
state = AlarmState.UNDETERMINED;
stateChangeReason = buildStateChangeReason(state, unitializedSubAlarms);
return true;
}
Map<AlarmSubExpression, Boolean> subExpressionValues = new HashMap<AlarmSubExpression, Boolean>();
for (SubAlarm subAlarm : subAlarms.values())
Map<AlarmSubExpression, Boolean> subExpressionValues =
new HashMap<AlarmSubExpression, Boolean>();
for (SubAlarm subAlarm : subAlarms.values()) {
subExpressionValues.put(subAlarm.getExpression(),
AlarmState.ALARM.equals(subAlarm.getState()));
}
// Handle ALARM state
if (expression.evaluate(subExpressionValues)) {
if (AlarmState.ALARM.equals(initialState))
if (AlarmState.ALARM.equals(initialState)) {
return false;
}
List<String> subAlarmExpressions = new ArrayList<String>();
for (SubAlarm subAlarm : subAlarms.values())
if (AlarmState.ALARM.equals(subAlarm.getState()))
for (SubAlarm subAlarm : subAlarms.values()) {
if (AlarmState.ALARM.equals(subAlarm.getState())) {
subAlarmExpressions.add(subAlarm.getExpression().toString());
}
}
state = AlarmState.ALARM;
stateChangeReason = buildStateChangeReason(state, subAlarmExpressions);
return true;
}
if (AlarmState.OK.equals(initialState))
if (AlarmState.OK.equals(initialState)) {
return false;
}
state = AlarmState.OK;
stateChangeReason = buildStateChangeReason(state, null);
return true;
@ -224,8 +244,9 @@ public class Alarm extends AbstractEntity {
public void setSubAlarms(List<SubAlarm> subAlarms) {
this.subAlarms = new HashMap<String, SubAlarm>();
for (SubAlarm subAlarm : subAlarms)
for (SubAlarm subAlarm : subAlarms) {
this.subAlarms.put(subAlarm.getId(), subAlarm);
}
}
public void setTenantId(String tenantId) {
@ -234,8 +255,9 @@ public class Alarm extends AbstractEntity {
@Override
public String toString() {
return String.format("Alarm [tenantId=%s, name=%s, description=%s, state=%s, actionsEnabled=%s]", tenantId,
name, description, state, actionsEnabled);
return String.format(
"Alarm [tenantId=%s, name=%s, description=%s, state=%s, actionsEnabled=%s]", tenantId,
name, description, state, actionsEnabled);
}
public void updateSubAlarm(SubAlarm subAlarm) {

@ -14,64 +14,73 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import java.io.Serializable;
package com.hpcloud.mon.domain.model;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import java.io.Serializable;
public class MetricDefinitionAndTenantId implements Serializable {
private static final long serialVersionUID = -4224596705186481749L;
private static final long serialVersionUID = -4224596705186481749L;
public MetricDefinition metricDefinition;
public String tenantId;
public MetricDefinition metricDefinition;
public String tenantId;
public MetricDefinitionAndTenantId(MetricDefinition metricDefinition,
String tenantId) {
this.metricDefinition = metricDefinition;
this.tenantId = tenantId;
public MetricDefinitionAndTenantId(MetricDefinition metricDefinition, String tenantId) {
this.metricDefinition = metricDefinition;
this.tenantId = tenantId;
}
@Override
public int hashCode() {
int result = 0;
if (this.metricDefinition != null) {
result += this.metricDefinition.hashCode();
}
@Override
public int hashCode() {
int result = 0;
if (this.metricDefinition != null)
result += this.metricDefinition.hashCode();
if (this.tenantId != null)
result = result * 31 + this.tenantId.hashCode();
return result;
if (this.tenantId != null) {
result = result * 31 + this.tenantId.hashCode();
}
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
final MetricDefinitionAndTenantId other = (MetricDefinitionAndTenantId) obj;
if (!compareObjects(this.tenantId, other.tenantId))
return false;
if (!compareObjects(this.metricDefinition, other.metricDefinition))
return false;
return true;
}
private boolean compareObjects(final Object o1,
final Object o2) {
if (o1 == null) {
if (o2 != null)
return false;
} else if (!o1.equals(o2))
return false;
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
@Override
public String toString() {
return String.format("MetricDefinitionAndTenantId tenantId=%s metricDefinition=%s", this.tenantId, this.metricDefinition);
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final MetricDefinitionAndTenantId other = (MetricDefinitionAndTenantId) obj;
if (!compareObjects(this.tenantId, other.tenantId)) {
return false;
}
if (!compareObjects(this.metricDefinition, other.metricDefinition)) {
return false;
}
return true;
}
private boolean compareObjects(final Object o1, final Object o2) {
if (o1 == null) {
if (o2 != null) {
return false;
}
} else if (!o1.equals(o2)) {
return false;
}
return true;
}
@Override
public String toString() {
return String.format("MetricDefinitionAndTenantId tenantId=%s metricDefinition=%s",
this.tenantId, this.metricDefinition);
}
}

@ -14,8 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -24,245 +27,281 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
/**
* This class is used to find any matching MetricDefinitionAndTenantId instances that match a given MetricDefinitionAndTenantId. This class
* has no way of handling duplicate MetricDefinitionAndTenantIds so it assumes something else handles that issue.
*
* The actual MetricDefinitionAndTenantId is not kept in the last Map in order to save heap space. It is expected that possibly millions
* of metrics may be stored in the Matcher and so by only storing the DimensionPairs instead of the whole MetricDefinitionAndTenantId,
* a significant amount of heap space will be saved thus reducing swapping. The MetricDefinitionAndTenantId is recreated when returned but
* since it will be just sent on and then the reference dropped, the object will be quickly and easily garbage collected. Testing shows
* that this algorithm is faster than keeping the whole MetricDefinitionAndTenantId in the Map.
* This class is used to find any matching MetricDefinitionAndTenantId instances that match a given
* MetricDefinitionAndTenantId. This class has no way of handling duplicate
* MetricDefinitionAndTenantIds so it assumes something else handles that issue.
*
* The actual MetricDefinitionAndTenantId is not kept in the last Map in order to save heap space.
* It is expected that possibly millions of metrics may be stored in the Matcher and so by only
* storing the DimensionPairs instead of the whole MetricDefinitionAndTenantId, a significant amount
* of heap space will be saved thus reducing swapping. The MetricDefinitionAndTenantId is recreated
* when returned but since it will be just sent on and then the reference dropped, the object will
* be quickly and easily garbage collected. Testing shows that this algorithm is faster than keeping
* the whole MetricDefinitionAndTenantId in the Map.
*/
public class MetricDefinitionAndTenantIdMatcher {
final Map<String, Map<String, Map<DimensionSet, Object>>> byTenantId = new ConcurrentHashMap<>();
private final static DimensionSet EMPTY_DIMENSION_SET = new DimensionSet(new DimensionPair[0]);
private final static Object placeHolder = new Object();
@SuppressWarnings("unchecked")
private final static List<MetricDefinitionAndTenantId> EMPTY_LIST = Collections.EMPTY_LIST;
final Map<String, Map<String, Map<DimensionSet, Object>>> byTenantId = new ConcurrentHashMap<>();
private final static DimensionSet EMPTY_DIMENSION_SET = new DimensionSet(new DimensionPair[0]);
private final static Object placeHolder = new Object();
@SuppressWarnings("unchecked")
private final static List<MetricDefinitionAndTenantId> EMPTY_LIST = Collections.EMPTY_LIST;
public void add(MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
Map<String, Map<DimensionSet, Object>> byMetricName = byTenantId.get(metricDefinitionAndTenantId.tenantId);
if (byMetricName == null) {
byMetricName = new ConcurrentHashMap<>();
byTenantId.put(metricDefinitionAndTenantId.tenantId, byMetricName);
}
Map<DimensionSet, Object> byDimensionSet = byMetricName.get(metricDefinitionAndTenantId.metricDefinition.name);
if (byDimensionSet == null) {
byDimensionSet = new ConcurrentHashMap<>();
byMetricName.put(metricDefinitionAndTenantId.metricDefinition.name, byDimensionSet);
}
final DimensionSet dimensionSet = createDimensionSet(metricDefinitionAndTenantId.metricDefinition);
byDimensionSet.put(dimensionSet, placeHolder);
public void add(MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
Map<String, Map<DimensionSet, Object>> byMetricName =
byTenantId.get(metricDefinitionAndTenantId.tenantId);
if (byMetricName == null) {
byMetricName = new ConcurrentHashMap<>();
byTenantId.put(metricDefinitionAndTenantId.tenantId, byMetricName);
}
Map<DimensionSet, Object> byDimensionSet =
byMetricName.get(metricDefinitionAndTenantId.metricDefinition.name);
if (byDimensionSet == null) {
byDimensionSet = new ConcurrentHashMap<>();
byMetricName.put(metricDefinitionAndTenantId.metricDefinition.name, byDimensionSet);
}
final DimensionSet dimensionSet =
createDimensionSet(metricDefinitionAndTenantId.metricDefinition);
byDimensionSet.put(dimensionSet, placeHolder);
}
private DimensionSet createDimensionSet(MetricDefinition metricDefinition) {
return new DimensionSet(createPairs(metricDefinition));
}
public boolean remove(MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
final Map<String, Map<DimensionSet, Object>> byMetricName =
byTenantId.get(metricDefinitionAndTenantId.tenantId);
if (byMetricName == null) {
return false;
}
private DimensionSet createDimensionSet(MetricDefinition metricDefinition) {
return new DimensionSet(createPairs(metricDefinition));
final Map<DimensionSet, Object> byDimensionSet =
byMetricName.get(metricDefinitionAndTenantId.metricDefinition.name);
if (byDimensionSet == null) {
return false;
}
public boolean remove(MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
final Map<String, Map<DimensionSet, Object>> byMetricName = byTenantId.get(metricDefinitionAndTenantId.tenantId);
if (byMetricName == null)
return false;
final Map<DimensionSet, Object> byDimensionSet = byMetricName.get(metricDefinitionAndTenantId.metricDefinition.name);
if (byDimensionSet == null)
return false;
final DimensionSet dimensionSet = createDimensionSet(metricDefinitionAndTenantId.metricDefinition);
final boolean result = byDimensionSet.remove(dimensionSet) != null;
if (result) {
if (byDimensionSet.isEmpty()) {
byMetricName.remove(metricDefinitionAndTenantId.metricDefinition.name);
if (byMetricName.isEmpty())
byTenantId.remove(metricDefinitionAndTenantId.tenantId);
}
final DimensionSet dimensionSet =
createDimensionSet(metricDefinitionAndTenantId.metricDefinition);
final boolean result = byDimensionSet.remove(dimensionSet) != null;
if (result) {
if (byDimensionSet.isEmpty()) {
byMetricName.remove(metricDefinitionAndTenantId.metricDefinition.name);
if (byMetricName.isEmpty()) {
byTenantId.remove(metricDefinitionAndTenantId.tenantId);
}
return result;
}
}
return result;
}
public List<MetricDefinitionAndTenantId> match(final MetricDefinitionAndTenantId toMatch) {
final Map<String, Map<DimensionSet, Object>> byMetricName = byTenantId.get(toMatch.tenantId);
if (byMetricName == null) {
return EMPTY_LIST;
}
public List<MetricDefinitionAndTenantId> match(final MetricDefinitionAndTenantId toMatch) {
final Map<String, Map<DimensionSet, Object>> byMetricName = byTenantId.get(toMatch.tenantId);
if (byMetricName == null)
return EMPTY_LIST;
final Map<DimensionSet, Object> byDimensionSet = byMetricName.get(toMatch.metricDefinition.name);
if (byDimensionSet == null)
return EMPTY_LIST;
final DimensionSet[] possibleDimensionSets = createPossibleDimensionPairs(toMatch.metricDefinition);
List<MetricDefinitionAndTenantId> matches = null;
for (final DimensionSet dimensionSet : possibleDimensionSets) {
if (byDimensionSet.containsKey(dimensionSet)) {
if (matches == null)
matches = new ArrayList<>();
matches.add(createFromDimensionSet(toMatch, dimensionSet));
}
final Map<DimensionSet, Object> byDimensionSet =
byMetricName.get(toMatch.metricDefinition.name);
if (byDimensionSet == null) {
return EMPTY_LIST;
}
final DimensionSet[] possibleDimensionSets =
createPossibleDimensionPairs(toMatch.metricDefinition);
List<MetricDefinitionAndTenantId> matches = null;
for (final DimensionSet dimensionSet : possibleDimensionSets) {
if (byDimensionSet.containsKey(dimensionSet)) {
if (matches == null) {
matches = new ArrayList<>();
}
return matches == null ? EMPTY_LIST : matches;
matches.add(createFromDimensionSet(toMatch, dimensionSet));
}
}
return matches == null ? EMPTY_LIST : matches;
}
private MetricDefinitionAndTenantId createFromDimensionSet(MetricDefinitionAndTenantId toMatch,
DimensionSet dimensionSet) {
final Map<String, String> dimensions = new HashMap<>(dimensionSet.pairs.length);
for (final DimensionPair pair : dimensionSet.pairs) {
dimensions.put(pair.key, pair.value);
}
return new MetricDefinitionAndTenantId(new MetricDefinition(toMatch.metricDefinition.name,
dimensions), toMatch.tenantId);
}
protected DimensionSet[] createPossibleDimensionPairs(MetricDefinition metricDefinition) {
final int dimensionSize =
metricDefinition.dimensions == null ? 0 : metricDefinition.dimensions.size();
final int size = (int) Math.pow(2, dimensionSize);
final DimensionSet[] result = new DimensionSet[size];
int index = 0;
result[index++] = EMPTY_DIMENSION_SET;
if (dimensionSize == 0) {
return result;
}
final DimensionPair[] pairs = createPairs(metricDefinition);
for (int i = 0; i < pairs.length; i++) {
index = addMore(pairs, i, EMPTY_DIMENSION_SET, result, index);
}
return result;
}
private int addMore(DimensionPair[] pairs, int start, DimensionSet dimensionSet,
DimensionSet[] result, int index) {
final DimensionPair[] newPairs = new DimensionPair[dimensionSet.pairs.length + 1];
if (dimensionSet.pairs.length > 0) {
System.arraycopy(dimensionSet.pairs, 0, newPairs, 0, dimensionSet.pairs.length);
}
newPairs[dimensionSet.pairs.length] = pairs[start];
final DimensionSet thisDimensionSet = new DimensionSet(newPairs);
result[index++] = thisDimensionSet;
for (int i = start + 1; i < pairs.length; i++) {
index = addMore(pairs, i, thisDimensionSet, result, index);
}
return index;
}
private DimensionPair[] createPairs(MetricDefinition metricDefinition) {
final int dimensionSize =
metricDefinition.dimensions == null ? 0 : metricDefinition.dimensions.size();
final DimensionPair[] pairs = new DimensionPair[dimensionSize];
if (dimensionSize > 0) { // metricDefinition.dimensions can be null
int index = 0;
for (final Map.Entry<String, String> entry : metricDefinition.dimensions.entrySet()) {
pairs[index++] = new DimensionPair(entry.getKey(), entry.getValue());
}
}
return pairs;
}
public boolean isEmpty() {
return byTenantId.isEmpty();
}
public void clear() {
byTenantId.clear();
}
protected static class DimensionSet {
final DimensionPair[] pairs;
public DimensionSet(DimensionPair... pairs) {
Arrays.sort(pairs);
this.pairs = pairs;
}
private MetricDefinitionAndTenantId createFromDimensionSet(
MetricDefinitionAndTenantId toMatch,
DimensionSet dimensionSet) {
final Map<String, String> dimensions = new HashMap<>(dimensionSet.pairs.length);
for (final DimensionPair pair : dimensionSet.pairs)
dimensions.put(pair.key, pair.value);
return new MetricDefinitionAndTenantId(new MetricDefinition(toMatch.metricDefinition.name, dimensions), toMatch.tenantId);
@Override
public int hashCode() {
int result = 1;
final int prime = 31;
for (DimensionPair pair : pairs) {
result = result * prime + pair.hashCode();
}
return result;
}
protected DimensionSet[] createPossibleDimensionPairs(MetricDefinition metricDefinition) {
final int dimensionSize = metricDefinition.dimensions == null ? 0 : metricDefinition.dimensions.size();
final int size = (int)Math.pow(2, dimensionSize);
final DimensionSet[] result = new DimensionSet[size];
int index = 0;
result[index++] = EMPTY_DIMENSION_SET;
if (dimensionSize == 0)
return result;
final DimensionPair[] pairs = createPairs(metricDefinition);
for (int i = 0; i < pairs.length; i++)
index = addMore(pairs, i, EMPTY_DIMENSION_SET, result, index);
return result;
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final DimensionSet other = (DimensionSet) obj;
if (this.pairs.length != other.pairs.length) {
return false;
}
for (int i = 0; i < this.pairs.length; i++) {
if (!this.pairs[i].equals(other.pairs[i])) {
return false;
}
}
return true;
}
private int addMore(DimensionPair[] pairs, int start,
DimensionSet dimensionSet, DimensionSet[] result, int index) {
final DimensionPair[] newPairs = new DimensionPair[dimensionSet.pairs.length + 1];
if (dimensionSet.pairs.length > 0)
System.arraycopy(dimensionSet.pairs, 0, newPairs, 0, dimensionSet.pairs.length);
newPairs[dimensionSet.pairs.length] = pairs[start];
final DimensionSet thisDimensionSet = new DimensionSet(newPairs);
result[index++] = thisDimensionSet;
for (int i = start + 1; i < pairs.length; i++)
index = addMore(pairs, i, thisDimensionSet, result, index);
return index;
@Override
public String toString() {
final StringBuilder builder = new StringBuilder(256);
builder.append("DimensionSet [");
boolean first = true;
for (DimensionPair pair : pairs) {
if (!first) {
builder.append(", ");
}
builder.append(pair.toString());
first = false;
}
builder.append("]");
return builder.toString();
}
}
protected static class DimensionPair implements Comparable<DimensionPair> {
private String key;
private String value;
public DimensionPair(String key, String value) {
this.key = key;
this.value = value;
}
private DimensionPair[] createPairs(MetricDefinition metricDefinition) {
final int dimensionSize = metricDefinition.dimensions == null ? 0 : metricDefinition.dimensions.size();
final DimensionPair[] pairs = new DimensionPair[dimensionSize];
if (dimensionSize > 0) { // metricDefinition.dimensions can be null
int index = 0;
for (final Map.Entry<String, String> entry : metricDefinition.dimensions.entrySet())
pairs[index++] = new DimensionPair(entry.getKey(), entry.getValue());
}
return pairs;
@Override
public int hashCode() {
int result = 1;
final int prime = 31;
result = prime * result + key.hashCode();
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
public boolean isEmpty() {
return byTenantId.isEmpty();
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
DimensionPair other = (DimensionPair) obj;
return compareStrings(key, other.key) && compareStrings(value, other.value);
}
public void clear() {
byTenantId.clear();
private boolean compareStrings(final String s1, final String s2) {
if (s1 == s2) {
return true;
}
if (s1 == null) {
return false;
}
return s1.equals(s2);
}
protected static class DimensionSet {
final DimensionPair[] pairs;
public DimensionSet(DimensionPair ... pairs) {
Arrays.sort(pairs);
this.pairs = pairs;
}
@Override
public int hashCode() {
int result = 1;
final int prime = 31;
for (DimensionPair pair : pairs)
result = result * prime + pair.hashCode();
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
final DimensionSet other = (DimensionSet) obj;
if (this.pairs.length != other.pairs.length)
return false;
for (int i = 0; i < this.pairs.length; i++)
if (!this.pairs[i].equals(other.pairs[i]))
return false;
return true;
}
@Override
public String toString() {
final StringBuilder builder = new StringBuilder(256);
builder.append("DimensionSet [");
boolean first = true;
for (DimensionPair pair : pairs) {
if (!first)
builder.append(", ");
builder.append(pair.toString());
first = false;
}
builder.append("]");
return builder.toString();
}
@Override
public int compareTo(DimensionPair o) {
int c = this.key.compareTo(o.key);
if (c != 0) {
return c;
}
// Handle possible null values. An actual value is bigger than a null
if (this.value == null) {
return o.value == null ? 0 : 1;
}
return this.value.compareTo(o.value);
}
protected static class DimensionPair implements Comparable<DimensionPair> {
private String key;
private String value;
public DimensionPair(String key, String value) {
this.key = key;
this.value = value;
}
@Override
public int hashCode() {
int result = 1;
final int prime = 31;
result = prime * result + key.hashCode();
result = prime * result + ((value == null) ? 0 : value.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
DimensionPair other = (DimensionPair) obj;
return compareStrings(key, other.key) &&
compareStrings(value, other.value);
}
private boolean compareStrings(final String s1,
final String s2) {
if (s1 == s2)
return true;
if (s1 == null)
return false;
return s1.equals(s2);
}
@Override
public int compareTo(DimensionPair o) {
int c = this.key.compareTo(o.key);
if (c != 0)
return c;
// Handle possible null values. An actual value is bigger than a null
if (this.value == null)
return o.value == null ? 0: 1;
return this.value.compareTo(o.value);
}
@Override
public String toString() {
return String.format("DimensionPair %s=%s", key, value);
}
@Override
public String toString() {
return String.format("DimensionPair %s=%s", key, value);
}
}
}
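
The class comment above describes the matching strategy: every incoming MetricDefinitionAndTenantId is expanded into all 2^n subsets of its dimensions (including the empty set), and each candidate subset is looked up under the tenant id and metric name. The following self-contained sketch shows that subset enumeration with plain maps; it illustrates the idea only and does not mirror the DimensionPair-array layout the class actually stores:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class DimensionSubsetSketch {
  // Returns all 2^n subsets of the given dimensions, mirroring the enumeration idea
  // behind createPossibleDimensionPairs() above.
  static List<Map<String, String>> subsets(Map<String, String> dimensions) {
    List<Map<String, String>> result = new ArrayList<>();
    result.add(new LinkedHashMap<String, String>()); // the empty dimension set
    for (Map.Entry<String, String> entry : dimensions.entrySet()) {
      List<Map<String, String>> grown = new ArrayList<>();
      for (Map<String, String> subset : result) {
        Map<String, String> withEntry = new LinkedHashMap<>(subset);
        withEntry.put(entry.getKey(), entry.getValue());
        grown.add(withEntry);
      }
      result.addAll(grown); // each existing subset spawns one that also holds this dimension
    }
    return result;
  }

  public static void main(String[] args) {
    Map<String, String> dims = new LinkedHashMap<>();
    dims.put("hostname", "host1");
    dims.put("device", "eth0");
    // Prints four candidates: {}, {hostname=host1}, {device=eth0}, {hostname=host1, device=eth0}
    for (Map<String, String> subset : subsets(dims)) {
      System.out.println(subset);
    }
  }
}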

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import com.hpcloud.mon.common.model.alarm.AlarmState;
@ -26,122 +27,135 @@ import java.io.Serializable;
* Sub-alarm. Decorates an AlarmSubExpression.
*/
public class SubAlarm extends AbstractEntity implements Serializable {
private static final long serialVersionUID = -3946708553723868124L;
private static final long serialVersionUID = -3946708553723868124L;
private String alarmId;
private AlarmSubExpression expression;
private AlarmState state;
private boolean noState;
/**
* Whether metrics for this sub-alarm are received sporadically.
*/
private boolean sporadicMetric;
private String alarmId;
private AlarmSubExpression expression;
private AlarmState state;
private boolean noState;
/**
* Whether metrics for this sub-alarm are received sporadically.
*/
private boolean sporadicMetric;
public SubAlarm(String id, String alarmId, AlarmSubExpression expression) {
this(id, alarmId, expression, AlarmState.UNDETERMINED);
public SubAlarm(String id, String alarmId, AlarmSubExpression expression) {
this(id, alarmId, expression, AlarmState.UNDETERMINED);
}
// Need this for kryo serialization/deserialization. Fixes a bug in default java
// serialization/deserialization where id was not being set. See resources/storm.yaml
// file for how to handle serialization/deserialization with kryo.
public SubAlarm() {
}
public SubAlarm(String id, String alarmId, AlarmSubExpression expression, AlarmState state) {
this.id = id;
this.alarmId = alarmId;
this.expression = expression;
this.state = state;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
// Need this for kryo serialization/deserialization. Fixes a bug in default java
// serialization/deserialization where id was not being set. See resources/storm.yaml
// file for how to handle serialization/deserialization with kryo.
public SubAlarm() {
if (!super.equals(obj)) {
return false;
}
public SubAlarm(String id, String alarmId, AlarmSubExpression expression, AlarmState state) {
this.id = id;
this.alarmId = alarmId;
this.expression = expression;
this.state = state;
if (getClass() != obj.getClass()) {
return false;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (getClass() != obj.getClass())
return false;
SubAlarm other = (SubAlarm) obj;
if (alarmId == null) {
if (other.alarmId != null)
return false;
} else if (!alarmId.equals(other.alarmId))
return false;
if (expression == null) {
if (other.expression != null)
return false;
} else if (!expression.equals(other.expression))
return false;
if (state != other.state)
return false;
return true;
SubAlarm other = (SubAlarm) obj;
if (alarmId == null) {
if (other.alarmId != null) {
return false;
}
} else if (!alarmId.equals(other.alarmId)) {
return false;
}
public String getAlarmId() {
return alarmId;
if (expression == null) {
if (other.expression != null) {
return false;
}
} else if (!expression.equals(other.expression)) {
return false;
}
public AlarmSubExpression getExpression() {
return expression;
if (state != other.state) {
return false;
}
return true;
}
public AlarmState getState() {
return state;
}
public String getAlarmId() {
return alarmId;
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((alarmId == null) ? 0 : alarmId.hashCode());
result = prime * result + ((expression == null) ? 0 : expression.hashCode());
result = prime * result + ((state == null) ? 0 : state.hashCode());
return result;
}
public AlarmSubExpression getExpression() {
return expression;
}
public boolean isSporadicMetric() {
return sporadicMetric;
}
public AlarmState getState() {
return state;
}
public void setSporadicMetric(boolean sporadicMetric) {
this.sporadicMetric = sporadicMetric;
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((alarmId == null) ? 0 : alarmId.hashCode());
result = prime * result + ((expression == null) ? 0 : expression.hashCode());
result = prime * result + ((state == null) ? 0 : state.hashCode());
return result;
}
public void setState(AlarmState state) {
this.state = state;
}
public boolean isSporadicMetric() {
return sporadicMetric;
}
public boolean isNoState() {
return noState;
}
public void setSporadicMetric(boolean sporadicMetric) {
this.sporadicMetric = sporadicMetric;
}
public void setNoState(boolean noState) {
this.noState = noState;
}
public void setState(AlarmState state) {
this.state = state;
}
@Override
public String toString() {
return String.format("SubAlarm [id=%s, alarmId=%s, expression=%s, state=%s noState=%s]", id, alarmId,
expression, state, noState);
}
public boolean isNoState() {
return noState;
}
/**
* Determine if this SubAlarm and 'other' could reuse saved measurements. Only possible if
* operator and/or threshold are the only properties from the expression that are different.
* @param other SubAlarm to compare to
* @return true if 'other' is "compatible", false otherwise
*/
public boolean isCompatible(final SubAlarm other) {
if (!this.expression.getMetricDefinition().equals(other.expression.getMetricDefinition()))
return false;
if (!this.expression.getFunction().equals(other.expression.getFunction()))
return false;
if (this.expression.getPeriod() != other.expression.getPeriod())
return false;
if (this.expression.getPeriods() != other.expression.getPeriods())
return false;
// Operator and Threshold can vary
return true;
public void setNoState(boolean noState) {
this.noState = noState;
}
@Override
public String toString() {
return String.format("SubAlarm [id=%s, alarmId=%s, expression=%s, state=%s noState=%s]", id,
alarmId, expression, state, noState);
}
/**
* Determine if this SubAlarm and 'other' could reuse saved measurements. Only possible if
* operator and/or threshold are the only properties from the expression that are different.
*
* @param other SubAlarm to compare to
* @return true if 'other' is "compatible", false otherwise
*/
public boolean isCompatible(final SubAlarm other) {
if (!this.expression.getMetricDefinition().equals(other.expression.getMetricDefinition())) {
return false;
}
if (!this.expression.getFunction().equals(other.expression.getFunction())) {
return false;
}
if (this.expression.getPeriod() != other.expression.getPeriod()) {
return false;
}
if (this.expression.getPeriods() != other.expression.getPeriods()) {
return false;
}
// Operator and Threshold can vary
return true;
}
}
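
isCompatible() above allows saved measurements to be reused when two sub-alarms share the same metric definition, function, period and periods, differing at most in operator and threshold. A sketch of that contract using the constructor signatures visible elsewhere in this commit; the metric name, dimension values, and the enum constants AVG, LT and GT are assumptions for illustration only:

import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.SubAlarm;

import java.util.Collections;
import java.util.Map;

public class CompatibilitySketch {
  public static void main(String[] args) {
    Map<String, String> dims = Collections.singletonMap("hostname", "host1");
    MetricDefinition metricDef = new MetricDefinition("cpu.idle_perc", dims);

    // Same metric, function, period (60s) and periods (3); only operator and threshold differ.
    AlarmSubExpression low =
        new AlarmSubExpression(AggregateFunction.AVG, metricDef, AlarmOperator.LT, 10.0, 60, 3);
    AlarmSubExpression high =
        new AlarmSubExpression(AggregateFunction.AVG, metricDef, AlarmOperator.GT, 90.0, 60, 3);

    SubAlarm a = new SubAlarm("sub-1", "alarm-1", low);
    SubAlarm b = new SubAlarm("sub-2", "alarm-1", high);

    System.out.println(a.isCompatible(b)); // expected: true, saved measurements can be reused
  }
}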

@ -14,20 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
package com.hpcloud.mon.domain.model;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.util.stats.SlidingWindowStats;
import com.hpcloud.util.time.TimeResolution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Aggregates statistics for a specific SubAlarm.
*/
public class SubAlarmStats {
private static final Logger LOG = LoggerFactory.getLogger(SubAlarmStats.class);
private static final Logger logger = LoggerFactory.getLogger(SubAlarmStats.class);
/** Number of slots for future periods that we should collect metrics for. */
private static final int FUTURE_SLOTS = 2;
/** Helps determine how many empty window observations before transitioning to UNDETERMINED. */
@ -48,28 +49,29 @@ public class SubAlarmStats {
slotWidth = subAlarm.getExpression().getPeriod();
this.subAlarm = subAlarm;
this.subAlarm.setNoState(true);
this.stats = new SlidingWindowStats(subAlarm.getExpression().getFunction().toStatistic(),
timeResolution, slotWidth, subAlarm.getExpression().getPeriods(), FUTURE_SLOTS,
viewEndTimestamp);
this.stats =
new SlidingWindowStats(subAlarm.getExpression().getFunction().toStatistic(),
timeResolution, slotWidth, subAlarm.getExpression().getPeriods(), FUTURE_SLOTS,
viewEndTimestamp);
int period = subAlarm.getExpression().getPeriod();
int periodMinutes = period < 60 ? 1 : period / 60; // Assumes the period is in seconds so we
// convert to minutes
emptyWindowObservationThreshold = periodMinutes * subAlarm.getExpression().getPeriods()
* UNDETERMINED_COEFFICIENT;
emptyWindowObservationThreshold =
periodMinutes * subAlarm.getExpression().getPeriods() * UNDETERMINED_COEFFICIENT;
emptyWindowObservations = 0;
}
/**
* Evaluates the {@link #subAlarm} for the current stats window, updating the sub-alarm's state if
* necessary and sliding the window to the {@code slideToTimestamp}.
*
*
* @return true if the alarm's state changed, else false.
*/
public boolean evaluateAndSlideWindow(long slideToTimestamp) {
try {
return evaluate();
} catch (Exception e) {
LOG.error("Failed to evaluate {}", this, e);
logger.error("Failed to evaluate {}", this, e);
return false;
} finally {
slideWindow(slideToTimestamp);
@ -77,8 +79,8 @@ public class SubAlarmStats {
}
/**
* Just slide the window. Either slideWindow or evaluateAndSlideWindow
* should be called for each time period, but never both
* Just slide the window. Either slideWindow or evaluateAndSlideWindow should be called for each
* time period, but never both
*
* @param slideToTimestamp
*/
@ -102,9 +104,10 @@ public class SubAlarmStats {
@Override
public String toString() {
return String.format(
"SubAlarmStats [subAlarm=%s, stats=%s, emptyWindowObservations=%s, emptyWindowObservationThreshold=%s]",
subAlarm, stats, emptyWindowObservations, emptyWindowObservationThreshold);
return String
.format(
"SubAlarmStats [subAlarm=%s, stats=%s, emptyWindowObservations=%s, emptyWindowObservationThreshold=%s]",
subAlarm, stats, emptyWindowObservations, emptyWindowObservationThreshold);
}
/**
@ -115,17 +118,17 @@ public class SubAlarmStats {
boolean thresholdExceeded = false;
boolean hasEmptyWindows = false;
for (double value : values) {
if (Double.isNaN(value))
if (Double.isNaN(value)) {
hasEmptyWindows = true;
else {
} else {
emptyWindowObservations = 0;
// Check if value is OK
if (!subAlarm.getExpression()
.getOperator()
if (!subAlarm.getExpression().getOperator()
.evaluate(value, subAlarm.getExpression().getThreshold())) {
if (!shouldSendStateChange(AlarmState.OK))
if (!shouldSendStateChange(AlarmState.OK)) {
return false;
}
setSubAlarmState(AlarmState.OK);
return true;
} else
@ -134,8 +137,9 @@ public class SubAlarmStats {
}
if (thresholdExceeded && !hasEmptyWindows) {
if (!shouldSendStateChange(AlarmState.ALARM))
if (!shouldSendStateChange(AlarmState.ALARM)) {
return false;
}
setSubAlarmState(AlarmState.ALARM);
return true;
}
@ -143,10 +147,9 @@ public class SubAlarmStats {
// Window is empty at this point
emptyWindowObservations++;
if ((emptyWindowObservations >= emptyWindowObservationThreshold) &&
shouldSendStateChange(AlarmState.UNDETERMINED) &&
!subAlarm.isSporadicMetric()) {
setSubAlarmState(AlarmState.UNDETERMINED);
if ((emptyWindowObservations >= emptyWindowObservationThreshold)
&& shouldSendStateChange(AlarmState.UNDETERMINED) && !subAlarm.isSporadicMetric()) {
setSubAlarmState(AlarmState.UNDETERMINED);
return true;
}
@ -164,10 +167,11 @@ public class SubAlarmStats {
/**
* This MUST only be used for compatible SubAlarms, i.e. where
* this.subAlarm.isCompatible(subAlarm) is true
* this.subAlarm.isCompatible(subAlarm) is true
*
* @param subAlarm
*/
public void updateSubAlarm(final SubAlarm subAlarm) {
this.subAlarm = subAlarm;
this.subAlarm = subAlarm;
}
}
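
The constructor above converts the sub-alarm period to whole minutes (anything under 60 seconds counts as one minute) before multiplying by the expression's periods and by UNDETERMINED_COEFFICIENT, whose value does not appear in this diff. A worked sketch of that arithmetic with an assumed coefficient, purely to make the numbers concrete:

public class EmptyWindowThresholdSketch {
  public static void main(String[] args) {
    final int undeterminedCoefficient = 3; // assumption: the real UNDETERMINED_COEFFICIENT is not shown here
    int period = 300;  // sub-alarm period in seconds (5 minutes)
    int periods = 2;   // number of periods in the alarm expression
    int periodMinutes = period < 60 ? 1 : period / 60;                 // 5
    int threshold = periodMinutes * periods * undeterminedCoefficient; // 5 * 2 * 3 = 30
    System.out.println("Empty-window observations before UNDETERMINED: " + threshold);
  }
}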

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.service;
import com.hpcloud.mon.common.model.alarm.AlarmState;

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.service;
import java.util.List;

@ -14,13 +14,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.service;
import java.util.List;
package com.hpcloud.mon.domain.service;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import java.util.List;
/**
* SubAlarm DAO.
*/

@ -14,61 +14,69 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.service;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
public class SubAlarmMetricDefinition {
private final String subAlarmId;
private final MetricDefinitionAndTenantId metricDefinitionAndTenantId;
private final String subAlarmId;
private final MetricDefinitionAndTenantId metricDefinitionAndTenantId;
public SubAlarmMetricDefinition(String subAlarmId,
MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
this.subAlarmId = subAlarmId;
this.metricDefinitionAndTenantId = metricDefinitionAndTenantId;
}
public SubAlarmMetricDefinition(String subAlarmId,
MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
this.subAlarmId = subAlarmId;
this.metricDefinitionAndTenantId = metricDefinitionAndTenantId;
}
public String getSubAlarmId() {
return subAlarmId;
}
public String getSubAlarmId() {
return subAlarmId;
}
public MetricDefinitionAndTenantId getMetricDefinitionAndTenantId() {
return metricDefinitionAndTenantId;
}
public MetricDefinitionAndTenantId getMetricDefinitionAndTenantId() {
return metricDefinitionAndTenantId;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((subAlarmId == null) ? 0 : subAlarmId.hashCode());
result = prime * result + ((metricDefinitionAndTenantId == null) ? 0 : metricDefinitionAndTenantId.hashCode());
return result;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((subAlarmId == null) ? 0 : subAlarmId.hashCode());
result =
prime * result
+ ((metricDefinitionAndTenantId == null) ? 0 : metricDefinitionAndTenantId.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
SubAlarmMetricDefinition other = (SubAlarmMetricDefinition) obj;
return compareObjects(subAlarmId, other.subAlarmId) &&
compareObjects(metricDefinitionAndTenantId, other.metricDefinitionAndTenantId);
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
SubAlarmMetricDefinition other = (SubAlarmMetricDefinition) obj;
return compareObjects(subAlarmId, other.subAlarmId)
&& compareObjects(metricDefinitionAndTenantId, other.metricDefinitionAndTenantId);
}
private boolean compareObjects(final Object o1, final Object o2) {
if (o1 == o2)
return true;
if (o1 == null)
return false;
return o1.equals(o2);
private boolean compareObjects(final Object o1, final Object o2) {
if (o1 == o2) {
return true;
}
if (o1 == null) {
return false;
}
return o1.equals(o2);
}
@Override
public String toString() {
return String.format("SubAlarmMetricDefinition subAlarmId=%s metricDefinitionAndTenantId=%s", subAlarmId,
metricDefinitionAndTenantId);
}
@Override
public String toString() {
return String.format("SubAlarmMetricDefinition subAlarmId=%s metricDefinitionAndTenantId=%s",
subAlarmId, metricDefinitionAndTenantId);
}
}

@ -14,15 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.service;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.model.SubAlarmStats;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.model.SubAlarmStats;
/**
* SubAlarmStats repository.
*/
@ -34,8 +35,9 @@ public class SubAlarmStatsRepository {
* adds it to the repository.
*/
public void add(SubAlarm subAlarm, long viewEndTimestamp) {
if (!subAlarmStats.containsKey(subAlarm.getId()))
if (!subAlarmStats.containsKey(subAlarm.getId())) {
subAlarmStats.put(subAlarm.getId(), new SubAlarmStats(subAlarm, viewEndTimestamp));
}
}
public Collection<SubAlarmStats> get() {

@ -14,17 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmState;
@ -36,6 +28,15 @@ import com.hpcloud.mon.domain.service.AlarmDAO;
import com.hpcloud.persistence.BeanMapper;
import com.hpcloud.persistence.SqlQueries;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
/**
* Alarm DAO implementation.
*/
@ -63,10 +64,12 @@ public class AlarmDAOImpl implements AlarmDAO {
String subAlarmId = (String) row.get("id");
Map<String, String> dimensions = findDimensionsById(handle, subAlarmId);
AggregateFunction function = AggregateFunction.valueOf((String) row.get("function"));
MetricDefinition metricDef = new MetricDefinition((String) row.get("metric_name"), dimensions);
MetricDefinition metricDef =
new MetricDefinition((String) row.get("metric_name"), dimensions);
AlarmOperator operator = AlarmOperator.valueOf((String) row.get("operator"));
AlarmSubExpression subExpression = new AlarmSubExpression(function, metricDef, operator,
(Double) row.get("threshold"), (Integer) row.get("period"), (Integer) row.get("periods"));
AlarmSubExpression subExpression =
new AlarmSubExpression(function, metricDef, operator, (Double) row.get("threshold"),
(Integer) row.get("period"), (Integer) row.get("periods"));
SubAlarm subAlarm = new SubAlarm(subAlarmId, (String) row.get("alarm_id"), subExpression);
subAlarms.add(subAlarm);
}
@ -79,18 +82,17 @@ public class AlarmDAOImpl implements AlarmDAO {
Handle h = db.open();
try {
Alarm alarm = h.createQuery("select * from alarm where id = :id and deleted_at is null")
.bind("id", id)
.map(new BeanMapper<Alarm>(Alarm.class))
.first();
if (alarm == null)
return alarm;
Alarm alarm =
h.createQuery("select * from alarm where id = :id and deleted_at is null").bind("id", id)
.map(new BeanMapper<Alarm>(Alarm.class)).first();
if (alarm == null) {
return null;
}
alarm.setSubAlarms(subAlarmsForRows(
h,
h.createQuery("select * from sub_alarm where alarm_id = :alarmId")
.bind("alarmId", alarm.getId())
.list()));
.bind("alarmId", alarm.getId()).list()));
return alarm;
} finally {
@ -104,9 +106,7 @@ public class AlarmDAOImpl implements AlarmDAO {
try {
h.createStatement("update alarm set state = :state, updated_at = NOW() where id = :id")
.bind("id", id)
.bind("state", state.toString())
.execute();
.bind("id", id).bind("state", state.toString()).execute();
} finally {
h.close();
}

@ -14,8 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@ -23,21 +32,14 @@ import java.util.Map;
import javax.inject.Inject;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;
/**
* MetricDefinition DAO implementation.
*/
public class MetricDefinitionDAOImpl implements MetricDefinitionDAO {
private static final String METRIC_DEF_SQL = "select sa.id, a.tenant_id, sa.metric_name, sad.dimensions from alarm as a, sub_alarm as sa "
+ "left join (select sub_alarm_id, group_concat(dimension_name, '=', value) as dimensions from sub_alarm_dimension group by sub_alarm_id) as sad on sa.id = sad.sub_alarm_id "
+ "where a.id = sa.alarm_id and a.deleted_at is null";
private static final String METRIC_DEF_SQL =
"select sa.id, a.tenant_id, sa.metric_name, sad.dimensions from alarm as a, sub_alarm as sa "
+ "left join (select sub_alarm_id, group_concat(dimension_name, '=', value) as dimensions from sub_alarm_dimension group by sub_alarm_id) as sad on sa.id = sad.sub_alarm_id "
+ "where a.id = sa.alarm_id and a.deleted_at is null";
private final DBI db;
@@ -65,15 +67,16 @@ public class MetricDefinitionDAOImpl implements MetricDefinitionDAO {
for (String kvStr : dimensionSet.split(",")) {
String[] kv = kvStr.split("=");
if (kv.length > 1) {
if (dimensions == null)
if (dimensions == null) {
dimensions = new HashMap<String, String>();
}
dimensions.put(kv[0], kv[1]);
}
}
}
metricDefs.add(new SubAlarmMetricDefinition(subAlarmId,
new MetricDefinitionAndTenantId(new MetricDefinition(metric_name, dimensions), tenantId)));
metricDefs.add(new SubAlarmMetricDefinition(subAlarmId, new MetricDefinitionAndTenantId(
new MetricDefinition(metric_name, dimensions), tenantId)));
}
return metricDefs;

View File

@@ -14,20 +14,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import javax.inject.Singleton;
import org.skife.jdbi.v2.DBI;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Scopes;
import com.hpcloud.mon.domain.service.AlarmDAO;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.hpcloud.mon.infrastructure.thresholding.DataSourceFactory;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Scopes;
import org.skife.jdbi.v2.DBI;
import javax.inject.Singleton;
/**
* Configures persistence related types.
*/

View File

@@ -14,18 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
@@ -35,6 +26,16 @@ import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.hpcloud.persistence.SqlStatements;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
/**
* SubAlarm DAO implementation.
*/
@@ -44,12 +45,14 @@ public class SubAlarmDAOImpl implements SubAlarmDAO {
* table, grouping by the dimension id and counting them to ensure that the number of matched
* dimensions equals the number of actual dimensions in the table for the subscription.
*/
private static final String FIND_BY_METRIC_DEF_SQL = "select sa.* from sub_alarm sa, alarm a, sub_alarm_dimension d "
+ "join (%s) v on d.dimension_name = v.dimension_name and d.value = v.value "
+ "where sa.id = d.sub_alarm_id and sa.metric_name = :metric_name and a.tenant_id = :tenant_id and a.id = sa.alarm_id and a.deleted_at is null "
+ "group by d.sub_alarm_id having count(d.sub_alarm_id) = %s";
private static final String FIND_BY_METRIC_DEF_NO_DIMS_SQL = "select sa.* from sub_alarm sa, alarm a where sa.metric_name = :metric_name "
+ "and a.tenant_id = :tenant_id and a.id = sa.alarm_id and a.deleted_at is null and (select count(*) from sub_alarm_dimension where sub_alarm_id = sa.id) = 0";
private static final String FIND_BY_METRIC_DEF_SQL =
"select sa.* from sub_alarm sa, alarm a, sub_alarm_dimension d "
+ "join (%s) v on d.dimension_name = v.dimension_name and d.value = v.value "
+ "where sa.id = d.sub_alarm_id and sa.metric_name = :metric_name and a.tenant_id = :tenant_id and a.id = sa.alarm_id and a.deleted_at is null "
+ "group by d.sub_alarm_id having count(d.sub_alarm_id) = %s";
private static final String FIND_BY_METRIC_DEF_NO_DIMS_SQL =
"select sa.* from sub_alarm sa, alarm a where sa.metric_name = :metric_name "
+ "and a.tenant_id = :tenant_id and a.id = sa.alarm_id and a.deleted_at is null and (select count(*) from sub_alarm_dimension where sub_alarm_id = sa.id) = 0";
private final DBI db;
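The two constants above implement the dimension-matching strategy described in the class comment: the union-all fragment produced by SqlStatements.unionAllStatementFor(...) is joined against sub_alarm_dimension, and the having clause keeps only sub-alarms that have a matching row for every dimension supplied in the lookup. Below is a small, self-contained sketch of what the formatted query looks like for two sample dimensions; the dimension names, values, and the inlined union-all fragment are illustrative assumptions, not output captured from this code.

import java.util.LinkedHashMap;
import java.util.Map;

// Sketch only: prints the shape of the query FIND_BY_METRIC_DEF_SQL yields for two dimensions.
public class FindByMetricDefSqlSketch {
  public static void main(String[] args) {
    Map<String, String> dimensions = new LinkedHashMap<String, String>();
    dimensions.put("hostname", "host-1");
    dimensions.put("device", "vda");

    // Stand-in for SqlStatements.unionAllStatementFor(dimensions, "dimension_name", "value").
    StringBuilder unionAll = new StringBuilder();
    for (Map.Entry<String, String> entry : dimensions.entrySet()) {
      if (unionAll.length() > 0) {
        unionAll.append(" union all ");
      }
      unionAll.append(String.format("select '%s' dimension_name, '%s' value", entry.getKey(),
          entry.getValue()));
    }

    // The having clause keeps only sub-alarms whose count of matching dimension rows equals
    // the number of dimensions supplied for the lookup.
    String sql = String.format(
        "select sa.* from sub_alarm sa, alarm a, sub_alarm_dimension d "
            + "join (%s) v on d.dimension_name = v.dimension_name and d.value = v.value "
            + "where sa.id = d.sub_alarm_id and sa.metric_name = :metric_name "
            + "and a.tenant_id = :tenant_id and a.id = sa.alarm_id and a.deleted_at is null "
            + "group by d.sub_alarm_id having count(d.sub_alarm_id) = %s",
        unionAll, dimensions.size());
    System.out.println(sql);
  }
}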
@@ -65,17 +68,20 @@ public class SubAlarmDAOImpl implements SubAlarmDAO {
try {
final MetricDefinition metricDefinition = metricDefinitionTenantId.metricDefinition;
final String sql;
if (metricDefinition.dimensions == null || metricDefinition.dimensions.isEmpty())
if (metricDefinition.dimensions == null || metricDefinition.dimensions.isEmpty()) {
sql = FIND_BY_METRIC_DEF_NO_DIMS_SQL;
else {
String unionAllStatement = SqlStatements.unionAllStatementFor(metricDefinition.dimensions,
"dimension_name", "value");
sql = String.format(FIND_BY_METRIC_DEF_SQL, unionAllStatement,
metricDefinition.dimensions.size());
} else {
String unionAllStatement =
SqlStatements.unionAllStatementFor(metricDefinition.dimensions, "dimension_name",
"value");
sql =
String.format(FIND_BY_METRIC_DEF_SQL, unionAllStatement,
metricDefinition.dimensions.size());
}
Query<Map<String, Object>> query = h.createQuery(sql).bind("metric_name",
metricDefinition.name).bind("tenant_id", metricDefinitionTenantId.tenantId);
Query<Map<String, Object>> query =
h.createQuery(sql).bind("metric_name", metricDefinition.name)
.bind("tenant_id", metricDefinitionTenantId.tenantId);
List<Map<String, Object>> rows = query.list();
List<SubAlarm> subAlarms = new ArrayList<SubAlarm>(rows.size());
@@ -83,9 +89,10 @@ public class SubAlarmDAOImpl implements SubAlarmDAO {
String subAlarmId = (String) row.get("id");
AggregateFunction function = AggregateFunction.valueOf((String) row.get("function"));
AlarmOperator operator = AlarmOperator.valueOf((String) row.get("operator"));
AlarmSubExpression subExpression = new AlarmSubExpression(function, metricDefinition,
operator, (Double) row.get("threshold"), (Integer) row.get("period"),
(Integer) row.get("periods"));
AlarmSubExpression subExpression =
new AlarmSubExpression(function, metricDefinition, operator,
(Double) row.get("threshold"), (Integer) row.get("period"),
(Integer) row.get("periods"));
SubAlarm subAlarm = new SubAlarm(subAlarmId, (String) row.get("alarm_id"), subExpression);
subAlarms.add(subAlarm);

View File

@@ -14,11 +14,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
public interface AlarmEventForwarder {
void send(String alertExchange, String alertRoutingKey, String json);
void send(String alertExchange, String alertRoutingKey, String json);
void close();
void close();
}

View File

@@ -14,20 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.ThresholdingConfiguration;
import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent;
@@ -43,6 +32,18 @@ import com.hpcloud.streaming.storm.Streams;
import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
/**
* Determines whether an alarm threshold has been exceeded.
* <p/>
@@ -55,193 +56,198 @@ import com.hpcloud.util.Serialization;
* </ul>
*/
public class AlarmThresholdingBolt extends BaseRichBolt {
private static final long serialVersionUID = -4126465124017857754L;
private static final long serialVersionUID = -4126465124017857754L;
private transient Logger LOG;
private DataSourceFactory dbConfig;
private KafkaProducerConfiguration producerConfiguration;
final Map<String, Alarm> alarms = new HashMap<String, Alarm>();
private String alertExchange;
private String alertRoutingKey;
private transient AlarmDAO alarmDAO;
private transient AlarmEventForwarder alarmEventForwarder;
private OutputCollector collector;
private transient Logger logger;
private DataSourceFactory dbConfig;
private KafkaProducerConfiguration producerConfiguration;
final Map<String, Alarm> alarms = new HashMap<String, Alarm>();
private String alertExchange;
private String alertRoutingKey;
private transient AlarmDAO alarmDAO;
private transient AlarmEventForwarder alarmEventForwarder;
private OutputCollector collector;
public AlarmThresholdingBolt(DataSourceFactory dbConfig,
KafkaProducerConfiguration producerConfig) {
this.dbConfig = dbConfig;
this.producerConfiguration = producerConfig;
}
public AlarmThresholdingBolt(DataSourceFactory dbConfig, KafkaProducerConfiguration producerConfig) {
this.dbConfig = dbConfig;
this.producerConfiguration = producerConfig;
}
public AlarmThresholdingBolt(final AlarmDAO alarmDAO,
final AlarmEventForwarder alarmEventForwarder) {
this.alarmDAO = alarmDAO;
this.alarmEventForwarder = alarmEventForwarder;
}
public AlarmThresholdingBolt(final AlarmDAO alarmDAO,
final AlarmEventForwarder alarmEventForwarder) {
this.alarmDAO = alarmDAO;
this.alarmEventForwarder = alarmEventForwarder;
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {}
@Override
public void execute(Tuple tuple) {
@Override
public void execute(Tuple tuple) {
LOG.debug("tuple: {}", tuple);
try {
if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) {
String alarmId = tuple.getString(0);
Alarm alarm = getOrCreateAlarm(alarmId);
if (alarm == null)
return;
SubAlarm subAlarm = (SubAlarm) tuple.getValue(1);
evaluateThreshold(alarm, subAlarm);
} else if (EventProcessingBolt.ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
String eventType = tuple.getString(0);
String alarmId = tuple.getString(1);
if (EventProcessingBolt.DELETED.equals(eventType))
handleAlarmDeleted(alarmId);
else if (EventProcessingBolt.UPDATED.equals(eventType))
handleAlarmUpdated(alarmId, (AlarmUpdatedEvent) tuple.getValue(2));
}
} catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e);
} finally {
collector.ack(tuple);
}
}
@Override
@SuppressWarnings("rawtypes")
public void prepare(Map config, TopologyContext context, OutputCollector collector) {
LOG = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
LOG.info("Preparing");
this.collector = collector;
alertExchange = (String) config.get(ThresholdingConfiguration.ALERTS_EXCHANGE);
alertRoutingKey = (String) config.get(ThresholdingConfiguration.ALERTS_ROUTING_KEY);
if (alarmDAO == null) {
Injector.registerIfNotBound(AlarmDAO.class, new PersistenceModule(dbConfig));
alarmDAO = Injector.getInstance(AlarmDAO.class);
}
if (alarmEventForwarder == null) {
Injector.registerIfNotBound(AlarmEventForwarder.class, new ProducerModule(this.producerConfiguration));
alarmEventForwarder = Injector.getInstance(AlarmEventForwarder.class);
}
}
void evaluateThreshold(Alarm alarm, SubAlarm subAlarm) {
LOG.debug("Received state change for {}", subAlarm);
subAlarm.setNoState(false);
alarm.updateSubAlarm(subAlarm);
AlarmState initialState = alarm.getState();
// Wait for all sub alarms to have a state before evaluating to prevent flapping on startup
if (allSubAlarmsHaveState(alarm) && alarm.evaluate()) {
changeAlarmState(alarm, initialState, alarm.getStateChangeReason());
}
}
private boolean allSubAlarmsHaveState(final Alarm alarm) {
for (SubAlarm subAlarm : alarm.getSubAlarms()) {
if (subAlarm.isNoState()) {
return false;
}
}
return true;
}
private void changeAlarmState(Alarm alarm, AlarmState initialState,
String stateChangeReason) {
alarmDAO.updateState(alarm.getId(), alarm.getState());
LOG.debug("Alarm {} transitioned from {} to {}", alarm, initialState, alarm.getState());
AlarmStateTransitionedEvent event = new AlarmStateTransitionedEvent(alarm.getTenantId(),
alarm.getId(), alarm.getName(), alarm.getDescription(), initialState, alarm.getState(),
alarm.isActionsEnabled(), stateChangeReason, getTimestamp());
try {
alarmEventForwarder.send(alertExchange, alertRoutingKey, Serialization.toJson(event));
} catch (Exception ignore) {
LOG.debug("Failure sending alarm", ignore);
}
}
protected long getTimestamp() {
return System.currentTimeMillis() / 1000;
}
void handleAlarmDeleted(String alarmId) {
LOG.debug("Received AlarmDeletedEvent for alarm id {}", alarmId);
alarms.remove(alarmId);
}
void handleAlarmUpdated(String alarmId, AlarmUpdatedEvent alarmUpdatedEvent) {
final Alarm oldAlarm = alarms.get(alarmId);
if (oldAlarm == null) {
LOG.debug("Updated Alarm {} not loaded, ignoring");
return;
}
oldAlarm.setName(alarmUpdatedEvent.alarmName);
oldAlarm.setDescription(alarmUpdatedEvent.alarmDescription);
oldAlarm.setExpression(alarmUpdatedEvent.alarmExpression);
oldAlarm.setState(alarmUpdatedEvent.alarmState);
oldAlarm.setActionsEnabled(alarmUpdatedEvent.alarmActionsEnabled);
// Now handle the SubAlarms
// First remove the deleted SubAlarms so we don't have to consider them later
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.oldAlarmSubExpressions.entrySet()) {
LOG.debug("Removing deleted SubAlarm {}", entry.getValue());
if (!oldAlarm.removeSubAlarmById(entry.getKey()))
LOG.error("Did not find removed SubAlarm {}", entry.getValue());
}
// Reuse what we can from the changed SubAlarms
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.changedSubExpressions.entrySet()) {
final SubAlarm oldSubAlarm = oldAlarm.getSubAlarm(entry.getKey());
if (oldSubAlarm == null) {
LOG.error("Did not find changed SubAlarm {}", entry.getValue());
continue;
}
final SubAlarm newSubAlarm = new SubAlarm(entry.getKey(), oldAlarm.getId(), entry.getValue());
newSubAlarm.setState(oldSubAlarm.getState());
if (!oldSubAlarm.isCompatible(newSubAlarm)) {
newSubAlarm.setNoState(true);
}
LOG.debug("Changing SubAlarm from {} to {}", oldSubAlarm, newSubAlarm);
oldAlarm.updateSubAlarm(newSubAlarm);
}
// Add the new SubAlarms
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.newAlarmSubExpressions.entrySet()) {
final SubAlarm newSubAlarm = new SubAlarm(entry.getKey(), oldAlarm.getId(), entry.getValue());
newSubAlarm.setNoState(true);
LOG.debug("Adding SubAlarm {}", newSubAlarm);
oldAlarm.updateSubAlarm(newSubAlarm);
}
alarms.put(alarmId, oldAlarm);
}
String buildStateChangeReason() {
return null;
}
private Alarm getOrCreateAlarm(String alarmId) {
Alarm alarm = alarms.get(alarmId);
logger.debug("tuple: {}", tuple);
try {
if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) {
String alarmId = tuple.getString(0);
Alarm alarm = getOrCreateAlarm(alarmId);
if (alarm == null) {
alarm = alarmDAO.findById(alarmId);
if (alarm == null)
LOG.error("Failed to locate alarm for id {}", alarmId);
else {
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
subAlarm.setNoState(true);
}
alarms.put(alarmId, alarm);
}
return;
}
return alarm;
SubAlarm subAlarm = (SubAlarm) tuple.getValue(1);
evaluateThreshold(alarm, subAlarm);
} else if (EventProcessingBolt.ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
String eventType = tuple.getString(0);
String alarmId = tuple.getString(1);
if (EventProcessingBolt.DELETED.equals(eventType)) {
handleAlarmDeleted(alarmId);
} else if (EventProcessingBolt.UPDATED.equals(eventType)) {
handleAlarmUpdated(alarmId, (AlarmUpdatedEvent) tuple.getValue(2));
}
}
} catch (Exception e) {
logger.error("Error processing tuple {}", tuple, e);
} finally {
collector.ack(tuple);
}
}
@Override
@SuppressWarnings("rawtypes")
public void prepare(Map config, TopologyContext context, OutputCollector collector) {
logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
logger.info("Preparing");
this.collector = collector;
alertExchange = (String) config.get(ThresholdingConfiguration.ALERTS_EXCHANGE);
alertRoutingKey = (String) config.get(ThresholdingConfiguration.ALERTS_ROUTING_KEY);
if (alarmDAO == null) {
Injector.registerIfNotBound(AlarmDAO.class, new PersistenceModule(dbConfig));
alarmDAO = Injector.getInstance(AlarmDAO.class);
}
if (alarmEventForwarder == null) {
Injector.registerIfNotBound(AlarmEventForwarder.class, new ProducerModule(
this.producerConfiguration));
alarmEventForwarder = Injector.getInstance(AlarmEventForwarder.class);
}
}
void evaluateThreshold(Alarm alarm, SubAlarm subAlarm) {
logger.debug("Received state change for {}", subAlarm);
subAlarm.setNoState(false);
alarm.updateSubAlarm(subAlarm);
AlarmState initialState = alarm.getState();
// Wait for all sub alarms to have a state before evaluating to prevent flapping on startup
if (allSubAlarmsHaveState(alarm) && alarm.evaluate()) {
changeAlarmState(alarm, initialState, alarm.getStateChangeReason());
}
}
private boolean allSubAlarmsHaveState(final Alarm alarm) {
for (SubAlarm subAlarm : alarm.getSubAlarms()) {
if (subAlarm.isNoState()) {
return false;
}
}
return true;
}
private void changeAlarmState(Alarm alarm, AlarmState initialState, String stateChangeReason) {
alarmDAO.updateState(alarm.getId(), alarm.getState());
logger.debug("Alarm {} transitioned from {} to {}", alarm, initialState, alarm.getState());
AlarmStateTransitionedEvent event =
new AlarmStateTransitionedEvent(alarm.getTenantId(), alarm.getId(), alarm.getName(),
alarm.getDescription(), initialState, alarm.getState(), alarm.isActionsEnabled(),
stateChangeReason, getTimestamp());
try {
alarmEventForwarder.send(alertExchange, alertRoutingKey, Serialization.toJson(event));
} catch (Exception ignore) {
logger.debug("Failure sending alarm", ignore);
}
}
protected long getTimestamp() {
return System.currentTimeMillis() / 1000;
}
void handleAlarmDeleted(String alarmId) {
logger.debug("Received AlarmDeletedEvent for alarm id {}", alarmId);
alarms.remove(alarmId);
}
void handleAlarmUpdated(String alarmId, AlarmUpdatedEvent alarmUpdatedEvent) {
final Alarm oldAlarm = alarms.get(alarmId);
if (oldAlarm == null) {
logger.debug("Updated Alarm {} not loaded, ignoring");
return;
}
oldAlarm.setName(alarmUpdatedEvent.alarmName);
oldAlarm.setDescription(alarmUpdatedEvent.alarmDescription);
oldAlarm.setExpression(alarmUpdatedEvent.alarmExpression);
oldAlarm.setState(alarmUpdatedEvent.alarmState);
oldAlarm.setActionsEnabled(alarmUpdatedEvent.alarmActionsEnabled);
// Now handle the SubAlarms
// First remove the deleted SubAlarms so we don't have to consider them later
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.oldAlarmSubExpressions
.entrySet()) {
logger.debug("Removing deleted SubAlarm {}", entry.getValue());
if (!oldAlarm.removeSubAlarmById(entry.getKey())) {
logger.error("Did not find removed SubAlarm {}", entry.getValue());
}
}
// Reuse what we can from the changed SubAlarms
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.changedSubExpressions
.entrySet()) {
final SubAlarm oldSubAlarm = oldAlarm.getSubAlarm(entry.getKey());
if (oldSubAlarm == null) {
logger.error("Did not find changed SubAlarm {}", entry.getValue());
continue;
}
final SubAlarm newSubAlarm = new SubAlarm(entry.getKey(), oldAlarm.getId(), entry.getValue());
newSubAlarm.setState(oldSubAlarm.getState());
if (!oldSubAlarm.isCompatible(newSubAlarm)) {
newSubAlarm.setNoState(true);
}
logger.debug("Changing SubAlarm from {} to {}", oldSubAlarm, newSubAlarm);
oldAlarm.updateSubAlarm(newSubAlarm);
}
// Add the new SubAlarms
for (Map.Entry<String, AlarmSubExpression> entry : alarmUpdatedEvent.newAlarmSubExpressions
.entrySet()) {
final SubAlarm newSubAlarm = new SubAlarm(entry.getKey(), oldAlarm.getId(), entry.getValue());
newSubAlarm.setNoState(true);
logger.debug("Adding SubAlarm {}", newSubAlarm);
oldAlarm.updateSubAlarm(newSubAlarm);
}
alarms.put(alarmId, oldAlarm);
}
String buildStateChangeReason() {
return null;
}
private Alarm getOrCreateAlarm(String alarmId) {
Alarm alarm = alarms.get(alarmId);
if (alarm == null) {
alarm = alarmDAO.findById(alarmId);
if (alarm == null) {
logger.error("Failed to locate alarm for id {}", alarmId);
} else {
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
subAlarm.setNoState(true);
}
alarms.put(alarmId, alarm);
}
}
return alarm;
}
}

View File

@@ -14,107 +14,108 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.io.Serializable;
import java.util.Properties;
/**
* This class replaces io.dropwizard.db.DataSourceFactory which currently can't be used
* with Storm because it is not marked Serializable. This class could be deleted and replaced
* by that class when and if io.dropwizard.db.DataSourceFactory is marked Serializable.
* This class replaces io.dropwizard.db.DataSourceFactory which currently can't be used with Storm
* because it is not marked Serializable. This class could be deleted and replaced by that class
* when and if io.dropwizard.db.DataSourceFactory is marked Serializable.
*/
public class DataSourceFactory implements Serializable {
private static final long serialVersionUID = -1903552028062110222L;
private static final long serialVersionUID = -1903552028062110222L;
private String user;
private String user;
private String password;
private String password;
private String url;
private String url;
private String driverClass;
private String driverClass;
private Properties properties;
private Properties properties;
private String maxWaitForConnection;
private String maxWaitForConnection;
private String validationQuery;
private String validationQuery;
private String minSize;
private String minSize;
private String maxSize;
private String maxSize;
public String getUser() {
return user;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public void setUser(String user) {
this.user = user;
}
public String getPassword() {
return password;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public void setPassword(String password) {
this.password = password;
}
public String getUrl() {
return url;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public void setUrl(String url) {
this.url = url;
}
public String getDriverClass() {
return driverClass;
}
public String getDriverClass() {
return driverClass;
}
public void setDriverClass(String driverClass) {
this.driverClass = driverClass;
}
public void setDriverClass(String driverClass) {
this.driverClass = driverClass;
}
public Properties getProperties() {
return properties;
}
public Properties getProperties() {
return properties;
}
public void setProperties(Properties properties) {
this.properties = properties;
}
public void setProperties(Properties properties) {
this.properties = properties;
}
public String getMaxWaitForConnection() {
return maxWaitForConnection;
}
public String getMaxWaitForConnection() {
return maxWaitForConnection;
}
public void setMaxWaitForConnection(String maxWaitForConnection) {
this.maxWaitForConnection = maxWaitForConnection;
}
public void setMaxWaitForConnection(String maxWaitForConnection) {
this.maxWaitForConnection = maxWaitForConnection;
}
public String getValidationQuery() {
return validationQuery;
}
public String getValidationQuery() {
return validationQuery;
}
public void setValidationQuery(String validationQuery) {
this.validationQuery = validationQuery;
}
public void setValidationQuery(String validationQuery) {
this.validationQuery = validationQuery;
}
public String getMinSize() {
return minSize;
}
public String getMinSize() {
return minSize;
}
public void setMinSize(String minSize) {
this.minSize = minSize;
}
public void setMinSize(String minSize) {
this.minSize = minSize;
}
public String getMaxSize() {
return maxSize;
}
public String getMaxSize() {
return maxSize;
}
public void setMaxSize(String maxSize) {
this.maxSize = maxSize;
}
public void setMaxSize(String maxSize) {
this.maxSize = maxSize;
}
}
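Because the class is a plain serializable bean, it can also be populated directly in code; in the running topology the fields are expected to be bound from the thresholding YAML configuration instead. A minimal sketch with placeholder values (driver, URL, and credentials are assumptions for illustration only, not values from this repository):

import com.hpcloud.mon.infrastructure.thresholding.DataSourceFactory;

// Sketch only: fills a DataSourceFactory with placeholder connection settings.
public class DataSourceFactorySketch {
  public static DataSourceFactory exampleDatabaseConfig() {
    DataSourceFactory db = new DataSourceFactory();
    db.setDriverClass("com.mysql.jdbc.Driver");
    db.setUrl("jdbc:mysql://localhost:3306/mon");
    db.setUser("thresh");
    db.setPassword("password");
    db.setValidationQuery("SELECT 1");
    return db;
  }
}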

View File

@@ -14,21 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
@@ -38,9 +26,22 @@ import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.streaming.storm.Logging;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
/**
* Processes events by emitting tuples related to the event.
*
*
* <ul>
* <li>Input: Object event
* <li>Output alarm-events: String eventType, String alarmId
@@ -60,7 +61,8 @@ public class EventProcessingBolt extends BaseRichBolt {
/** Stream for metric and sub-alarm specific events. */
public static final String METRIC_SUB_ALARM_EVENT_STREAM_ID = "metric-sub-alarm-events";
public static final String[] ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType", "alarmId", "alarm"};
public static final String[] ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType", "alarmId",
"alarm"};
public static final String[] METRIC_ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType",
"metricDefinitionAndTenantId", "subAlarmId"};
public static final String[] METRIC_SUB_ALARM_EVENT_STREAM_FIELDS = new String[] {"eventType",
@@ -71,29 +73,32 @@ public class EventProcessingBolt extends BaseRichBolt {
public static final String UPDATED = "updated";
public static final String RESEND = "resend";
private transient Logger LOG;
private transient Logger logger;
private OutputCollector collector;
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declareStream(ALARM_EVENT_STREAM_ID, new Fields(ALARM_EVENT_STREAM_FIELDS));
declarer.declareStream(METRIC_ALARM_EVENT_STREAM_ID, new Fields(METRIC_ALARM_EVENT_STREAM_FIELDS));
declarer.declareStream(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Fields(METRIC_SUB_ALARM_EVENT_STREAM_FIELDS));
declarer.declareStream(METRIC_ALARM_EVENT_STREAM_ID, new Fields(
METRIC_ALARM_EVENT_STREAM_FIELDS));
declarer.declareStream(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Fields(
METRIC_SUB_ALARM_EVENT_STREAM_FIELDS));
}
@Override
public void execute(Tuple tuple) {
try {
Object event = tuple.getValue(0);
LOG.trace("Received event for processing {}", event);
if (event instanceof AlarmCreatedEvent)
logger.trace("Received event for processing {}", event);
if (event instanceof AlarmCreatedEvent) {
handle((AlarmCreatedEvent) event);
else if (event instanceof AlarmDeletedEvent)
} else if (event instanceof AlarmDeletedEvent) {
handle((AlarmDeletedEvent) event);
else if (event instanceof AlarmUpdatedEvent)
} else if (event instanceof AlarmUpdatedEvent) {
handle((AlarmUpdatedEvent) event);
}
} catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e);
logger.error("Error processing tuple {}", tuple, e);
} finally {
collector.ack(tuple);
}
@@ -102,34 +107,40 @@ public class EventProcessingBolt extends BaseRichBolt {
@Override
@SuppressWarnings("rawtypes")
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
LOG = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
LOG.info("Preparing");
logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
logger.info("Preparing");
this.collector = collector;
}
void handle(AlarmCreatedEvent event) {
for (Map.Entry<String, AlarmSubExpression> subExpressionEntry : event.alarmSubExpressions.entrySet()) {
sendAddSubAlarm(event.alarmId, subExpressionEntry.getKey(), event.tenantId, subExpressionEntry.getValue());
for (Map.Entry<String, AlarmSubExpression> subExpressionEntry : event.alarmSubExpressions
.entrySet()) {
sendAddSubAlarm(event.alarmId, subExpressionEntry.getKey(), event.tenantId,
subExpressionEntry.getValue());
}
}
private void sendAddSubAlarm(String alarmId, String subAlarmId, String tenantId, AlarmSubExpression alarmSubExpression) {
sendSubAlarm(CREATED, alarmId, subAlarmId, tenantId, alarmSubExpression);
private void sendAddSubAlarm(String alarmId, String subAlarmId, String tenantId,
AlarmSubExpression alarmSubExpression) {
sendSubAlarm(CREATED, alarmId, subAlarmId, tenantId, alarmSubExpression);
}
private void sendUpdateSubAlarm(String alarmId, String subAlarmId, String tenantId, AlarmSubExpression alarmSubExpression) {
sendSubAlarm(UPDATED, alarmId, subAlarmId, tenantId, alarmSubExpression);
private void sendUpdateSubAlarm(String alarmId, String subAlarmId, String tenantId,
AlarmSubExpression alarmSubExpression) {
sendSubAlarm(UPDATED, alarmId, subAlarmId, tenantId, alarmSubExpression);
}
private void sendResendSubAlarm(String alarmId, String subAlarmId, String tenantId, AlarmSubExpression alarmSubExpression) {
sendSubAlarm(RESEND, alarmId, subAlarmId, tenantId, alarmSubExpression);
private void sendResendSubAlarm(String alarmId, String subAlarmId, String tenantId,
AlarmSubExpression alarmSubExpression) {
sendSubAlarm(RESEND, alarmId, subAlarmId, tenantId, alarmSubExpression);
}
private void sendSubAlarm(String eventType, String alarmId, String subAlarmId, String tenantId,
AlarmSubExpression alarmSubExpression) {
AlarmSubExpression alarmSubExpression) {
MetricDefinition metricDef = alarmSubExpression.getMetricDefinition();
collector.emit(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Values(eventType, new MetricDefinitionAndTenantId(metricDef, tenantId),
new SubAlarm(subAlarmId, alarmId, alarmSubExpression)));
collector.emit(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Values(eventType,
new MetricDefinitionAndTenantId(metricDef, tenantId), new SubAlarm(subAlarmId, alarmId,
alarmSubExpression)));
}
void handle(AlarmDeletedEvent event) {
@@ -142,25 +153,24 @@ public class EventProcessingBolt extends BaseRichBolt {
private void sendDeletedSubAlarm(String subAlarmId, String tenantId, MetricDefinition metricDef) {
collector.emit(METRIC_ALARM_EVENT_STREAM_ID, new Values(DELETED,
new MetricDefinitionAndTenantId(metricDef, tenantId), subAlarmId));
new MetricDefinitionAndTenantId(metricDef, tenantId), subAlarmId));
}
void handle(AlarmUpdatedEvent event) {
if ((!event.oldAlarmState.equals(event.alarmState) ||
!event.oldAlarmSubExpressions.isEmpty()) && event.changedSubExpressions.isEmpty() &&
event.newAlarmSubExpressions.isEmpty()) {
if ((!event.oldAlarmState.equals(event.alarmState) || !event.oldAlarmSubExpressions.isEmpty())
&& event.changedSubExpressions.isEmpty() && event.newAlarmSubExpressions.isEmpty()) {
for (Map.Entry<String, AlarmSubExpression> entry : event.unchangedSubExpressions.entrySet()) {
sendResendSubAlarm(event.alarmId, entry.getKey(), event.tenantId, entry.getValue());
sendResendSubAlarm(event.alarmId, entry.getKey(), event.tenantId, entry.getValue());
}
}
for (Map.Entry<String, AlarmSubExpression> entry : event.oldAlarmSubExpressions.entrySet()) {
sendDeletedSubAlarm(entry.getKey(), event.tenantId, entry.getValue().getMetricDefinition());
}
for (Map.Entry<String, AlarmSubExpression> entry : event.changedSubExpressions.entrySet()) {
sendUpdateSubAlarm(event.alarmId, entry.getKey(), event.tenantId, entry.getValue());
}
sendUpdateSubAlarm(event.alarmId, entry.getKey(), event.tenantId, entry.getValue());
}
for (Map.Entry<String, AlarmSubExpression> entry : event.newAlarmSubExpressions.entrySet()) {
sendAddSubAlarm(event.alarmId, entry.getKey(), event.tenantId, entry.getValue());
sendAddSubAlarm(event.alarmId, entry.getKey(), event.tenantId, entry.getValue());
}
collector.emit(ALARM_EVENT_STREAM_ID, new Values(UPDATED, event.alarmId, event));
}
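The class comment above lists the bolt's named output streams; downstream bolts subscribe to each stream individually when the topology is built. A minimal wiring sketch follows, assuming hypothetical component ids and a fields grouping on alarmId (the field named in ALARM_EVENT_STREAM_FIELDS); the project's real topology module may wire this differently.

import com.hpcloud.mon.infrastructure.thresholding.EventProcessingBolt;

import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;

// Sketch only: subscribes a downstream bolt to the alarm-events stream declared above,
// partitioned by alarmId so every event for a given alarm reaches the same task.
public class StreamWiringSketch {
  public static void wire(TopologyBuilder builder, String eventBoltId, IRichBolt thresholdBolt) {
    builder.setBolt("threshold-bolt", thresholdBolt, 2)
        .fieldsGrouping(eventBoltId, EventProcessingBolt.ALARM_EVENT_STREAM_ID,
            new Fields("alarmId"));
  }
}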

View File

@@ -14,51 +14,52 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.io.Serializable;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hpcloud.mon.EventSpoutConfig;
import com.hpcloud.mon.infrastructure.thresholding.deserializer.EventDeserializer;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.EventSpoutConfig;
import com.hpcloud.mon.infrastructure.thresholding.deserializer.EventDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.util.List;
public class EventSpout extends KafkaSpout {
private static final Logger LOG = LoggerFactory.getLogger(EventSpout.class);
private static final Logger logger = LoggerFactory.getLogger(EventSpout.class);
private static final long serialVersionUID = 8457340455857276878L;
private static final long serialVersionUID = 8457340455857276878L;
private final EventDeserializer deserializer;
private final EventDeserializer deserializer;
public EventSpout(EventSpoutConfig configuration, EventDeserializer deserializer) {
super(configuration);
this.deserializer = deserializer;
LOG.info("EventSpout created");
}
public EventSpout(EventSpoutConfig configuration, EventDeserializer deserializer) {
super(configuration);
this.deserializer = deserializer;
logger.info("EventSpout created");
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(deserializer.getOutputFields());
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(deserializer.getOutputFields());
}
@Override
protected void processMessage(byte[] message, SpoutOutputCollector collector) {
List<List<?>> events = deserializer.deserialize(message);
if (events != null) {
for (final List<?> event : events) {
final Object eventToSend = event.get(0);
if (!(eventToSend instanceof Serializable)) {
LOG.error("Class {} is not Serializable: {}", eventToSend.getClass(), eventToSend);
continue;
}
collector.emit(new Values(eventToSend));
}
@Override
protected void processMessage(byte[] message, SpoutOutputCollector collector) {
List<List<?>> events = deserializer.deserialize(message);
if (events != null) {
for (final List<?> event : events) {
final Object eventToSend = event.get(0);
if (!(eventToSend instanceof Serializable)) {
logger.error("Class {} is not Serializable: {}", eventToSend.getClass(), eventToSend);
continue;
}
collector.emit(new Values(eventToSend));
}
}
}
}

View File

@@ -14,13 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.configuration.KafkaProducerProperties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -28,31 +31,30 @@ import java.util.Properties;
public class KafkaAlarmEventForwarder implements AlarmEventForwarder {
private static final Logger LOG = LoggerFactory.getLogger(KafkaAlarmEventForwarder.class);
private static final Logger logger = LoggerFactory.getLogger(KafkaAlarmEventForwarder.class);
private final Producer<String, String> producer;
private final Producer<String, String> producer;
private final String topic;
private final String topic;
public KafkaAlarmEventForwarder(KafkaProducerConfiguration kafkaConfig) {
this.topic = kafkaConfig.getTopic();
Properties kafkaProperties = KafkaProducerProperties.createKafkaProperties(kafkaConfig);
ProducerConfig consumerConfig = new ProducerConfig(kafkaProperties);
producer = new Producer<String, String>(consumerConfig);
}
@Override
public void send(String alertExchange, String alertRoutingKey, String json) {
LOG.debug("sending alertExchange: {}, alertRoutingKey: {}, json: {}", alertExchange,
alertRoutingKey, json);
final KeyedMessage<String, String> message = new KeyedMessage<String, String>(topic, alertRoutingKey, json);
producer.send(message);
}
@Override
public void close() {
producer.close();
}
public KafkaAlarmEventForwarder(KafkaProducerConfiguration kafkaConfig) {
this.topic = kafkaConfig.getTopic();
Properties kafkaProperties = KafkaProducerProperties.createKafkaProperties(kafkaConfig);
ProducerConfig consumerConfig = new ProducerConfig(kafkaProperties);
producer = new Producer<String, String>(consumerConfig);
}
@Override
public void send(String alertExchange, String alertRoutingKey, String json) {
logger.debug("sending alertExchange: {}, alertRoutingKey: {}, json: {}", alertExchange,
alertRoutingKey, json);
final KeyedMessage<String, String> message =
new KeyedMessage<String, String>(topic, alertRoutingKey, json);
producer.send(message);
}
@Override
public void close() {
producer.close();
}
}

View File

@@ -14,15 +14,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import com.hpcloud.configuration.KafkaConsumerProperties;
import com.hpcloud.mon.KafkaSpoutConfig;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.base.BaseRichSpout;
import com.hpcloud.configuration.KafkaConsumerProperties;
import com.hpcloud.mon.KafkaSpoutConfig;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
@@ -38,144 +39,146 @@ import java.util.Map;
import java.util.Properties;
public abstract class KafkaSpout extends BaseRichSpout implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(KafkaSpout.class);
private static final Logger logger = LoggerFactory.getLogger(KafkaSpout.class);
private static final long serialVersionUID = 744004533863562119L;
private static final long serialVersionUID = 744004533863562119L;
private final KafkaSpoutConfig kafkaSpoutConfig;
private final KafkaSpoutConfig kafkaSpoutConfig;
private transient ConsumerConnector consumerConnector;
private transient ConsumerConnector consumerConnector;
private transient List<KafkaStream<byte[], byte[]>> streams = null;
private transient List<KafkaStream<byte[], byte[]>> streams = null;
private SpoutOutputCollector collector;
private SpoutOutputCollector collector;
private volatile boolean shouldContinue;
private volatile boolean shouldContinue;
private byte[] message;
private byte[] message;
private Thread readerThread;
private Thread readerThread;
private String spoutName;
private String spoutName;
private boolean waiting = false;
private boolean waiting = false;
protected KafkaSpout(KafkaSpoutConfig kafkaSpoutConfig) {
this.kafkaSpoutConfig = kafkaSpoutConfig;
protected KafkaSpout(KafkaSpoutConfig kafkaSpoutConfig) {
this.kafkaSpoutConfig = kafkaSpoutConfig;
}
@Override
public void activate() {
logger.info("Activated");
if (streams == null) {
Map<String, Integer> topicCountMap = new HashMap<>();
topicCountMap.put(kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic(), new Integer(1));
Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
consumerConnector.createMessageStreams(topicCountMap);
streams = consumerMap.get(kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
}
}
@Override
public void activate() {
LOG.info("Activated");
if (streams == null) {
Map<String, Integer> topicCountMap = new HashMap<>();
topicCountMap.put(kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic(), new Integer(1));
Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumerConnector.createMessageStreams(topicCountMap);
streams = consumerMap.get(kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
}
}
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
logger.info("Opened");
this.collector = collector;
logger.info(" topic = " + kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
this.spoutName = String.format("%s-%d", context.getThisComponentId(), context.getThisTaskId());
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
LOG.info("Opened");
this.collector = collector;
LOG.info(" topic = " + kafkaSpoutConfig.kafkaConsumerConfiguration.getTopic());
this.spoutName = String.format("%s-%d", context.getThisComponentId(), context.getThisTaskId());
Properties kafkaProperties =
KafkaConsumerProperties.createKafkaProperties(kafkaSpoutConfig.kafkaConsumerConfiguration);
// Have to use a different consumer.id for each spout so use the storm taskId. Otherwise,
// zookeeper complains about a conflicted ephemeral node when there is more than one spout
// reading from a topic
kafkaProperties.setProperty("consumer.id", String.valueOf(context.getThisTaskId()));
ConsumerConfig consumerConfig = new ConsumerConfig(kafkaProperties);
this.consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
}
Properties kafkaProperties = KafkaConsumerProperties.createKafkaProperties(
kafkaSpoutConfig.kafkaConsumerConfiguration);
// Have to use a different consumer.id for each spout so use the storm taskId. Otherwise,
// zookeeper complains about a conflicted ephemeral node when there is more than one spout
// reading from a topic
kafkaProperties.setProperty("consumer.id", String.valueOf(context.getThisTaskId()));
ConsumerConfig consumerConfig = new ConsumerConfig(kafkaProperties);
this.consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
}
@Override
public synchronized void deactivate() {
logger.info("deactivated");
this.consumerConnector.shutdown();
this.shouldContinue = false;
// Wake up the reader thread if it is waiting
notify();
}
@Override
public synchronized void deactivate() {
LOG.info("deactivated");
this.consumerConnector.shutdown();
this.shouldContinue = false;
// Wake up the reader thread if it is waiting
notify();
}
@Override
public void run() {
while (this.shouldContinue) {
final ConsumerIterator<byte[], byte[]> it = streams.get(0).iterator();
if (it.hasNext()) {
final byte[] message = it.next().message();
synchronized (this) {
this.message = message;
// Wake up getMessage() if it is waiting
if (this.waiting)
notify();
while (this.message != null && this.shouldContinue)
try {
wait();
} catch (InterruptedException e) {
LOG.info("Wait interrupted", e);
}
}
}
}
LOG.info("readerThread {} exited", this.readerThread.getName());
this.readerThread = null;
}
@Override
public void nextTuple() {
LOG.debug("nextTuple called");
checkReaderRunning();
final byte[] message = getMessage();
if (message != null) {
LOG.debug("streams iterator has next");
processMessage(message, collector);
}
}
private void checkReaderRunning() {
this.shouldContinue = true;
if (this.readerThread == null) {
final String threadName = String.format("%s reader", this.spoutName);
this.readerThread = new Thread(this, threadName);
this.readerThread.start();
LOG.info("Started Reader Thread {}", this.readerThread.getName());
}
}
/**
* Must only be called from a synchronized method
*
* @return
*/
private byte[] tryToGetMessage() {
final byte[] result = this.message;
if (result != null) {
this.message = null;
@Override
public void run() {
while (this.shouldContinue) {
final ConsumerIterator<byte[], byte[]> it = streams.get(0).iterator();
if (it.hasNext()) {
final byte[] message = it.next().message();
synchronized (this) {
this.message = message;
// Wake up getMessage() if it is waiting
if (this.waiting) {
notify();
}
while (this.message != null && this.shouldContinue)
try {
wait();
} catch (InterruptedException e) {
logger.info("Wait interrupted", e);
}
}
return result;
}
}
logger.info("readerThread {} exited", this.readerThread.getName());
this.readerThread = null;
}
private synchronized byte[] getMessage() {
final byte[] result = tryToGetMessage();
if (result != null) {
return result;
}
// Storm docs recommend a short sleep but make the sleep time
// configurable so we can lessen the load on dev systems
this.waiting = true;
try {
wait(kafkaSpoutConfig.maxWaitTime);
} catch (InterruptedException e) {
LOG.info("Sleep interrupted", e);
}
this.waiting = false;
return tryToGetMessage(); // We might have been woken up because there was a message
@Override
public void nextTuple() {
logger.debug("nextTuple called");
checkReaderRunning();
final byte[] message = getMessage();
if (message != null) {
logger.debug("streams iterator has next");
processMessage(message, collector);
}
}
protected abstract void processMessage(byte[] message, SpoutOutputCollector collector2);
private void checkReaderRunning() {
this.shouldContinue = true;
if (this.readerThread == null) {
final String threadName = String.format("%s reader", this.spoutName);
this.readerThread = new Thread(this, threadName);
this.readerThread.start();
logger.info("Started Reader Thread {}", this.readerThread.getName());
}
}
/**
* Must only be called from a synchronized method
*
* @return
*/
private byte[] tryToGetMessage() {
final byte[] result = this.message;
if (result != null) {
this.message = null;
notify();
}
return result;
}
private synchronized byte[] getMessage() {
final byte[] result = tryToGetMessage();
if (result != null) {
return result;
}
// Storm docs recommend a short sleep but make the sleep time
// configurable so we can lessen the load on dev systems
this.waiting = true;
try {
wait(kafkaSpoutConfig.maxWaitTime);
} catch (InterruptedException e) {
logger.info("Sleep interrupted", e);
}
this.waiting = false;
return tryToGetMessage(); // We might have been woken up because there was a message
}
protected abstract void processMessage(byte[] message, SpoutOutputCollector collector2);
}
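The reader thread started by checkReaderRunning() and the consumer path in nextTuple()/getMessage() coordinate through a single-message slot guarded by wait/notify, with the consumer waiting at most maxWaitTime. Below is a stripped-down sketch of that handoff (one reader, one consumer); the shutdown and shouldContinue handling of the real class is omitted, so this is an illustration of the pattern rather than the class itself.

// Sketch only: single-slot handoff between one reader thread and one consumer.
public class SingleSlotHandoffSketch {
  private byte[] slot;
  private boolean consumerWaiting;

  // Called by the reader thread for every message pulled from Kafka.
  public synchronized void put(byte[] message) throws InterruptedException {
    slot = message;
    if (consumerWaiting) {
      notify(); // wake the consumer blocked in take()
    }
    while (slot != null) {
      wait(); // park until the consumer drains the slot
    }
  }

  // Called from the consumer; returns null if nothing arrived within maxWaitMillis.
  public synchronized byte[] take(long maxWaitMillis) throws InterruptedException {
    if (slot == null) {
      consumerWaiting = true;
      wait(maxWaitMillis); // bounded wait, mirroring kafkaSpoutConfig.maxWaitTime
      consumerWaiting = false;
    }
    byte[] message = slot;
    slot = null;
    if (message != null) {
      notify(); // release the reader parked in put()
    }
    return message;
  }
}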

View File

@@ -14,26 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.Config;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.metric.Metric;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
@@ -46,14 +29,32 @@ import com.hpcloud.streaming.storm.Streams;
import com.hpcloud.streaming.storm.Tuples;
import com.hpcloud.util.Injector;
import backtype.storm.Config;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Aggregates metrics for individual alarms. Receives metric/alarm tuples and tick tuples, and
* outputs alarm information whenever an alarm's state changes. Concerned with alarms that relate to
* a specific metric.
*
*
* The TICK_TUPLE_SECONDS_KEY value should be no greater than the smallest possible window width.
* This ensures that the window slides in time with the expected metrics.
*
*
* <ul>
* <li>Input: MetricDefinition metricDefinition, Metric metric
* <li>Input metric-alarm-events: String eventType, MetricDefinition metricDefinition, String
@@ -66,13 +67,14 @@ import com.hpcloud.util.Injector;
public class MetricAggregationBolt extends BaseRichBolt {
private static final long serialVersionUID = 5624314196838090726L;
public static final String TICK_TUPLE_SECONDS_KEY = "com.hpcloud.mon.aggregation.tick.seconds";
public static final String[] FIELDS = new String[] { "alarmId", "subAlarm" };
public static final String[] FIELDS = new String[] {"alarmId", "subAlarm"};
public static final String METRIC_AGGREGATION_CONTROL_STREAM = "MetricAggregationControl";
public static final String[] METRIC_AGGREGATION_CONTROL_FIELDS = new String[] { "directive" };
public static final String[] METRIC_AGGREGATION_CONTROL_FIELDS = new String[] {"directive"};
public static final String METRICS_BEHIND = "MetricsBehind";
final Map<MetricDefinitionAndTenantId, SubAlarmStatsRepository> subAlarmStatsRepos = new HashMap<>();
private transient Logger LOG;
final Map<MetricDefinitionAndTenantId, SubAlarmStatsRepository> subAlarmStatsRepos =
new HashMap<>();
private transient Logger logger;
private DataSourceFactory dbConfig;
private transient SubAlarmDAO subAlarmDAO;
/** Namespaces for which metrics are received sporadically */
@@ -96,38 +98,43 @@ public class MetricAggregationBolt extends BaseRichBolt {
@Override
public void execute(Tuple tuple) {
LOG.debug("tuple: {}", tuple);
logger.debug("tuple: {}", tuple);
try {
if (Tuples.isTickTuple(tuple)) {
evaluateAlarmsAndSlideWindows();
} else {
if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) {
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(0);
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(0);
Metric metric = (Metric) tuple.getValueByField("metric");
aggregateValues(metricDefinitionAndTenantId, metric);
} else if (METRIC_AGGREGATION_CONTROL_STREAM.equals(tuple.getSourceStreamId())) {
processControl(tuple.getString(0));
} else {
String eventType = tuple.getString(0);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(1);
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(1);
if (EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
String subAlarmId = tuple.getString(2);
if (EventProcessingBolt.DELETED.equals(eventType))
if (EventProcessingBolt.DELETED.equals(eventType)) {
handleAlarmDeleted(metricDefinitionAndTenantId, subAlarmId);
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
}
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple
.getSourceStreamId())) {
SubAlarm subAlarm = (SubAlarm) tuple.getValue(2);
if (EventProcessingBolt.CREATED.equals(eventType))
if (EventProcessingBolt.CREATED.equals(eventType)) {
handleAlarmCreated(metricDefinitionAndTenantId, subAlarm);
else if (EventProcessingBolt.UPDATED.equals(eventType))
} else if (EventProcessingBolt.UPDATED.equals(eventType)) {
handleAlarmUpdated(metricDefinitionAndTenantId, subAlarm);
else if (EventProcessingBolt.RESEND.equals(eventType))
} else if (EventProcessingBolt.RESEND.equals(eventType)) {
handleAlarmResend(metricDefinitionAndTenantId, subAlarm);
}
}
}
}
} catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e);
logger.error("Error processing tuple {}", tuple, e);
} finally {
collector.ack(tuple);
}
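The tick-tuple branch above is what triggers evaluateAlarmsAndSlideWindows(), and the class comment requires the tick period (TICK_TUPLE_SECONDS_KEY) to be no greater than the smallest window width. A minimal sketch of requesting such a frequency from Storm via the standard Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS setting follows; the 60-second ceiling is an assumption, and the bolt's actual getComponentConfiguration() wiring may differ.

import backtype.storm.Config;

import java.util.HashMap;
import java.util.Map;

// Sketch only: asks Storm for tick tuples no less often than the smallest window width.
public class TickTupleConfigSketch {
  public static Map<String, Object> componentConfiguration(int smallestWindowSeconds) {
    Map<String, Object> conf = new HashMap<String, Object>();
    conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, Math.min(60, smallestWindowSeconds));
    return conf;
  }
}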
@@ -135,11 +142,11 @@ public class MetricAggregationBolt extends BaseRichBolt {
private void processControl(final String directive) {
if (METRICS_BEHIND.equals(directive)) {
LOG.debug("Received {}", directive);
logger.debug("Received {}", directive);
this.upToDate = false;
} else {
logger.error("Unknown directive '{}'", directive);
}
else
LOG.error("Unknown directive '{}'", directive);
}
@Override
@@ -153,8 +160,8 @@ public class MetricAggregationBolt extends BaseRichBolt {
@Override
@SuppressWarnings("rawtypes")
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
LOG = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
LOG.info("Preparing");
logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
logger.info("Preparing");
this.collector = collector;
if (subAlarmDAO == null) {
@@ -167,17 +174,21 @@ public class MetricAggregationBolt extends BaseRichBolt {
* Aggregates values for the {@code metric} that are within the periods defined for the alarm.
*/
void aggregateValues(MetricDefinitionAndTenantId metricDefinitionAndTenantId, Metric metric) {
SubAlarmStatsRepository subAlarmStatsRepo = getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null || metric == null)
SubAlarmStatsRepository subAlarmStatsRepo =
getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null || metric == null) {
return;
}
for (SubAlarmStats stats : subAlarmStatsRepo.get()) {
if (stats.getStats().addValue(metric.value, metric.timestamp))
LOG.trace("Aggregated value {} at {} for {}. Updated {}", metric.value, metric.timestamp,
metricDefinitionAndTenantId, stats.getStats());
else
LOG.warn("Metric is too old, age {} seconds: timestamp {} for {}, {}", currentTimeSeconds() - metric.timestamp,
metric.timestamp, metricDefinitionAndTenantId, stats.getStats());
if (stats.getStats().addValue(metric.value, metric.timestamp)) {
logger.trace("Aggregated value {} at {} for {}. Updated {}", metric.value,
metric.timestamp, metricDefinitionAndTenantId, stats.getStats());
} else {
logger.warn("Metric is too old, age {} seconds: timestamp {} for {}, {}",
currentTimeSeconds() - metric.timestamp, metric.timestamp, metricDefinitionAndTenantId,
stats.getStats());
}
}
}
@@ -186,30 +197,31 @@ public class MetricAggregationBolt extends BaseRichBolt {
* ago, then sliding the window to the current time.
*/
void evaluateAlarmsAndSlideWindows() {
LOG.debug("evaluateAlarmsAndSlideWindows called");
logger.debug("evaluateAlarmsAndSlideWindows called");
long newWindowTimestamp = currentTimeSeconds();
for (SubAlarmStatsRepository subAlarmStatsRepo : subAlarmStatsRepos.values())
for (SubAlarmStatsRepository subAlarmStatsRepo : subAlarmStatsRepos.values()) {
for (SubAlarmStats subAlarmStats : subAlarmStatsRepo.get()) {
if (upToDate) {
LOG.debug("Evaluating {}", subAlarmStats);
logger.debug("Evaluating {}", subAlarmStats);
if (subAlarmStats.evaluateAndSlideWindow(newWindowTimestamp)) {
LOG.debug("Alarm state changed for {}", subAlarmStats);
collector.emit(new Values(subAlarmStats.getSubAlarm().getAlarmId(),
subAlarmStats.getSubAlarm()));
logger.debug("Alarm state changed for {}", subAlarmStats);
collector.emit(new Values(subAlarmStats.getSubAlarm().getAlarmId(), subAlarmStats
.getSubAlarm()));
}
}
else {
} else {
subAlarmStats.slideWindow(newWindowTimestamp);
}
}
}
if (!upToDate) {
LOG.info("Did not evaluate SubAlarms because Metrics are not up to date");
upToDate = true;
logger.info("Did not evaluate SubAlarms because Metrics are not up to date");
upToDate = true;
}
}
/**
* Only used for testing.
*
* @return
*/
protected long currentTimeSeconds() {
@@ -217,26 +229,28 @@ public class MetricAggregationBolt extends BaseRichBolt {
}
/**
* Returns an existing or newly created SubAlarmStatsRepository for the {@code metricDefinitionAndTenantId}.
* Newly created SubAlarmStatsRepositories are initialized with stats whose view ends one minute
* from now.
* Returns an existing or newly created SubAlarmStatsRepository for the
* {@code metricDefinitionAndTenantId}. Newly created SubAlarmStatsRepositories are initialized
* with stats whose view ends one minute from now.
*/
SubAlarmStatsRepository getOrCreateSubAlarmStatsRepo(MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
SubAlarmStatsRepository getOrCreateSubAlarmStatsRepo(
MetricDefinitionAndTenantId metricDefinitionAndTenantId) {
SubAlarmStatsRepository subAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null) {
List<SubAlarm> subAlarms = subAlarmDAO.find(metricDefinitionAndTenantId);
if (subAlarms.isEmpty())
LOG.warn("Failed to find sub alarms for {}", metricDefinitionAndTenantId);
else {
LOG.debug("Creating SubAlarmStats for {}", metricDefinitionAndTenantId);
if (subAlarms.isEmpty()) {
logger.warn("Failed to find sub alarms for {}", metricDefinitionAndTenantId);
} else {
logger.debug("Creating SubAlarmStats for {}", metricDefinitionAndTenantId);
for (SubAlarm subAlarm : subAlarms) {
// TODO should treat metric def name prefix like a namespace
subAlarm.setSporadicMetric(sporadicMetricNamespaces.contains(metricDefinitionAndTenantId.metricDefinition.name));
subAlarm.setSporadicMetric(sporadicMetricNamespaces
.contains(metricDefinitionAndTenantId.metricDefinition.name));
}
subAlarmStatsRepo = new SubAlarmStatsRepository();
for (SubAlarm subAlarm : subAlarms) {
long viewEndTimestamp = currentTimeSeconds() + subAlarm.getExpression().getPeriod();
subAlarmStatsRepo.add(subAlarm, viewEndTimestamp);
long viewEndTimestamp = currentTimeSeconds() + subAlarm.getExpression().getPeriod();
subAlarmStatsRepo.add(subAlarm, viewEndTimestamp);
}
subAlarmStatsRepos.put(metricDefinitionAndTenantId, subAlarmStatsRepo);
}
@ -249,42 +263,50 @@ public class MetricAggregationBolt extends BaseRichBolt {
   * Adds the {@code subAlarm} to the subAlarmStatsRepo for the {@code metricDefinitionAndTenantId}.
*/
void handleAlarmCreated(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) {
LOG.debug("Received AlarmCreatedEvent for {}", subAlarm);
logger.debug("Received AlarmCreatedEvent for {}", subAlarm);
addSubAlarm(metricDefinitionAndTenantId, subAlarm);
}
void handleAlarmResend(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm resendSubAlarm) {
final RepoAndStats repoAndStats = findExistingSubAlarmStats(metricDefinitionAndTenantId, resendSubAlarm);
if (repoAndStats == null)
void handleAlarmResend(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
SubAlarm resendSubAlarm) {
final RepoAndStats repoAndStats =
findExistingSubAlarmStats(metricDefinitionAndTenantId, resendSubAlarm);
if (repoAndStats == null) {
return;
}
final SubAlarmStats oldSubAlarmStats = repoAndStats.subAlarmStats;
final SubAlarm oldSubAlarm = oldSubAlarmStats.getSubAlarm();
resendSubAlarm.setState(oldSubAlarm.getState());
resendSubAlarm.setNoState(true); // Have it send its state again so the Alarm can be evaluated
LOG.debug("Forcing SubAlarm {} to send state at next evaluation", oldSubAlarm);
logger.debug("Forcing SubAlarm {} to send state at next evaluation", oldSubAlarm);
oldSubAlarmStats.updateSubAlarm(resendSubAlarm);
}
private RepoAndStats findExistingSubAlarmStats(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
SubAlarm oldSubAlarm) {
final SubAlarmStatsRepository oldSubAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId);
private RepoAndStats findExistingSubAlarmStats(
MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm oldSubAlarm) {
final SubAlarmStatsRepository oldSubAlarmStatsRepo =
subAlarmStatsRepos.get(metricDefinitionAndTenantId);
if (oldSubAlarmStatsRepo == null) {
LOG.error("Did not find SubAlarmStatsRepository for MetricDefinition {}", metricDefinitionAndTenantId);
logger.error("Did not find SubAlarmStatsRepository for MetricDefinition {}",
metricDefinitionAndTenantId);
return null;
}
final SubAlarmStats oldSubAlarmStats = oldSubAlarmStatsRepo.get(oldSubAlarm.getId());
if (oldSubAlarmStats == null) {
LOG.error("Did not find existing SubAlarm {} in SubAlarmStatsRepository", oldSubAlarm);
logger.error("Did not find existing SubAlarm {} in SubAlarmStatsRepository", oldSubAlarm);
return null;
}
return new RepoAndStats(oldSubAlarmStatsRepo, oldSubAlarmStats);
}
private void addSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) {
SubAlarmStatsRepository subAlarmStatsRepo = getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null)
private void addSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
SubAlarm subAlarm) {
SubAlarmStatsRepository subAlarmStatsRepo =
getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
if (subAlarmStatsRepo == null) {
return;
}
long viewEndTimestamp = currentTimeSeconds() + subAlarm.getExpression().getPeriod();
subAlarmStatsRepo.add(subAlarm, viewEndTimestamp);
@ -296,8 +318,9 @@ public class MetricAggregationBolt extends BaseRichBolt {
* MetricDefinition can't have changed, just how it is evaluated
*/
void handleAlarmUpdated(MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm subAlarm) {
LOG.debug("Received AlarmUpdatedEvent for {}", subAlarm);
final RepoAndStats repoAndStats = findExistingSubAlarmStats(metricDefinitionAndTenantId, subAlarm);
logger.debug("Received AlarmUpdatedEvent for {}", subAlarm);
final RepoAndStats repoAndStats =
findExistingSubAlarmStats(metricDefinitionAndTenantId, subAlarm);
if (repoAndStats != null) {
// Clear the old SubAlarm, but save the SubAlarm state
final SubAlarmStats oldSubAlarmStats = repoAndStats.subAlarmStats;
@ -305,12 +328,14 @@ public class MetricAggregationBolt extends BaseRichBolt {
subAlarm.setState(oldSubAlarm.getState());
      subAlarm.setNoState(true); // Sending too many state changes doesn't hurt; sending too few does
if (oldSubAlarm.isCompatible(subAlarm)) {
LOG.debug("Changing SubAlarm {} to SubAlarm {} and keeping measurements", oldSubAlarm, subAlarm);
logger.debug("Changing SubAlarm {} to SubAlarm {} and keeping measurements", oldSubAlarm,
subAlarm);
oldSubAlarmStats.updateSubAlarm(subAlarm);
return;
}
// Have to completely change the SubAlarmStats
LOG.debug("Changing SubAlarm {} to SubAlarm {} and flushing measurements", oldSubAlarm, subAlarm);
logger.debug("Changing SubAlarm {} to SubAlarm {} and flushing measurements", oldSubAlarm,
subAlarm);
repoAndStats.subAlarmStatsRepository.remove(subAlarm.getId());
}
addSubAlarm(metricDefinitionAndTenantId, subAlarm);
@ -321,12 +346,13 @@ public class MetricAggregationBolt extends BaseRichBolt {
* {@code metricDefinitionAndTenantId}.
*/
void handleAlarmDeleted(MetricDefinitionAndTenantId metricDefinitionAndTenantId, String subAlarmId) {
LOG.debug("Received AlarmDeletedEvent for subAlarm id {}", subAlarmId);
logger.debug("Received AlarmDeletedEvent for subAlarm id {}", subAlarmId);
SubAlarmStatsRepository subAlarmStatsRepo = subAlarmStatsRepos.get(metricDefinitionAndTenantId);
if (subAlarmStatsRepo != null) {
subAlarmStatsRepo.remove(subAlarmId);
if (subAlarmStatsRepo.isEmpty())
if (subAlarmStatsRepo.isEmpty()) {
subAlarmStatsRepos.remove(metricDefinitionAndTenantId);
}
}
}
@ -334,8 +360,7 @@ public class MetricAggregationBolt extends BaseRichBolt {
public final SubAlarmStatsRepository subAlarmStatsRepository;
public final SubAlarmStats subAlarmStats;
public RepoAndStats(SubAlarmStatsRepository subAlarmStatsRepository,
SubAlarmStats subAlarmStats) {
public RepoAndStats(SubAlarmStatsRepository subAlarmStatsRepository, SubAlarmStats subAlarmStats) {
this.subAlarmStatsRepository = subAlarmStatsRepository;
this.subAlarmStats = subAlarmStats;
}

View File

@ -14,24 +14,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.metric.Metric;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher;
@ -44,32 +29,48 @@ import com.hpcloud.streaming.storm.Logging;
import com.hpcloud.streaming.storm.Streams;
import com.hpcloud.util.Injector;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Filters metrics for which there is no associated alarm and forwards metrics for which there is an
* alarm. Receives metric alarm and metric sub-alarm events to update metric definitions.
*
* METRIC_DEFS table and the matcher are shared between any bolts in the same worker process so that all of the
* MetricDefinitionAndTenantIds for existing SubAlarms only have to be read once and because it is not
* possible to predict which bolt gets which Metrics so all Bolts know about all starting
* MetricDefinitionAndTenantIds.
*
 * The METRIC_DEFS table and the matcher are shared between any bolts in the same worker process
 * so that all of the MetricDefinitionAndTenantIds for existing SubAlarms only have to be read
 * once, and because it is not possible to predict which bolt gets which Metrics, all Bolts must
 * know about all starting MetricDefinitionAndTenantIds.
*
* The current topology uses shuffleGrouping for the incoming Metrics and allGrouping for the
* events. So, any Bolt may get any Metric so the METRIC_DEFS table and the matcher must be kept up to date
* for all MetricDefinitionAndTenantIds.
*
 * The METRIC_DEFS table contains a List of SubAlarm IDs that reference the same MetricDefinitionAndTenantId
* so if a SubAlarm is deleted, the MetricDefinitionAndTenantId will only be deleted from it and the matcher if no
* more SubAlarms reference it. Incrementing and decrementing the count is done under the static lock SENTINAL
* to ensure it is correct across all Bolts sharing the same METRIC_DEFS table and the matcher. The
* amount of adds and deletes will be very small compared to the number of Metrics so it shouldn't
* block the Metric handling.
*
 * events. So, any Bolt may get any Metric, and the METRIC_DEFS table and the matcher must be kept
 * up to date for all MetricDefinitionAndTenantIds.
*
 * The METRIC_DEFS table contains a List of SubAlarm IDs that reference the same
* MetricDefinitionAndTenantId so if a SubAlarm is deleted, the MetricDefinitionAndTenantId will
* only be deleted from it and the matcher if no more SubAlarms reference it. Incrementing and
* decrementing the count is done under the static lock SENTINAL to ensure it is correct across all
* Bolts sharing the same METRIC_DEFS table and the matcher. The amount of adds and deletes will be
* very small compared to the number of Metrics so it shouldn't block the Metric handling.
*
* <ul>
* <li>Input: MetricDefinition metricDefinition, Metric metric
* <li>Input metric-alarm-events: String eventType, MetricDefinitionAndTenantId metricDefinitionAndTenantId, String
* alarmId
* <li>Input metric-sub-alarm-events: String eventType, MetricDefinitionAndTenantId metricDefinitionAndTenantId, SubAlarm
* subAlarm
* <li>Input metric-alarm-events: String eventType, MetricDefinitionAndTenantId
* metricDefinitionAndTenantId, String alarmId
* <li>Input metric-sub-alarm-events: String eventType, MetricDefinitionAndTenantId
* metricDefinitionAndTenantId, SubAlarm subAlarm
* <li>Output: MetricDefinitionAndTenantId metricDefinitionAndTenantId, Metric metric
* </ul>
*/
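
A minimal wiring sketch of the grouping described above: shuffleGrouping spreads Metrics across filtering bolt instances, while allGrouping replicates alarm events to every instance, which is why the shared METRIC_DEFS table and matcher must stay consistent. The component ids, parallelism, and the event-bolt input below are assumptions (the real topology is assembled elsewhere; the tests register a TopologyModule for this); only the Storm grouping calls and the EventProcessingBolt stream ids mirror the description.

package com.hpcloud.mon.infrastructure.thresholding;

import backtype.storm.generated.StormTopology;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.IRichSpout;
import backtype.storm.topology.TopologyBuilder;

// Illustrative wiring only; not the project's actual topology definition.
public class FilteringWiringSketch {
  public static StormTopology wire(IRichSpout metricSpout, IRichSpout eventSpout,
      IRichBolt eventProcessingBolt, MetricFilteringBolt filteringBolt) {
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("metrics-spout", metricSpout);
    builder.setSpout("event-spout", eventSpout);
    builder.setBolt("event-bolt", eventProcessingBolt).shuffleGrouping("event-spout");
    builder.setBolt("filtering-bolt", filteringBolt, 4)
        // shuffleGrouping: any filtering bolt instance may receive any Metric
        .shuffleGrouping("metrics-spout")
        // allGrouping: every instance sees every alarm event, keeping METRIC_DEFS and
        // the matcher up to date in each bolt
        .allGrouping("event-bolt", EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID)
        .allGrouping("event-bolt", EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
    return builder.createTopology();
  }
}
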
@ -82,16 +83,21 @@ public class MetricFilteringBolt extends BaseRichBolt {
public static final int MAX_LAG_MESSAGES_DEFAULT = 10;
public static final String LAG_MESSAGE_PERIOD_KEY = "com.hpcloud.mon.filtering.lagMessagePeriod";
public static final int LAG_MESSAGE_PERIOD_DEFAULT = 30;
public static final String[] FIELDS = new String[] { "metricDefinitionAndTenantId", "metric" };
public static final String[] FIELDS = new String[] {"metricDefinitionAndTenantId", "metric"};
private static final int MIN_LAG_VALUE = PropertyFinder.getIntProperty(MIN_LAG_VALUE_KEY, MIN_LAG_VALUE_DEFAULT, 0, Integer.MAX_VALUE);
private static final int MAX_LAG_MESSAGES = PropertyFinder.getIntProperty(MAX_LAG_MESSAGES_KEY, MAX_LAG_MESSAGES_DEFAULT, 0, Integer.MAX_VALUE);
private static final int LAG_MESSAGE_PERIOD = PropertyFinder.getIntProperty(LAG_MESSAGE_PERIOD_KEY, LAG_MESSAGE_PERIOD_DEFAULT, 1, 600);
private static final Map<MetricDefinitionAndTenantId, List<String>> METRIC_DEFS = new ConcurrentHashMap<>();
private static final MetricDefinitionAndTenantIdMatcher matcher = new MetricDefinitionAndTenantIdMatcher();
private static final int MIN_LAG_VALUE = PropertyFinder.getIntProperty(MIN_LAG_VALUE_KEY,
MIN_LAG_VALUE_DEFAULT, 0, Integer.MAX_VALUE);
private static final int MAX_LAG_MESSAGES = PropertyFinder.getIntProperty(MAX_LAG_MESSAGES_KEY,
MAX_LAG_MESSAGES_DEFAULT, 0, Integer.MAX_VALUE);
private static final int LAG_MESSAGE_PERIOD = PropertyFinder.getIntProperty(
LAG_MESSAGE_PERIOD_KEY, LAG_MESSAGE_PERIOD_DEFAULT, 1, 600);
private static final Map<MetricDefinitionAndTenantId, List<String>> METRIC_DEFS =
new ConcurrentHashMap<>();
private static final MetricDefinitionAndTenantIdMatcher matcher =
new MetricDefinitionAndTenantIdMatcher();
private static final Object SENTINAL = new Object();
private transient Logger LOG;
private transient Logger logger;
private DataSourceFactory dbConfig;
private transient MetricDefinitionDAO metricDefDAO;
private OutputCollector collector;
@ -111,86 +117,94 @@ public class MetricFilteringBolt extends BaseRichBolt {
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields(FIELDS));
declarer.declareStream(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Fields(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_FIELDS));
declarer.declareStream(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM, new Fields(
MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_FIELDS));
}
@Override
public void execute(Tuple tuple) {
LOG.debug("tuple: {}", tuple);
logger.debug("tuple: {}", tuple);
try {
if (Streams.DEFAULT_STREAM_ID.equals(tuple.getSourceStreamId())) {
final MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(0);
final Long timestamp = (Long)tuple.getValue(1);
final Metric metric = (Metric)tuple.getValue(2);
final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(0);
final Long timestamp = (Long) tuple.getValue(1);
final Metric metric = (Metric) tuple.getValue(2);
checkLag(timestamp);
LOG.debug("metric definition and tenant id: {}", metricDefinitionAndTenantId);
logger.debug("metric definition and tenant id: {}", metricDefinitionAndTenantId);
// Check for exact matches as well as inexact matches
final List<MetricDefinitionAndTenantId> matches = matcher.match(metricDefinitionAndTenantId);
for (final MetricDefinitionAndTenantId match : matches)
collector.emit(new Values(match, metric));
final List<MetricDefinitionAndTenantId> matches =
matcher.match(metricDefinitionAndTenantId);
for (final MetricDefinitionAndTenantId match : matches) {
collector.emit(new Values(match, metric));
}
} else {
String eventType = tuple.getString(0);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) tuple.getValue(1);
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) tuple.getValue(1);
LOG.debug("Received {} for {}", eventType, metricDefinitionAndTenantId);
logger.debug("Received {} for {}", eventType, metricDefinitionAndTenantId);
// UPDATED events can be ignored because the MetricDefinitionAndTenantId doesn't change
if (EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
if (EventProcessingBolt.DELETED.equals(eventType))
if (EventProcessingBolt.DELETED.equals(eventType)) {
removeSubAlarm(metricDefinitionAndTenantId, tuple.getString(2));
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
if (EventProcessingBolt.CREATED.equals(eventType))
synchronized(SENTINAL) {
}
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple
.getSourceStreamId())) {
if (EventProcessingBolt.CREATED.equals(eventType)) {
synchronized (SENTINAL) {
final SubAlarm subAlarm = (SubAlarm) tuple.getValue(2);
addMetricDef(metricDefinitionAndTenantId, subAlarm.getId());
}
}
}
}
} catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e);
logger.error("Error processing tuple {}", tuple, e);
} finally {
collector.ack(tuple);
}
}
private void checkLag(Long apiTimeStamp) {
if (!lagging)
return;
if ((apiTimeStamp == null) || (apiTimeStamp.longValue() == 0))
if (!lagging) {
return;
}
if ((apiTimeStamp == null) || (apiTimeStamp.longValue() == 0)) {
return; // Remove this code at some point, just to handle old metrics without a NPE
}
final long now = getCurrentTime();
final long lag = now - apiTimeStamp.longValue();
if (lag < minLag)
if (lag < minLag) {
minLag = lag;
}
if (minLag <= MIN_LAG_VALUE) {
lagging = false;
LOG.info("Metrics no longer lagging, minLag = {}", minLag);
}
else if (minLagMessageSent >= MAX_LAG_MESSAGES) {
LOG.info("Waited for {} seconds for Metrics to catch up. Giving up. minLag = {}",
MAX_LAG_MESSAGES * LAG_MESSAGE_PERIOD, minLag);
lagging = false;
}
else if (lastMinLagMessageSent == 0) {
logger.info("Metrics no longer lagging, minLag = {}", minLag);
} else if (minLagMessageSent >= MAX_LAG_MESSAGES) {
logger.info("Waited for {} seconds for Metrics to catch up. Giving up. minLag = {}",
MAX_LAG_MESSAGES * LAG_MESSAGE_PERIOD, minLag);
lagging = false;
} else if (lastMinLagMessageSent == 0) {
lastMinLagMessageSent = now;
}
else if ((now - lastMinLagMessageSent) >= LAG_MESSAGE_PERIOD) {
LOG.info("Sending {} message, minLag = {}", MetricAggregationBolt.METRICS_BEHIND, minLag);
collector.emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
} else if ((now - lastMinLagMessageSent) >= LAG_MESSAGE_PERIOD) {
logger.info("Sending {} message, minLag = {}", MetricAggregationBolt.METRICS_BEHIND, minLag);
collector.emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM, new Values(
MetricAggregationBolt.METRICS_BEHIND));
lastMinLagMessageSent = now;
minLagMessageSent++;
}
}
private void removeSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId, String subAlarmId) {
synchronized(SENTINAL) {
private void removeSubAlarm(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
String subAlarmId) {
synchronized (SENTINAL) {
final List<String> subAlarmIds = METRIC_DEFS.get(metricDefinitionAndTenantId);
if (subAlarmIds != null) {
if (subAlarmIds.remove(subAlarmId) && subAlarmIds.isEmpty()) {
METRIC_DEFS.remove(metricDefinitionAndTenantId);
matcher.remove(metricDefinitionAndTenantId);
METRIC_DEFS.remove(metricDefinitionAndTenantId);
matcher.remove(metricDefinitionAndTenantId);
}
}
}
@ -199,8 +213,8 @@ public class MetricFilteringBolt extends BaseRichBolt {
@Override
@SuppressWarnings("rawtypes")
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
LOG = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
LOG.info("Preparing");
logger = LoggerFactory.getLogger(Logging.categoryFor(getClass(), context));
logger.info("Preparing");
this.collector = collector;
if (metricDefDAO == null) {
@ -213,16 +227,18 @@ public class MetricFilteringBolt extends BaseRichBolt {
synchronized (SENTINAL) {
if (METRIC_DEFS.isEmpty()) {
for (SubAlarmMetricDefinition subAlarmMetricDef : metricDefDAO.findForAlarms()) {
addMetricDef(subAlarmMetricDef.getMetricDefinitionAndTenantId(), subAlarmMetricDef.getSubAlarmId());
addMetricDef(subAlarmMetricDef.getMetricDefinitionAndTenantId(),
subAlarmMetricDef.getSubAlarmId());
}
// Iterate again to ensure we only emit each metricDef once
for (MetricDefinitionAndTenantId metricDefinitionAndTenantId : METRIC_DEFS.keySet())
for (MetricDefinitionAndTenantId metricDefinitionAndTenantId : METRIC_DEFS.keySet()) {
collector.emit(new Values(metricDefinitionAndTenantId, null));
LOG.info("Found {} Metric Definitions", METRIC_DEFS.size());
}
logger.info("Found {} Metric Definitions", METRIC_DEFS.size());
// Just output these here so they are only output once per JVM
LOG.info("MIN_LAG_VALUE set to {} seconds", MIN_LAG_VALUE);
LOG.info("MAX_LAG_MESSAGES set to {}", MAX_LAG_MESSAGES);
LOG.info("LAG_MESSAGE_PERIOD set to {} seconds", LAG_MESSAGE_PERIOD);
logger.info("MIN_LAG_VALUE set to {} seconds", MIN_LAG_VALUE);
logger.info("MAX_LAG_MESSAGES set to {}", MAX_LAG_MESSAGES);
logger.info("LAG_MESSAGE_PERIOD set to {} seconds", LAG_MESSAGE_PERIOD);
}
}
}
@ -233,18 +249,19 @@ public class MetricFilteringBolt extends BaseRichBolt {
* Allow override of current time for testing.
*/
protected long getCurrentTime() {
return System.currentTimeMillis()/1000;
return System.currentTimeMillis() / 1000;
}
private void addMetricDef(MetricDefinitionAndTenantId metricDefinitionAndTenantId, String subAlarmId) {
private void addMetricDef(MetricDefinitionAndTenantId metricDefinitionAndTenantId,
String subAlarmId) {
List<String> subAlarmIds = METRIC_DEFS.get(metricDefinitionAndTenantId);
if (subAlarmIds == null) {
subAlarmIds = new LinkedList<>();
METRIC_DEFS.put(metricDefinitionAndTenantId, subAlarmIds);
matcher.add(metricDefinitionAndTenantId);
} else if (subAlarmIds.contains(subAlarmId)) {
return; // Make sure it is only added once. Multiple bolts process the same AlarmCreatedEvent
}
else if (subAlarmIds.contains(subAlarmId))
return; // Make sure it only gets added once. Multiple bolts process the same AlarmCreatedEvent
subAlarmIds.add(subAlarmId);
}
@ -252,14 +269,14 @@ public class MetricFilteringBolt extends BaseRichBolt {
* Only use for testing.
*/
static void clearMetricDefinitions() {
METRIC_DEFS.clear();
matcher.clear();
METRIC_DEFS.clear();
matcher.clear();
}
/**
* Only use for testing.
*/
static int sizeMetricDefinitions() {
return METRIC_DEFS.size();
return METRIC_DEFS.size();
}
}

View File

@ -14,56 +14,58 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
package com.hpcloud.mon.infrastructure.thresholding;
import com.hpcloud.mon.MetricSpoutConfig;
import com.hpcloud.mon.common.model.metric.MetricEnvelope;
import com.hpcloud.mon.common.model.metric.MetricEnvelopes;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MetricSpout extends KafkaSpout {
private static final Logger LOG = LoggerFactory.getLogger(MetricSpout.class);
private static final Logger logger = LoggerFactory.getLogger(MetricSpout.class);
private static final long serialVersionUID = 744004533863562119L;
private static final long serialVersionUID = 744004533863562119L;
public static final String[] FIELDS = new String[] { "metricDefinitionAndTenantId", "apiTimeStamp", "metric" };
public static final String DEFAULT_TENANT_ID = "TENANT_ID_NOT_SET";
public static final String[] FIELDS = new String[] {"metricDefinitionAndTenantId",
"apiTimeStamp", "metric"};
public static final String DEFAULT_TENANT_ID = "TENANT_ID_NOT_SET";
public MetricSpout(MetricSpoutConfig metricSpoutConfig) {
super(metricSpoutConfig);
LOG.info("Created");
public MetricSpout(MetricSpoutConfig metricSpoutConfig) {
super(metricSpoutConfig);
logger.info("Created");
}
@Override
protected void processMessage(byte[] message, SpoutOutputCollector collector) {
final MetricEnvelope metricEnvelope;
try {
metricEnvelope = MetricEnvelopes.fromJson(message);
logger.debug("metric envelope: {}", metricEnvelope);
} catch (RuntimeException re) {
logger.warn("Error parsing MetricEnvelope", re);
return;
}
@Override
protected void processMessage(byte[] message, SpoutOutputCollector collector) {
final MetricEnvelope metricEnvelope;
try {
metricEnvelope = MetricEnvelopes.fromJson(message);
LOG.debug("metric envelope: {}", metricEnvelope);
}
catch (RuntimeException re) {
LOG.warn("Error parsing MetricEnvelope", re);
return;
}
String tenantId = (String)metricEnvelope.meta.get("tenantId");
if (tenantId == null) {
LOG.error("No tenantId so using default tenantId {} for Metric {}", DEFAULT_TENANT_ID, metricEnvelope.metric);
tenantId = DEFAULT_TENANT_ID;
}
collector.emit(new Values(new MetricDefinitionAndTenantId(metricEnvelope.metric.definition(), tenantId),
metricEnvelope.creationTime, metricEnvelope.metric));
String tenantId = (String) metricEnvelope.meta.get("tenantId");
if (tenantId == null) {
logger.error("No tenantId so using default tenantId {} for Metric {}", DEFAULT_TENANT_ID,
metricEnvelope.metric);
tenantId = DEFAULT_TENANT_ID;
}
collector.emit(new Values(new MetricDefinitionAndTenantId(metricEnvelope.metric.definition(),
tenantId), metricEnvelope.creationTime, metricEnvelope.metric));
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields(FIELDS));
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields(FIELDS));
}
}

View File

@ -14,30 +14,31 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.hpcloud.configuration.KafkaProducerConfiguration;
public class ProducerModule extends AbstractModule {
private KafkaProducerConfiguration config;
private AlarmEventForwarder alarmEventForwarder;
private KafkaProducerConfiguration config;
private AlarmEventForwarder alarmEventForwarder;
@Override
protected void configure() {
}
@Override
protected void configure() {}
public ProducerModule(KafkaProducerConfiguration config) {
this.config = config;
}
public ProducerModule(KafkaProducerConfiguration config) {
this.config = config;
}
public ProducerModule(AlarmEventForwarder alarmEventForwarder) {
this.alarmEventForwarder = alarmEventForwarder;
}
public ProducerModule(AlarmEventForwarder alarmEventForwarder) {
this.alarmEventForwarder = alarmEventForwarder;
}
@Provides
AlarmEventForwarder alarmEventForwarder() {
return alarmEventForwarder == null ? new KafkaAlarmEventForwarder(config) : alarmEventForwarder;
}
@Provides
AlarmEventForwarder alarmEventForwarder() {
return alarmEventForwarder == null ? new KafkaAlarmEventForwarder(config) : alarmEventForwarder;
}
}

View File

@ -14,37 +14,36 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PropertyFinder {
private static final Logger LOG = LoggerFactory.getLogger(PropertyFinder.class);
private static final Logger logger = LoggerFactory.getLogger(PropertyFinder.class);
private PropertyFinder()
{
}
public static int getIntProperty(final String name,
final int defaultValue,
final int minValue,
final int maxValue) {
final String valueString = System.getProperty(name);
if ((valueString != null) && !valueString.isEmpty()) {
try {
final int newValue = Integer.parseInt(valueString);
if ((newValue >= minValue) && (newValue <= maxValue)) {
return newValue;
}
LOG.warn("Invalid value {} for property '{}' must be >= {} and <= {}, using default value of {}",
valueString, name, minValue, maxValue, defaultValue);
}
catch (NumberFormatException nfe) {
LOG.warn("Not an integer value '{}' for property '{}', using default value of {}", valueString,
name, defaultValue);
}
public static int getIntProperty(final String name, final int defaultValue, final int minValue,
final int maxValue) {
final String valueString = System.getProperty(name);
if ((valueString != null) && !valueString.isEmpty()) {
try {
final int newValue = Integer.parseInt(valueString);
if ((newValue >= minValue) && (newValue <= maxValue)) {
return newValue;
}
return defaultValue;
logger.warn(
"Invalid value {} for property '{}' must be >= {} and <= {}, using default value of {}",
valueString, name, minValue, maxValue, defaultValue);
} catch (NumberFormatException nfe) {
logger.warn("Not an integer value '{}' for property '{}', using default value of {}",
valueString, name, defaultValue);
}
}
return defaultValue;
}
}
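
A brief, hypothetical usage sketch showing how getIntProperty backs the lag-tuning constants in MetricFilteringBolt above; the class below is not part of the commit and the values are examples. Because MetricFilteringBolt caches these properties in static finals at class-load time, real overrides are normally supplied as -D system properties on the worker JVM rather than set programmatically afterwards.

package com.hpcloud.mon.infrastructure.thresholding;

// Hypothetical example class, for illustration only.
public class PropertyFinderExample {
  public static void main(String[] args) {
    // In-range override: the configured value is returned.
    System.setProperty(MetricFilteringBolt.LAG_MESSAGE_PERIOD_KEY, "60");
    System.out.println(PropertyFinder.getIntProperty(MetricFilteringBolt.LAG_MESSAGE_PERIOD_KEY,
        MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT, 1, 600)); // 60

    // Out-of-range (or non-numeric) override: a warning is logged and the default is returned.
    System.setProperty(MetricFilteringBolt.LAG_MESSAGE_PERIOD_KEY, "9999");
    System.out.println(PropertyFinder.getIntProperty(MetricFilteringBolt.LAG_MESSAGE_PERIOD_KEY,
        MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT, 1, 600)); // 30
  }
}
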

View File

@ -14,23 +14,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding.deserializer;
import java.io.Serializable;
import java.util.Collections;
import java.util.List;
import backtype.storm.tuple.Fields;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.streaming.storm.TupleDeserializer;
import com.hpcloud.util.Serialization;
import backtype.storm.tuple.Fields;
import java.io.Serializable;
import java.util.Collections;
import java.util.List;
/**
* Deserializes MaaS events using registered serialization types.
*
*
* <ul>
* <li>Output: Object event
* </ul>
@ -49,7 +50,8 @@ public class EventDeserializer implements TupleDeserializer, Serializable {
@Override
public List<List<?>> deserialize(byte[] tuple) {
try {
return Collections.<List<?>>singletonList(Collections.singletonList(Serialization.fromJson(tuple)));
return Collections.<List<?>>singletonList(Collections.singletonList(Serialization
.fromJson(tuple)));
} catch (Exception ignore) {
return null;
}
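
A hedged sketch of the contract this method gives its caller: one tuple containing the deserialized event on success, null on any parse failure. Serialization.registerTarget and Serialization.fromJson are used elsewhere in this commit; the no-argument construction of EventDeserializer, the choice of AlarmDeletedEvent as a registered target, and the placeholder payload are assumptions for illustration.

package com.hpcloud.mon.infrastructure.thresholding.deserializer;

import java.nio.charset.Charset;
import java.util.List;

import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.util.Serialization;

// Illustrative only; assumes a no-arg EventDeserializer constructor.
public class EventDeserializerSketch {
  public static void main(String[] args) {
    Serialization.registerTarget(AlarmDeletedEvent.class);

    EventDeserializer deserializer = new EventDeserializer();
    byte[] payload = "not valid event json".getBytes(Charset.forName("UTF-8"));

    List<List<?>> tuples = deserializer.deserialize(payload);
    if (tuples == null) {
      // Any parse failure is swallowed and surfaces as null, so the spout can skip the message.
      System.out.println("payload could not be deserialized");
    } else {
      System.out.println("event: " + tuples.get(0).get(0));
    }
  }
}
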

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import static org.testng.Assert.assertEquals;
@ -21,12 +22,15 @@ import static org.testng.Assert.fail;
public class Assert {
public static void assertArraysEqual(final double[] actual, final double[] expected) {
if (expected == actual)
if (expected == actual) {
return;
if (null == expected)
}
if (null == expected) {
fail("expected a null array, but not null found.");
if (null == actual)
}
if (null == actual) {
fail("expected not null array, but null found.");
}
assertEquals(actual.length, expected.length, "arrays don't have the same size.");
@ -39,12 +43,15 @@ public class Assert {
}
public static void assertArraysEqual(final long[] actual, final long[] expected) {
if (expected == actual)
if (expected == actual) {
return;
if (null == expected)
}
if (null == expected) {
fail("expected a null array, but not null found.");
if (null == actual)
}
if (null == actual) {
fail("expected not null array, but null found.");
}
assertEquals(actual.length, expected.length, "arrays don't have the same size.");

View File

@ -14,35 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.doAnswer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotEquals;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent;
@ -67,8 +49,29 @@ import com.hpcloud.streaming.storm.TopologyTestCase;
import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* Simulates a real'ish run of the thresholding engine with alarms being created, updated and deleted
* Simulates a real'ish run of the thresholding engine with alarms being created, updated and
* deleted
*/
@Test(groups = "integration")
public class ThresholdingEngineAlarmTest extends TopologyTestCase {
@ -85,14 +88,13 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
private int nextSubAlarmId = 4242;
private List<SubAlarm> subAlarms;
private AlarmExpression expression = new AlarmExpression(
"max(hpcs.compute.cpu{id=5}) >= 3 or max(hpcs.compute.mem{id=5}) >= 557");
"max(hpcs.compute.cpu{id=5}) >= 3 or max(hpcs.compute.mem{id=5}) >= 557");
private AlarmState currentState = AlarmState.UNDETERMINED;
private volatile int alarmsSent = 0;
public ThresholdingEngineAlarmTest() {
// Fixtures
subAlarms = subAlarmsFor(TEST_ALARM_ID, expression);
// Mocks
@ -101,23 +103,26 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
@Override
public Alarm answer(InvocationOnMock invocation) throws Throwable {
return new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME,
TEST_ALARM_DESCRIPTION, expression, subAlarms, currentState, Boolean.TRUE);
TEST_ALARM_DESCRIPTION, expression, subAlarms, currentState, Boolean.TRUE);
}
});
subAlarmDAO = mock(SubAlarmDAO.class);
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0];
for (final SubAlarm subAlarm : subAlarms) {
if (metricDefinitionAndTenantId.metricDefinition.equals(subAlarm.getExpression().getMetricDefinition())) {
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(
new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) invocation.getArguments()[0];
for (final SubAlarm subAlarm : subAlarms) {
if (metricDefinitionAndTenantId.metricDefinition.equals(subAlarm.getExpression()
.getMetricDefinition())) {
return Arrays.asList(subAlarm);
}
}
}
return Collections.emptyList();
}
});
return Collections.emptyList();
}
});
metricDefinitionDAO = mock(MetricDefinitionDAO.class);
List<SubAlarmMetricDefinition> metricDefs = new ArrayList<>(0);
@ -138,32 +143,32 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
threshConfig.sporadicMetricNamespaces = new HashSet<String>();
Serialization.registerTarget(KafkaProducerConfiguration.class);
threshConfig.kafkaProducerConfig = Serialization.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
threshConfig.kafkaProducerConfig =
Serialization
.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
Config stormConfig = new Config();
stormConfig.setMaxTaskParallelism(1);
metricSpout = new FeederSpout(new Fields(MetricSpout.FIELDS));
eventSpout = new FeederSpout(new Fields("event"));
alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig,
metricSpout, eventSpout));
Injector
.registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
Injector.registerModules(new ProducerModule(alarmEventForwarder));
    // Evaluate alarm stats every 5 seconds
System.setProperty(MetricAggregationBolt.TICK_TUPLE_SECONDS_KEY, "5");
}
private List<SubAlarm> subAlarmsFor(final String alarmId,
final AlarmExpression expression,
final String ... ids) {
private List<SubAlarm> subAlarmsFor(final String alarmId, final AlarmExpression expression,
final String... ids) {
final List<SubAlarm> result = new ArrayList<SubAlarm>(expression.getSubExpressions().size());
int index = 0;
for (final AlarmSubExpression expr : expression.getSubExpressions()) {
final String id;
if ((index >= ids.length) || (ids[index] == null)) {
id = String.valueOf(nextSubAlarmId++);
}
else {
id = ids[index];
id = String.valueOf(nextSubAlarmId++);
} else {
id = ids[index];
}
index++;
result.add(new SubAlarm(id, TEST_ALARM_ID, expr));
@ -171,109 +176,119 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
return result;
}
final AlarmState[] expectedStates = { AlarmState.ALARM, AlarmState.OK, AlarmState.ALARM, AlarmState.OK };
final AlarmState[] expectedStates = {AlarmState.ALARM, AlarmState.OK, AlarmState.ALARM,
AlarmState.OK};
public void shouldThreshold() throws Exception {
doAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocation) {
final Object[] args = invocation.getArguments();
AlarmStateTransitionedEvent event = Serialization.fromJson((String)args[2]);
System.out.printf("Alarm transitioned from %s to %s%n", event.oldState, event.newState);
assertEquals(event.alarmName, TEST_ALARM_NAME);
assertEquals(event.alarmId, TEST_ALARM_ID);
assertEquals(event.tenantId, TEST_ALARM_TENANT_ID);
assertEquals(event.oldState, currentState);
currentState = event.newState;
assertEquals(event.newState, expectedStates[alarmsSent++]);
return null;
}
public Object answer(InvocationOnMock invocation) {
final Object[] args = invocation.getArguments();
AlarmStateTransitionedEvent event = Serialization.fromJson((String) args[2]);
System.out.printf("Alarm transitioned from %s to %s%n", event.oldState, event.newState);
assertEquals(event.alarmName, TEST_ALARM_NAME);
assertEquals(event.alarmId, TEST_ALARM_ID);
assertEquals(event.tenantId, TEST_ALARM_TENANT_ID);
assertEquals(event.oldState, currentState);
currentState = event.newState;
assertEquals(event.newState, expectedStates[alarmsSent++]);
return null;
}
)
.when(alarmEventForwarder).send(anyString(), anyString(), anyString());
}).when(alarmEventForwarder).send(anyString(), anyString(), anyString());
int goodValueCount = 0;
boolean firstUpdate = true;
boolean secondUpdate = true;
boolean thirdUpdate = true;
final Alarm initialAlarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME,
TEST_ALARM_DESCRIPTION, expression, subAlarms, AlarmState.UNDETERMINED, Boolean.TRUE);
final Alarm initialAlarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expression, subAlarms, AlarmState.UNDETERMINED, Boolean.TRUE);
final int expectedAlarms = expectedStates.length;
AlarmExpression savedAlarmExpression = null;
for (int i = 1; alarmsSent != expectedAlarms && i < 300; i++) {
if (i == 5) {
final Map<String, AlarmSubExpression> exprs = createSubExpressionMap();
final AlarmCreatedEvent event = new AlarmCreatedEvent(TEST_ALARM_TENANT_ID, TEST_ALARM_ID, TEST_ALARM_NAME,
expression.getExpression(), exprs);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmCreatedEvent for expression %s%n", expression.getExpression());
}
else if (alarmsSent == 1 && firstUpdate) {
firstUpdate = false;
final String originalExpression = expression.getExpression();
expression = new AlarmExpression(originalExpression.replace(">= 3", ">= 556"));
assertNotEquals(expression.getExpression(), originalExpression);
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(new SubAlarm(subAlarms.get(0).getId(), initialAlarm.getId(), expression.getSubExpressions().get(0)));
for (int index = 1; index < subAlarms.size(); index++) {
final SubAlarm subAlarm = subAlarms.get(index);
updatedSubAlarms.add(new SubAlarm(subAlarm.getId(), initialAlarm.getId(), subAlarm.getExpression()));
}
final Map<String, AlarmSubExpression> exprs = createSubExpressionMap();
final AlarmCreatedEvent event =
new AlarmCreatedEvent(TEST_ALARM_TENANT_ID, TEST_ALARM_ID, TEST_ALARM_NAME,
expression.getExpression(), exprs);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmCreatedEvent for expression %s%n", expression.getExpression());
} else if (alarmsSent == 1 && firstUpdate) {
firstUpdate = false;
final String originalExpression = expression.getExpression();
expression = new AlarmExpression(originalExpression.replace(">= 3", ">= 556"));
assertNotEquals(expression.getExpression(), originalExpression);
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(new SubAlarm(subAlarms.get(0).getId(), initialAlarm.getId(),
expression.getSubExpressions().get(0)));
for (int index = 1; index < subAlarms.size(); index++) {
final SubAlarm subAlarm = subAlarms.get(index);
updatedSubAlarms.add(new SubAlarm(subAlarm.getId(), initialAlarm.getId(), subAlarm
.getExpression()));
}
initialAlarm.setState(currentState);
final AlarmUpdatedEvent event = EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(), expression,
updatedSubAlarms);
subAlarms = updatedSubAlarms;
initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
initialAlarm.setState(currentState);
final AlarmUpdatedEvent event =
EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(),
expression, updatedSubAlarms);
subAlarms = updatedSubAlarms;
initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
}
else if (alarmsSent == 2 && secondUpdate) {
secondUpdate = false;
savedAlarmExpression = expression;
expression = new AlarmExpression("max(hpcs.compute.load{id=5}) > 551 and (" + expression.getExpression().replace("556", "554") + ")");
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), initialAlarm.getId(), expression.getSubExpressions().get(0)));
for (int index = 0; index < subAlarms.size(); index++) {
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(), expression.getSubExpressions().get(index+1)));
}
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
} else if (alarmsSent == 2 && secondUpdate) {
secondUpdate = false;
savedAlarmExpression = expression;
expression =
new AlarmExpression("max(hpcs.compute.load{id=5}) > 551 and ("
+ expression.getExpression().replace("556", "554") + ")");
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), initialAlarm.getId(),
expression.getSubExpressions().get(0)));
for (int index = 0; index < subAlarms.size(); index++) {
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(),
expression.getSubExpressions().get(index + 1)));
}
initialAlarm.setState(currentState);
final AlarmUpdatedEvent event = EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(), expression,
updatedSubAlarms);
subAlarms = updatedSubAlarms;
initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
initialAlarm.setState(currentState);
final AlarmUpdatedEvent event =
EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(),
expression, updatedSubAlarms);
subAlarms = updatedSubAlarms;
initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
}
else if (alarmsSent == 3 && thirdUpdate) {
thirdUpdate = false;
expression = savedAlarmExpression;
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
int index = 1;
for (AlarmSubExpression subExpression : expression.getSubExpressions()) {
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(), subExpression));
index++;
}
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
} else if (alarmsSent == 3 && thirdUpdate) {
thirdUpdate = false;
expression = savedAlarmExpression;
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
int index = 1;
for (AlarmSubExpression subExpression : expression.getSubExpressions()) {
updatedSubAlarms.add(new SubAlarm(subAlarms.get(index).getId(), initialAlarm.getId(),
subExpression));
index++;
}
initialAlarm.setState(currentState);
final AlarmUpdatedEvent event = EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(), expression,
updatedSubAlarms);
subAlarms = updatedSubAlarms;
initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
initialAlarm.setState(currentState);
final AlarmUpdatedEvent event =
EventProcessingBoltTest.createAlarmUpdatedEvent(initialAlarm, initialAlarm.getState(),
expression, updatedSubAlarms);
subAlarms = updatedSubAlarms;
initialAlarm.setSubAlarms(updatedSubAlarms);
eventSpout.feed(new Values(event));
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
}
else {
System.out.printf("Send AlarmUpdatedEvent for expression %s%n", expression.getExpression());
} else {
System.out.println("Feeding metrics...");
long time = System.currentTimeMillis()/1000;
long time = System.currentTimeMillis() / 1000;
++goodValueCount;
for (final SubAlarm subAlarm : subAlarms) {
final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(), TEST_ALARM_TENANT_ID);
metricSpout.feed(new Values(metricDefinitionAndTenantId, time,
new Metric(metricDefinitionAndTenantId.metricDefinition, time, (double) (goodValueCount == 15 ? 1 : 555))));
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
TEST_ALARM_TENANT_ID);
metricSpout.feed(new Values(metricDefinitionAndTenantId, time, new Metric(
metricDefinitionAndTenantId.metricDefinition, time,
(double) (goodValueCount == 15 ? 1 : 555))));
}
}
try {
@ -284,11 +299,11 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
}
for (int i = 0; alarmsSent != expectedAlarms && i < 60; i++) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
assertEquals(alarmsSent, expectedAlarms);
assertEquals(currentState, expectedStates[expectedStates.length - 1]);
@ -296,9 +311,9 @@ public class ThresholdingEngineAlarmTest extends TopologyTestCase {
private Map<String, AlarmSubExpression> createSubExpressionMap() {
final Map<String, AlarmSubExpression> exprs = new HashMap<>();
for (final SubAlarm subAlarm : subAlarms) {
exprs.put(subAlarm.getId(), subAlarm.getExpression());
}
for (final SubAlarm subAlarm : subAlarms) {
exprs.put(subAlarm.getId(), subAlarm.getExpression());
}
return exprs;
}
}

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import com.hpcloud.mon.ThresholdingEngine;

View File

@ -14,33 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.doAnswer;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.common.event.AlarmStateTransitionedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
@ -61,6 +45,24 @@ import com.hpcloud.streaming.storm.TopologyTestCase;
import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
/**
* Simulates a real'ish run of the thresholding engine, using seconds instead of minutes for the
* evaluation timescale.
@ -86,8 +88,8 @@ public class ThresholdingEngineTest extends TopologyTestCase {
public ThresholdingEngineTest() {
// Fixtures
final AlarmExpression expression = new AlarmExpression(
"max(cpu{id=5}) >= 3 or max(mem{id=5}) >= 5");
final AlarmExpression expression =
new AlarmExpression("max(cpu{id=5}) >= 3 or max(mem{id=5}) >= 5");
cpuMetricDef = expression.getSubExpressions().get(0).getMetricDefinition();
memMetricDef = expression.getSubExpressions().get(1).getMetricDefinition();
@ -98,33 +100,38 @@ public class ThresholdingEngineTest extends TopologyTestCase {
@Override
public Alarm answer(InvocationOnMock invocation) throws Throwable {
return new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME,
TEST_ALARM_DESCRIPTION, expression, subAlarmsFor(expression), AlarmState.OK, Boolean.TRUE);
TEST_ALARM_DESCRIPTION, expression, subAlarmsFor(expression), AlarmState.OK,
Boolean.TRUE);
}
});
subAlarmDAO = mock(SubAlarmDAO.class);
final SubAlarm cpuMetricDefSubAlarm = new SubAlarm("123", TEST_ALARM_ID, expression.getSubExpressions().get(0));
final SubAlarm memMetricDefSubAlarm = new SubAlarm("456", TEST_ALARM_ID, expression.getSubExpressions().get(1));
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0];
MetricDefinition metricDef = metricDefinitionAndTenantId.metricDefinition;
if (metricDef.equals(cpuMetricDef)) {
return Arrays.asList(cpuMetricDefSubAlarm);
} else if (metricDef.equals(memMetricDef)) {
return Arrays.asList(memMetricDefSubAlarm);
}
return Collections.emptyList();
}
});
final SubAlarm cpuMetricDefSubAlarm =
new SubAlarm("123", TEST_ALARM_ID, expression.getSubExpressions().get(0));
final SubAlarm memMetricDefSubAlarm =
new SubAlarm("456", TEST_ALARM_ID, expression.getSubExpressions().get(1));
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(
new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) invocation.getArguments()[0];
MetricDefinition metricDef = metricDefinitionAndTenantId.metricDefinition;
if (metricDef.equals(cpuMetricDef)) {
return Arrays.asList(cpuMetricDefSubAlarm);
} else if (metricDef.equals(memMetricDef)) {
return Arrays.asList(memMetricDefSubAlarm);
}
return Collections.emptyList();
}
});
metricDefinitionDAO = mock(MetricDefinitionDAO.class);
final List<SubAlarmMetricDefinition> metricDefs = Arrays.asList(
new SubAlarmMetricDefinition(cpuMetricDefSubAlarm.getId(),
new MetricDefinitionAndTenantId(cpuMetricDef, TEST_ALARM_TENANT_ID)),
final List<SubAlarmMetricDefinition> metricDefs =
Arrays.asList(new SubAlarmMetricDefinition(cpuMetricDefSubAlarm.getId(),
new MetricDefinitionAndTenantId(cpuMetricDef, TEST_ALARM_TENANT_ID)),
new SubAlarmMetricDefinition(memMetricDefSubAlarm.getId(),
new MetricDefinitionAndTenantId(memMetricDef, TEST_ALARM_TENANT_ID)));
new MetricDefinitionAndTenantId(memMetricDef, TEST_ALARM_TENANT_ID)));
when(metricDefinitionDAO.findForAlarms()).thenReturn(metricDefs);
// Bindings
@ -142,14 +149,16 @@ public class ThresholdingEngineTest extends TopologyTestCase {
threshConfig.sporadicMetricNamespaces = new HashSet<String>();
Serialization.registerTarget(KafkaProducerConfiguration.class);
threshConfig.kafkaProducerConfig = Serialization.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
threshConfig.kafkaProducerConfig =
Serialization
.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
Config stormConfig = new Config();
stormConfig.setMaxTaskParallelism(1);
metricSpout = new FeederSpout(new Fields(MetricSpout.FIELDS));
eventSpout = new FeederSpout(new Fields("event"));
alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig,
metricSpout, eventSpout));
Injector
.registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
Injector.registerModules(new ProducerModule(alarmEventForwarder));
}
@ -161,28 +170,25 @@ public class ThresholdingEngineTest extends TopologyTestCase {
public void shouldThreshold() throws Exception {
doAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocation) {
final Object[] args = invocation.getArguments();
AlarmStateTransitionedEvent event = Serialization.fromJson((String)args[2]);
alarmsSent++;
System.out.printf("Alarm transitioned from %s to %s%n", event.oldState, event.newState);
assertEquals(event.alarmName, TEST_ALARM_NAME);
assertEquals(event.alarmId, TEST_ALARM_ID);
assertEquals(event.tenantId, TEST_ALARM_TENANT_ID);
assertEquals(event.oldState, previousState);
assertEquals(event.newState, expectedState);
previousState = event.newState;
if (event.newState == AlarmState.UNDETERMINED) {
expectedState = AlarmState.ALARM;
}
else if (event.newState == AlarmState.ALARM) {
expectedState = AlarmState.UNDETERMINED;
}
return null;
}
public Object answer(InvocationOnMock invocation) {
final Object[] args = invocation.getArguments();
AlarmStateTransitionedEvent event = Serialization.fromJson((String) args[2]);
alarmsSent++;
System.out.printf("Alarm transitioned from %s to %s%n", event.oldState, event.newState);
assertEquals(event.alarmName, TEST_ALARM_NAME);
assertEquals(event.alarmId, TEST_ALARM_ID);
assertEquals(event.tenantId, TEST_ALARM_TENANT_ID);
assertEquals(event.oldState, previousState);
assertEquals(event.newState, expectedState);
previousState = event.newState;
if (event.newState == AlarmState.UNDETERMINED) {
expectedState = AlarmState.ALARM;
} else if (event.newState == AlarmState.ALARM) {
expectedState = AlarmState.UNDETERMINED;
}
return null;
}
)
.when(alarmEventForwarder).send(anyString(), anyString(), anyString());
}).when(alarmEventForwarder).send(anyString(), anyString(), anyString());
int waitCount = 0;
int feedCount = 5;
int goodValueCount = 0;
@ -192,21 +198,26 @@ public class ThresholdingEngineTest extends TopologyTestCase {
if (feedCount > 0) {
System.out.println("Feeding metrics...");
long time = System.currentTimeMillis()/1000;
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef, TEST_ALARM_TENANT_ID), time,
new Metric(cpuMetricDef.name, cpuMetricDef.dimensions, time, (double) (++goodValueCount == 15 ? 1 : 555))));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef, TEST_ALARM_TENANT_ID), time,
new Metric(memMetricDef.name, extraMemMetricDefDimensions, time, (double) (goodValueCount == 15 ? 1 : 555))));
long time = System.currentTimeMillis() / 1000;
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef,
TEST_ALARM_TENANT_ID), time, new Metric(cpuMetricDef.name, cpuMetricDef.dimensions,
time, (double) (++goodValueCount == 15 ? 1 : 555))));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef,
TEST_ALARM_TENANT_ID), time, new Metric(memMetricDef.name, extraMemMetricDefDimensions,
time, (double) (goodValueCount == 15 ? 1 : 555))));
if (--feedCount == 0)
if (--feedCount == 0) {
waitCount = 3;
}
if (goodValueCount == 15)
if (goodValueCount == 15) {
goodValueCount = 0;
}
} else {
System.out.println("Waiting...");
if (--waitCount == 0)
if (--waitCount == 0) {
feedCount = 5;
}
}
try {
@ -218,11 +229,11 @@ public class ThresholdingEngineTest extends TopologyTestCase {
// Give it some extra time if it needs it for the alarm to come out
for (int i = 0; i < 30 && alarmsSent == 0; i++) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
assertTrue(alarmsSent > 0, "Not enough alarms");
}

View File


@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon;
import static org.mockito.Matchers.any;
@ -21,21 +22,6 @@ import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.common.collect.ImmutableMap;
import com.google.inject.AbstractModule;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
@ -57,6 +43,22 @@ import com.hpcloud.mon.infrastructure.thresholding.ProducerModule;
import com.hpcloud.streaming.storm.TopologyTestCase;
import com.hpcloud.util.Injector;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.common.collect.ImmutableMap;
import com.google.inject.AbstractModule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Simulates a real'ish run of the thresholding engine, using seconds instead of minutes for the
* evaluation timescale.
@ -80,8 +82,9 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
public ThresholdingEngineTest1() {
// Fixtures
expression = new AlarmExpression(
"avg(hpcs.compute.cpu{id=5}, 3) >= 3 times 2 and avg(hpcs.compute.mem{id=5}, 3) >= 5 times 2");
expression =
new AlarmExpression(
"avg(hpcs.compute.cpu{id=5}, 3) >= 3 times 2 and avg(hpcs.compute.mem{id=5}, 3) >= 5 times 2");
customExpression = AlarmExpression.of("avg(my.test{id=4}, 3) > 10");
customSubExpression = customExpression.getSubExpressions().get(0);
@ -94,40 +97,44 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
when(alarmDAO.findById(anyString())).thenAnswer(new Answer<Alarm>() {
@Override
public Alarm answer(InvocationOnMock invocation) throws Throwable {
if (invocation.getArguments()[0].equals("1"))
return new Alarm("1", BOB_TENANT_ID, "test-alarm", "Descr of test-alarm", expression, Arrays.asList(createCpuSubAlarm(),
createMemSubAlarm()), AlarmState.OK, Boolean.TRUE);
else if (invocation.getArguments()[0].equals("2"))
return new Alarm("2", JOE_TENANT_ID, "joes-alarm", "Descr of joes-alarm", customExpression,
Arrays.asList(createCustomSubAlarm()), AlarmState.OK, Boolean.TRUE);
if (invocation.getArguments()[0].equals("1")) {
return new Alarm("1", BOB_TENANT_ID, "test-alarm", "Descr of test-alarm", expression,
Arrays.asList(createCpuSubAlarm(), createMemSubAlarm()), AlarmState.OK, Boolean.TRUE);
} else if (invocation.getArguments()[0].equals("2")) {
return new Alarm("2", JOE_TENANT_ID, "joes-alarm", "Descr of joes-alarm",
customExpression, Arrays.asList(createCustomSubAlarm()), AlarmState.OK, Boolean.TRUE);
}
return null;
}
});
subAlarmDAO = mock(SubAlarmDAO.class);
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0];
MetricDefinition metricDef = metricDefinitionAndTenantId.metricDefinition;
if (metricDef.equals(cpuMetricDef))
return Arrays.asList(createCpuSubAlarm());
else if (metricDef.equals(memMetricDef))
return Arrays.asList(createMemSubAlarm());
else if (metricDef.equals(customMetricDef))
return Arrays.asList(createCustomSubAlarm());
return Collections.emptyList();
}
});
when(subAlarmDAO.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(
new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) invocation.getArguments()[0];
MetricDefinition metricDef = metricDefinitionAndTenantId.metricDefinition;
if (metricDef.equals(cpuMetricDef)) {
return Arrays.asList(createCpuSubAlarm());
} else if (metricDef.equals(memMetricDef)) {
return Arrays.asList(createMemSubAlarm());
} else if (metricDef.equals(customMetricDef)) {
return Arrays.asList(createCustomSubAlarm());
}
return Collections.emptyList();
}
});
metricDefinitionDAO = mock(MetricDefinitionDAO.class);
final List<SubAlarmMetricDefinition> metricDefs = Arrays.asList(
new SubAlarmMetricDefinition(createCpuSubAlarm().getId(),
new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID)),
final List<SubAlarmMetricDefinition> metricDefs =
Arrays.asList(new SubAlarmMetricDefinition(createCpuSubAlarm().getId(),
new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID)),
new SubAlarmMetricDefinition(createMemSubAlarm().getId(),
new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID)),
new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID)),
new SubAlarmMetricDefinition(createCustomSubAlarm().getId(),
new MetricDefinitionAndTenantId(customMetricDef, JOE_TENANT_ID)));
new MetricDefinitionAndTenantId(customMetricDef, JOE_TENANT_ID)));
when(metricDefinitionDAO.findForAlarms()).thenReturn(metricDefs);
// Bindings
@ -149,8 +156,8 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
eventSpout = new FeederSpout(new Fields("event"));
final AlarmEventForwarder alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig,
metricSpout, eventSpout));
Injector
.registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
Injector.registerModules(new ProducerModule(alarmEventForwarder));
// Evaluate alarm stats every 1 seconds
@ -175,23 +182,27 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
while (true) {
long time = System.currentTimeMillis();
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID), new Metric(cpuMetricDef.name,
cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1)));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID), new Metric(memMetricDef.name,
cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1)));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(customMetricDef, JOE_TENANT_ID), new Metric(customMetricDef.name,
cpuMetricDef.dimensions, time, count % 20 == 0 ? 1 : 123)));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(cpuMetricDef, BOB_TENANT_ID),
new Metric(cpuMetricDef.name, cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1)));
metricSpout.feed(new Values(new MetricDefinitionAndTenantId(memMetricDef, BOB_TENANT_ID),
new Metric(memMetricDef.name, cpuMetricDef.dimensions, time, count % 10 == 0 ? 555 : 1)));
metricSpout
.feed(new Values(new MetricDefinitionAndTenantId(customMetricDef, JOE_TENANT_ID),
new Metric(customMetricDef.name, cpuMetricDef.dimensions, time, count % 20 == 0 ? 1
: 123)));
if (count % 5 == 0) {
Object event = null;
if (++eventCounter % 2 == 0)
event = new AlarmDeletedEvent(JOE_TENANT_ID, "2",
ImmutableMap.<String, MetricDefinition>builder().put("444", customMetricDef).build());
else
event = new AlarmCreatedEvent(JOE_TENANT_ID, "2", "foo", customSubExpression.getExpression(),
ImmutableMap.<String, AlarmSubExpression>builder()
.put("444", customSubExpression)
.build());
if (++eventCounter % 2 == 0) {
event =
new AlarmDeletedEvent(JOE_TENANT_ID, "2", ImmutableMap
.<String, MetricDefinition>builder().put("444", customMetricDef).build());
} else {
event =
new AlarmCreatedEvent(JOE_TENANT_ID, "2", "foo", customSubExpression.getExpression(),
ImmutableMap.<String, AlarmSubExpression>builder()
.put("444", customSubExpression).build());
}
eventSpout.feed(new Values(event));
}

View File

@ -14,19 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
@ -34,6 +28,13 @@ import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Test
public class AlarmTest {
private static final String TEST_ALARM_ID = "1";
@ -43,25 +44,30 @@ public class AlarmTest {
private static Boolean ALARM_ENABLED = Boolean.FALSE;
public void shouldBeUndeterminedIfAnySubAlarmIsUndetermined() {
AlarmExpression expr = new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 AND avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0),
AlarmState.UNDETERMINED);
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1), AlarmState.ALARM);
Alarm alarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION, expr,
Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
AlarmExpression expr =
new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 AND avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 =
new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0), AlarmState.UNDETERMINED);
SubAlarm subAlarm2 =
new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1), AlarmState.ALARM);
Alarm alarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
assertFalse(alarm.evaluate());
assertEquals(alarm.getState(), AlarmState.UNDETERMINED);
}
public void shouldEvaluateExpressionWithBooleanAnd() {
AlarmExpression expr = new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 AND avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
AlarmExpression expr =
new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 AND avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1));
Alarm alarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
Alarm alarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
assertFalse(alarm.evaluate());
@ -90,12 +96,14 @@ public class AlarmTest {
}
public void shouldEvaluateExpressionWithBooleanOr() {
AlarmExpression expr = new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
AlarmExpression expr =
new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1));
Alarm alarm = new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
Alarm alarm =
new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, TEST_ALARM_DESCRIPTION,
expr, Arrays.asList(subAlarm1, subAlarm2), AlarmState.UNDETERMINED, ALARM_ENABLED);
assertFalse(alarm.evaluate());
@ -131,12 +139,13 @@ public class AlarmTest {
}
public void shouldBuiltStateChangeReason() {
AlarmExpression expr = new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
AlarmExpression expr =
new AlarmExpression(
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expr.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expr.getSubExpressions().get(1));
List<String> expressions = Arrays.asList(subAlarm1.getExpression().toString(),
subAlarm2.getExpression().toString());
List<String> expressions =
Arrays.asList(subAlarm1.getExpression().toString(), subAlarm2.getExpression().toString());
assertEquals(
Alarm.buildStateChangeReason(AlarmState.UNDETERMINED, expressions),
@ -149,19 +158,22 @@ public class AlarmTest {
/**
* This test is here because this case happened in the Threshold Engine. The AlarmExpression
* resulted in a MetricDefinition with null dimensions and SubAlarm had empty dimensions
* and that didn't match causing an IllegalArgumentException. MetricDefinition.equals() has
* been changed to consider those two values for dimensions the same
* resulted in a MetricDefinition with null dimensions and SubAlarm had empty dimensions and that
* didn't match causing an IllegalArgumentException. MetricDefinition.equals() has been changed to
* consider those two values for dimensions the same
*/
public void testDimensions() {
final AlarmExpression expression = AlarmExpression.of("max(cpu_system_perc) > 1");
final MetricDefinition metricDefinition = new MetricDefinition("cpu_system_perc", new HashMap<String, String>());
final AlarmSubExpression ase = new AlarmSubExpression(AggregateFunction.MAX, metricDefinition, AlarmOperator.GT, 1, 60, 1);
final MetricDefinition metricDefinition =
new MetricDefinition("cpu_system_perc", new HashMap<String, String>());
final AlarmSubExpression ase =
new AlarmSubExpression(AggregateFunction.MAX, metricDefinition, AlarmOperator.GT, 1, 60, 1);
final SubAlarm subAlarm = new SubAlarm("123", "456", ase);
final Map<AlarmSubExpression, Boolean> subExpressionValues = new HashMap<AlarmSubExpression, Boolean>();
final Map<AlarmSubExpression, Boolean> subExpressionValues =
new HashMap<AlarmSubExpression, Boolean>();
subExpressionValues.put(subAlarm.getExpression(), true);
assertEquals(expression.getSubExpressions().get(0).getMetricDefinition().hashCode(),
metricDefinition.hashCode());
metricDefinition.hashCode());
// Handle ALARM state
assertTrue(expression.evaluate(subExpressionValues));
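
A minimal sketch of the null-versus-empty-dimensions case described in the javadoc above, assuming (as that comment states) that MetricDefinition.equals() and hashCode() now treat null dimensions and an empty dimension map as equivalent; the class and constructor are the ones already used in this test, everything else here is illustrative:

import java.util.HashMap;

import com.hpcloud.mon.common.model.metric.MetricDefinition;

public class NullVsEmptyDimensionsSketch {
  public static void main(String[] args) {
    // Before the change described in the javadoc, these two definitions did not compare
    // equal, which broke the SubAlarm lookup and surfaced as an IllegalArgumentException.
    MetricDefinition withNullDimensions = new MetricDefinition("cpu_system_perc", null);
    MetricDefinition withEmptyDimensions =
        new MetricDefinition("cpu_system_perc", new HashMap<String, String>());

    // With the changed equals()/hashCode(), both lines are expected to print true.
    System.out.println(withNullDimensions.equals(withEmptyDimensions));
    System.out.println(withNullDimensions.hashCode() == withEmptyDimensions.hashCode());
  }
}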

View File

@ -14,193 +14,219 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import static org.testng.Assert.assertEqualsNoOrder;
import static org.testng.Assert.assertTrue;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher.DimensionPair;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantIdMatcher.DimensionSet;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Test
public class MetricDefinitionAndTenantIdMatcherTest {
private static final String HOST = "host";
private static final String LOAD_BALANCER_GROUP = "loadBalancerGroup";
private static final String CPU_METRIC_NAME = "cpu";
private MetricDefinitionAndTenantIdMatcher matcher;
private final String tenantId = "4242";
private MetricDefinition metricDef;
private Map<String, String> dimensions;
private static final String HOST = "host";
private static final String LOAD_BALANCER_GROUP = "loadBalancerGroup";
private static final String CPU_METRIC_NAME = "cpu";
private MetricDefinitionAndTenantIdMatcher matcher;
private final String tenantId = "4242";
private MetricDefinition metricDef;
private Map<String, String> dimensions;
@BeforeMethod
protected void beforeMethod() {
matcher = new MetricDefinitionAndTenantIdMatcher();
dimensions = new HashMap<>();
dimensions.put(HOST, "CloudAmI");
dimensions.put(LOAD_BALANCER_GROUP, "GroupA");
metricDef = new MetricDefinition(CPU_METRIC_NAME, dimensions);
}
@BeforeMethod
protected void beforeMethod() {
matcher = new MetricDefinitionAndTenantIdMatcher();
dimensions = new HashMap<>();
dimensions.put(HOST, "CloudAmI");
dimensions.put(LOAD_BALANCER_GROUP, "GroupA");
metricDef = new MetricDefinition(CPU_METRIC_NAME, dimensions);
}
public void shouldNotFind() {
assertTrue(matcher.isEmpty());
final MetricDefinitionAndTenantId toMatch = new MetricDefinitionAndTenantId(metricDef, tenantId);
verifyNoMatch(toMatch);
public void shouldNotFind() {
assertTrue(matcher.isEmpty());
final MetricDefinitionAndTenantId toMatch =
new MetricDefinitionAndTenantId(metricDef, tenantId);
verifyNoMatch(toMatch);
final MetricDefinitionAndTenantId diffTenantId = new MetricDefinitionAndTenantId(metricDef, "Different");
matcher.add(diffTenantId);
verifyNoMatch(toMatch);
final MetricDefinitionAndTenantId diffTenantId =
new MetricDefinitionAndTenantId(metricDef, "Different");
matcher.add(diffTenantId);
verifyNoMatch(toMatch);
matcher.add(toMatch);
verifyMatch(toMatch, toMatch);
matcher.add(toMatch);
verifyMatch(toMatch, toMatch);
final MetricDefinitionAndTenantId noMatchOnName = new MetricDefinitionAndTenantId(
new MetricDefinition("NotCpu", dimensions), tenantId);
verifyNoMatch(noMatchOnName);
final MetricDefinitionAndTenantId noMatchOnName =
new MetricDefinitionAndTenantId(new MetricDefinition("NotCpu", dimensions), tenantId);
verifyNoMatch(noMatchOnName);
final Map<String, String> hostDimensions = new HashMap<>(dimensions);
hostDimensions.put(HOST, "OtherHost");
final MetricDefinitionAndTenantId noMatchOnDimensions = new MetricDefinitionAndTenantId(
new MetricDefinition(CPU_METRIC_NAME, hostDimensions), tenantId);
verifyNoMatch(noMatchOnDimensions);
final Map<String, String> hostDimensions = new HashMap<>(dimensions);
hostDimensions.put(HOST, "OtherHost");
final MetricDefinitionAndTenantId noMatchOnDimensions =
new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, hostDimensions),
tenantId);
verifyNoMatch(noMatchOnDimensions);
matcher.remove(toMatch);
verifyNoMatch(toMatch);
matcher.remove(diffTenantId);
assertTrue(matcher.isEmpty());
}
matcher.remove(toMatch);
verifyNoMatch(toMatch);
matcher.remove(diffTenantId);
assertTrue(matcher.isEmpty());
}
private void verifyNoMatch(final MetricDefinitionAndTenantId toMatch) {
verifyMatch(toMatch);
}
private void verifyNoMatch(final MetricDefinitionAndTenantId toMatch) {
verifyMatch(toMatch);
}
private void verifyMatch(final MetricDefinitionAndTenantId toMatch,
final MetricDefinitionAndTenantId ... expected) {
final List<MetricDefinitionAndTenantId> matches = matcher.match(toMatch);
assertEqualsNoOrder(matches.toArray(), expected);
}
private void verifyMatch(final MetricDefinitionAndTenantId toMatch,
final MetricDefinitionAndTenantId... expected) {
final List<MetricDefinitionAndTenantId> matches = matcher.match(toMatch);
assertEqualsNoOrder(matches.toArray(), expected);
}
public void shouldFind() {
assertTrue(matcher.isEmpty());
final MetricDefinitionAndTenantId toMatch = new MetricDefinitionAndTenantId(metricDef, tenantId);
public void shouldFind() {
assertTrue(matcher.isEmpty());
final MetricDefinitionAndTenantId toMatch =
new MetricDefinitionAndTenantId(metricDef, tenantId);
final Map<String, String> nullDimensions = new HashMap<>(dimensions);
nullDimensions.put(HOST, null);
final MetricDefinitionAndTenantId nullMatch = new MetricDefinitionAndTenantId(
new MetricDefinition(CPU_METRIC_NAME, nullDimensions), tenantId);
matcher.add(nullMatch);
verifyMatch(nullMatch, nullMatch);
final Map<String, String> nullDimensions = new HashMap<>(dimensions);
nullDimensions.put(HOST, null);
final MetricDefinitionAndTenantId nullMatch =
new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, nullDimensions),
tenantId);
matcher.add(nullMatch);
verifyMatch(nullMatch, nullMatch);
final Map<String, String> noDimensions = new HashMap<>();
final MetricDefinitionAndTenantId noMatch = new MetricDefinitionAndTenantId(
new MetricDefinition(CPU_METRIC_NAME, noDimensions), tenantId);
matcher.add(noMatch);
verifyMatch(noMatch, noMatch);
final Map<String, String> noDimensions = new HashMap<>();
final MetricDefinitionAndTenantId noMatch =
new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, noDimensions),
tenantId);
matcher.add(noMatch);
verifyMatch(noMatch, noMatch);
final Map<String, String> hostDimensions = new HashMap<>();
hostDimensions.put(HOST, dimensions.get(HOST));
final MetricDefinitionAndTenantId hostMatch = new MetricDefinitionAndTenantId(
new MetricDefinition(CPU_METRIC_NAME, hostDimensions), tenantId);
matcher.add(hostMatch);
final Map<String, String> hostDimensions = new HashMap<>();
hostDimensions.put(HOST, dimensions.get(HOST));
final MetricDefinitionAndTenantId hostMatch =
new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, hostDimensions),
tenantId);
matcher.add(hostMatch);
final Map<String, String> groupDimensions = new HashMap<>();
groupDimensions.put(LOAD_BALANCER_GROUP, dimensions.get(LOAD_BALANCER_GROUP));
final MetricDefinitionAndTenantId groupMatch = new MetricDefinitionAndTenantId(
new MetricDefinition(CPU_METRIC_NAME, groupDimensions), tenantId);
matcher.add(groupMatch);
final Map<String, String> groupDimensions = new HashMap<>();
groupDimensions.put(LOAD_BALANCER_GROUP, dimensions.get(LOAD_BALANCER_GROUP));
final MetricDefinitionAndTenantId groupMatch =
new MetricDefinitionAndTenantId(new MetricDefinition(CPU_METRIC_NAME, groupDimensions),
tenantId);
matcher.add(groupMatch);
verifyMatch(toMatch, noMatch, hostMatch, groupMatch);
verifyMatch(toMatch, noMatch, hostMatch, groupMatch);
matcher.add(toMatch);
verifyMatch(toMatch, noMatch, hostMatch, groupMatch, toMatch);
matcher.add(toMatch);
verifyMatch(toMatch, noMatch, hostMatch, groupMatch, toMatch);
matcher.remove(groupMatch);
verifyMatch(toMatch, noMatch, hostMatch, toMatch);
matcher.remove(groupMatch);
verifyMatch(toMatch, noMatch, hostMatch, toMatch);
matcher.remove(noMatch);
verifyMatch(toMatch, hostMatch, toMatch);
matcher.remove(noMatch);
verifyMatch(toMatch, hostMatch, toMatch);
matcher.remove(toMatch);
verifyMatch(toMatch, hostMatch);
matcher.remove(toMatch);
verifyMatch(toMatch, hostMatch);
// Remove it again to ensure it won't throw an exception if the MetricDefinitionAndTenantId
// doesn't exist
matcher.remove(toMatch);
// Remove it again to ensure it won't throw an exception if the MetricDefinitionAndTenantId
// doesn't exist
matcher.remove(toMatch);
final MetricDefinitionAndTenantId loadMetric = new MetricDefinitionAndTenantId(
new MetricDefinition("load", new HashMap<String, String>(dimensions)), tenantId);
matcher.add(loadMetric);
final MetricDefinitionAndTenantId loadMetric =
new MetricDefinitionAndTenantId(new MetricDefinition("load", new HashMap<String, String>(
dimensions)), tenantId);
matcher.add(loadMetric);
matcher.remove(hostMatch);
verifyNoMatch(toMatch);
matcher.remove(hostMatch);
verifyNoMatch(toMatch);
// Remove it again to ensure it won't throw an exception if the MetricDefinitionAndTenantId
// doesn't exist
matcher.remove(hostMatch);
// Remove it again to ensure it won't throw an exception if the MetricDefinitionAndTenantId
// doesn't exist
matcher.remove(hostMatch);
matcher.remove(loadMetric);
matcher.remove(nullMatch);
assertTrue(matcher.isEmpty());
verifyNoMatch(toMatch);
}
matcher.remove(loadMetric);
matcher.remove(nullMatch);
assertTrue(matcher.isEmpty());
verifyNoMatch(toMatch);
}
public void shouldCreatePossiblePairs() {
final Map<String, String> dimensions = new HashMap<>();
DimensionSet[] actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
DimensionSet[] expected = { new DimensionSet() };
assertEqualsNoOrder(actual, expected);
public void shouldCreatePossiblePairs() {
final Map<String, String> dimensions = new HashMap<>();
DimensionSet[] actual =
matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
DimensionSet[] expected = {new DimensionSet()};
assertEqualsNoOrder(actual, expected);
dimensions.put("1", "a");
actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected = new DimensionSet[] { new DimensionSet(), new DimensionSet(new DimensionPair("1", "a")) };
assertEqualsNoOrder(actual, expected);
dimensions.put("1", "a");
actual =
matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected =
new DimensionSet[] {new DimensionSet(), new DimensionSet(new DimensionPair("1", "a"))};
assertEqualsNoOrder(actual, expected);
dimensions.put("2", "b");
actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected = new DimensionSet[] { new DimensionSet(), new DimensionSet(new DimensionPair("1", "a")),
new DimensionSet(new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b")) };
assertEqualsNoOrder(actual, expected);
dimensions.put("2", "b");
actual =
matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected =
new DimensionSet[] {new DimensionSet(), new DimensionSet(new DimensionPair("1", "a")),
new DimensionSet(new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"))};
assertEqualsNoOrder(actual, expected);
dimensions.put("3", "c");
actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected = new DimensionSet[] { new DimensionSet(),
new DimensionSet(new DimensionPair("1", "a")),
new DimensionSet(new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"), new DimensionPair("3", "c"))
};
dimensions.put("3", "c");
actual =
matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected =
new DimensionSet[] {
new DimensionSet(),
new DimensionSet(new DimensionPair("1", "a")),
new DimensionSet(new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
new DimensionPair("3", "c"))};
dimensions.put("4", "d");
actual = matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected = new DimensionSet[] { new DimensionSet(),
new DimensionSet(new DimensionPair("1", "a")),
new DimensionSet(new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("3", "c"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"), new DimensionPair("3", "c"), new DimensionPair("4", "d"))
};
assertEqualsNoOrder(actual, expected);
}
dimensions.put("4", "d");
actual =
matcher.createPossibleDimensionPairs(new MetricDefinition(CPU_METRIC_NAME, dimensions));
expected =
new DimensionSet[] {
new DimensionSet(),
new DimensionSet(new DimensionPair("1", "a")),
new DimensionSet(new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("3", "c"), new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
new DimensionPair("3", "c")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("3", "c"),
new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("2", "b"), new DimensionPair("3", "c"),
new DimensionPair("4", "d")),
new DimensionSet(new DimensionPair("1", "a"), new DimensionPair("2", "b"),
new DimensionPair("3", "c"), new DimensionPair("4", "d"))};
assertEqualsNoOrder(actual, expected);
}
}
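
The expected arrays in shouldCreatePossiblePairs grow as 1, 2, 4, 8, 16 entries, i.e. the matcher appears to enumerate every subset of the incoming metric's dimensions (2^n DimensionSets for n dimensions), which is what allows definitions registered with only some of those dimensions to match. A rough, self-contained sketch of that enumeration using plain JDK collections (this is not the project's DimensionSet/DimensionPair implementation):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class DimensionPowerSetSketch {
  // Returns every subset of the given dimensions, mirroring the 2^n DimensionSets
  // expected by shouldCreatePossiblePairs (empty set included).
  static List<Map<String, String>> possibleDimensionSets(Map<String, String> dimensions) {
    List<Map<String, String>> subsets = new ArrayList<>();
    subsets.add(new LinkedHashMap<String, String>()); // the empty set
    for (Map.Entry<String, String> entry : dimensions.entrySet()) {
      // Each new dimension doubles the count: keep every existing subset and also add a
      // copy of it that includes the new name/value pair.
      List<Map<String, String>> withEntry = new ArrayList<>();
      for (Map<String, String> subset : subsets) {
        Map<String, String> copy = new LinkedHashMap<>(subset);
        copy.put(entry.getKey(), entry.getValue());
        withEntry.add(copy);
      }
      subsets.addAll(withEntry);
    }
    return subsets;
  }

  public static void main(String[] args) {
    Map<String, String> dims = new LinkedHashMap<>();
    dims.put("1", "a");
    dims.put("2", "b");
    dims.put("3", "c");
    dims.put("4", "d");
    System.out.println(possibleDimensionSets(dims).size()); // 16, matching the last expected array
  }
}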

View File

@ -14,18 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.domain.model;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@Test
public class SubAlarmStatsTest {
private AlarmSubExpression expression;
@ -43,7 +44,7 @@ public class SubAlarmStatsTest {
public void shouldBeOkIfAnySlotsInViewAreBelowThreshold() {
subAlarmStats.getStats().addValue(5, 1);
assertFalse(subAlarmStats.evaluateAndSlideWindow(61));
assertEquals(subAlarmStats.getSubAlarm().getState(), AlarmState.UNDETERMINED);
assertEquals(subAlarmStats.getSubAlarm().getState(), AlarmState.UNDETERMINED);
subAlarmStats.getStats().addValue(1, 62);
assertTrue(subAlarmStats.evaluateAndSlideWindow(121));
@ -106,8 +107,9 @@ public class SubAlarmStatsTest {
// equivalent to the behavior in CloudWatch for an alarm with 3 evaluation periods. 2 more
// slides to move the value outside of the window and 6 more to exceed the observation
// threshold.
for (int i = 0; i < 7; i++)
for (int i = 0; i < 7; i++) {
assertFalse(subAlarmStats.evaluateAndSlideWindow(initialTime += 60));
}
assertTrue(subAlarmStats.evaluateAndSlideWindow(initialTime += 60));
assertEquals(subAlarmStats.getSubAlarm().getState(), AlarmState.UNDETERMINED);
subAlarmStats.getStats().addValue(5, initialTime - 1);
@ -139,10 +141,11 @@ public class SubAlarmStatsTest {
}
public void checkLongPeriod() {
final AlarmSubExpression subExpr = AlarmSubExpression.of("sum(hpcs.compute.mem{id=5}, 120) >= 96");
final AlarmSubExpression subExpr =
AlarmSubExpression.of("sum(hpcs.compute.mem{id=5}, 120) >= 96");
final SubAlarm subAlarm = new SubAlarm("42", "4242", subExpr);
long t1 = 0;
final SubAlarmStats stats = new SubAlarmStats(subAlarm, t1 + subExpr.getPeriod());
for (int i = 0; i < 360; i++) {
@ -150,13 +153,14 @@ public class SubAlarmStatsTest {
stats.getStats().addValue(1.0, t1);
if ((t1 % 60) == 0) {
stats.evaluateAndSlideWindow(t1);
if (i <= 60)
// First check will show it is OK. You could argue that this is incorrect
// as we have not waited for the whole period so we can't really evaluate it.
// That is true for sum and count
assertEquals(stats.getSubAlarm().getState(), AlarmState.OK);
else
assertEquals(stats.getSubAlarm().getState(), AlarmState.ALARM);
if (i <= 60) {
// First check will show it is OK. You could argue that this is incorrect
// as we have not waited for the whole period so we can't really evaluate it.
// That is true for sum and count
assertEquals(stats.getSubAlarm().getState(), AlarmState.OK);
} else {
assertEquals(stats.getSubAlarm().getState(), AlarmState.ALARM);
}
}
}
}
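
A back-of-the-envelope check of checkLongPeriod's assertions, assuming one 1.0 sample per second and a 120-second sum window evaluated every 60 seconds: at the first evaluation only about 60 samples are in the window, so the sum is roughly 60, below the threshold of 96, and the sub-alarm stays OK; once a full 120 seconds of data is present the sum is roughly 120, which meets the threshold and flips it to ALARM. The same arithmetic as a sketch (not the SlidingWindowStats implementation):

public class LongPeriodSumSketch {
  public static void main(String[] args) {
    final double threshold = 96;        // from "sum(hpcs.compute.mem{id=5}, 120) >= 96"
    final double valuePerSecond = 1.0;  // the test adds 1.0 every second

    double sumAfterFirstMinute = 60 * valuePerSecond;  // only half the window has data
    double sumWithFullWindow = 120 * valuePerSecond;   // the whole 120 second period

    System.out.println(sumAfterFirstMinute >= threshold); // false -> sub-alarm reads OK
    System.out.println(sumWithFullWindow >= threshold);   // true  -> sub-alarm goes to ALARM
  }
}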

View File

@ -14,12 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import static org.testng.Assert.assertEquals;
import java.nio.charset.Charset;
import java.util.Arrays;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.domain.model.Alarm;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.AlarmDAO;
import com.google.common.io.Resources;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
@ -28,13 +35,8 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.io.Resources;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.domain.model.Alarm;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.AlarmDAO;
import java.nio.charset.Charset;
import java.util.Arrays;
@Test
public class AlarmDAOImplTest {
@ -52,7 +54,8 @@ public class AlarmDAOImplTest {
protected void setupClass() throws Exception {
db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
handle = db.open();
handle.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
handle
.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
dao = new AlarmDAOImpl(db);
}
@ -68,12 +71,16 @@ public class AlarmDAOImplTest {
handle.execute("truncate table sub_alarm_dimension");
handle.execute("truncate table alarm_action");
String sql = String.format("insert into alarm (id, tenant_id, name, description, expression, state, actions_enabled, created_at, updated_at) "
+ "values ('%s', '%s', '%s', '%s', 'avg(hpcs.compute{disk=vda, instance_id=123, metric_name=cpu}) > 10', 'UNDETERMINED', %d, NOW(), NOW())",
ALARM_ID, TENANT_ID, ALARM_NAME, ALARM_DESCR, ALARM_ENABLED ? 1 : 0);
String sql =
String
.format(
"insert into alarm (id, tenant_id, name, description, expression, state, actions_enabled, created_at, updated_at) "
+ "values ('%s', '%s', '%s', '%s', 'avg(hpcs.compute{disk=vda, instance_id=123, metric_name=cpu}) > 10', 'UNDETERMINED', %d, NOW(), NOW())",
ALARM_ID, TENANT_ID, ALARM_NAME, ALARM_DESCR, ALARM_ENABLED ? 1 : 0);
handle.execute(sql);
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('111', '123', 'AVG', 'hpcs.compute', 'GT', 10, 60, 1, NOW(), NOW())");
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('111', '123', 'AVG', 'hpcs.compute', 'GT', 10, 60, 1, NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('111', 'instance_id', '123')");
handle.execute("insert into sub_alarm_dimension values ('111', 'disk', 'vda')");
handle.execute("insert into sub_alarm_dimension values ('111', 'metric_name', 'cpu')");
@ -83,9 +90,10 @@ public class AlarmDAOImplTest {
public void shouldFindById() {
String expr = "avg(hpcs.compute{disk=vda, instance_id=123, metric_name=cpu}) > 10";
Alarm expected = new Alarm(ALARM_ID, TENANT_ID, ALARM_NAME, ALARM_DESCR, AlarmExpression.of(expr),
Arrays.asList(new SubAlarm("111", ALARM_ID, AlarmSubExpression.of(expr))),
AlarmState.UNDETERMINED, Boolean.TRUE);
Alarm expected =
new Alarm(ALARM_ID, TENANT_ID, ALARM_NAME, ALARM_DESCR, AlarmExpression.of(expr),
Arrays.asList(new SubAlarm("111", ALARM_ID, AlarmSubExpression.of(expr))),
AlarmState.UNDETERMINED, Boolean.TRUE);
Alarm alarm = dao.findById(ALARM_ID);

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import static org.testng.Assert.assertTrue;
@ -36,8 +37,8 @@ import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;
/**
* Note: MySQL dependent test because of the group_concat() used in the SQL in MetricDefinitionDAOImpl.
* Depends on the MySQL in mini-mon.
* Note: MySQL dependent test because of the group_concat() used in the SQL in
* MetricDefinitionDAOImpl. Depends on the MySQL in mini-mon.
*/
@Test(groups = "database")
public class MetricDefinitionDAOImplTest {
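
On the group_concat() note above: the DAO has to fold the per-sub-alarm rows of sub_alarm_dimension back into one dimension map per sub_alarm, which is where the MySQL-specific aggregate comes in, presumably because H2's MySQL mode does not emulate it fully. The query below only illustrates that shape; the join and column names are inferred from the fixtures in this test and are assumptions, not the actual SQL in MetricDefinitionDAOImpl:

public class GroupConcatQuerySketch {
  // Hypothetical SQL only; the dimension-table column names are guesses based on the
  // inserts in beforeMethod(), not copied from MetricDefinitionDAOImpl.
  static final String FIND_FOR_ALARMS_SKETCH =
      "select sa.id, sa.metric_name, "
          + "group_concat(sad.dimension_name, '=', sad.value) as dimensions "
          + "from sub_alarm sa "
          + "left join sub_alarm_dimension sad on sad.sub_alarm_id = sa.id "
          + "group by sa.id, sa.metric_name";

  public static void main(String[] args) {
    System.out.println(FIND_FOR_ALARMS_SKETCH);
  }
}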
@ -64,38 +65,39 @@ public class MetricDefinitionDAOImplTest {
protected void beforeMethod() {
cleanUp();
handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('123', '" + TENANT_ID + "', 'Test Alarm', 'Test Alarm Description', 'Not real expr', 'OK', NOW(), NOW())");
handle
.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('123', '"
+ TENANT_ID
+ "', 'Test Alarm', 'Test Alarm Description', 'Not real expr', 'OK', NOW(), NOW())");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
+ "values ('111', '123', 'AVG', 'cpu', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
+ "values ('111', '123', 'AVG', 'cpu', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('111', 'device', '1')");
handle.execute("insert into sub_alarm_dimension values ('111', 'instance_id', '777')");
handle.execute("insert into sub_alarm_dimension values ('111', 'image_id', '888')");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
+ "values ('222', '123', 'AVG', 'mem', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
+ "values ('222', '123', 'AVG', 'mem', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('222', 'instance_id', '123')");
handle.execute("insert into sub_alarm_dimension values ('222', 'az', '2')");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
+ "values ('333', '123', 'AVG', 'bar', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
SubAlarmMetricDefinition metricDef1 = new SubAlarmMetricDefinition("111",
new MetricDefinitionAndTenantId(new MetricDefinition("cpu",
ImmutableMap.<String, String>builder()
.put("device", "1")
.put("instance_id", "777")
.put("image_id", "888")
.build()), TENANT_ID));
SubAlarmMetricDefinition metricDef2 = new SubAlarmMetricDefinition("222",
new MetricDefinitionAndTenantId(new MetricDefinition("mem",
ImmutableMap.<String, String>builder()
.put("az", "2")
.put("instance_id", "123")
.build()), TENANT_ID));
SubAlarmMetricDefinition metricDef3 = new SubAlarmMetricDefinition("333",
new MetricDefinitionAndTenantId(new MetricDefinition("bar",
null), TENANT_ID));
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, state, created_at, updated_at) "
+ "values ('333', '123', 'AVG', 'bar', 'GT', 10, 60, 1, 'OK', NOW(), NOW())");
SubAlarmMetricDefinition metricDef1 =
new SubAlarmMetricDefinition("111", new MetricDefinitionAndTenantId(new MetricDefinition(
"cpu", ImmutableMap.<String, String>builder().put("device", "1")
.put("instance_id", "777").put("image_id", "888").build()), TENANT_ID));
SubAlarmMetricDefinition metricDef2 =
new SubAlarmMetricDefinition("222", new MetricDefinitionAndTenantId(new MetricDefinition(
"mem", ImmutableMap.<String, String>builder().put("az", "2").put("instance_id", "123")
.build()), TENANT_ID));
SubAlarmMetricDefinition metricDef3 =
new SubAlarmMetricDefinition("333", new MetricDefinitionAndTenantId(new MetricDefinition(
"bar", null), TENANT_ID));
expected = Arrays.asList(metricDef1, metricDef2, metricDef3);
}
@ -109,15 +111,17 @@ public class MetricDefinitionDAOImplTest {
List<SubAlarmMetricDefinition> found = dao.findForAlarms();
for (final SubAlarmMetricDefinition toFind : expected)
for (final SubAlarmMetricDefinition toFind : expected) {
assertTrue(found.contains(toFind), "Did not find " + toFind);
}
}
public void shouldNotFindDeletedAlarms() {
handle.execute("update alarm set deleted_at=NOW() where id in ('123')");
handle.execute("update alarm set deleted_at=NOW() where id in ('123')");
List<SubAlarmMetricDefinition> found = dao.findForAlarms();
for (final SubAlarmMetricDefinition toFind : expected)
assertFalse(found.contains(toFind), "Should not have found " + toFind);
List<SubAlarmMetricDefinition> found = dao.findForAlarms();
for (final SubAlarmMetricDefinition toFind : expected) {
assertFalse(found.contains(toFind), "Should not have found " + toFind);
}
}
}

View File

@ -14,13 +14,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.persistence;
import static org.testng.Assert.assertEquals;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.List;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.google.common.io.Resources;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
@ -29,13 +35,9 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.io.Resources;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.List;
@Test
public class SubAlarmDAOImplTest {
@ -48,7 +50,8 @@ public class SubAlarmDAOImplTest {
protected void setupClass() throws Exception {
db = new DBI("jdbc:h2:mem:test;MODE=MySQL");
handle = db.open();
handle.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
handle
.execute(Resources.toString(getClass().getResource("alarm.sql"), Charset.defaultCharset()));
dao = new SubAlarmDAOImpl(db);
}
@ -64,75 +67,101 @@ public class SubAlarmDAOImplTest {
handle.execute("truncate table sub_alarm_dimension");
// These don't have the real Alarm expression because it doesn't matter for this test
handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('123', '" + TENANT_ID + "', 'Test Alarm', 'Test Alarm Description', 'Not real expr', 'OK', NOW(), NOW())");
handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('234', '" + TENANT_ID + "', 'Test Alarm2', 'Test Alarm2 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('345', '" + TENANT_ID + "', 'Test Alarm3', 'Test Alarm3 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('456', '" + TENANT_ID + "', 'Test Alarm4', 'Test Alarm4 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle
.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('123', '"
+ TENANT_ID
+ "', 'Test Alarm', 'Test Alarm Description', 'Not real expr', 'OK', NOW(), NOW())");
handle
.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('234', '"
+ TENANT_ID
+ "', 'Test Alarm2', 'Test Alarm2 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle
.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('345', '"
+ TENANT_ID
+ "', 'Test Alarm3', 'Test Alarm3 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle
.execute("insert into alarm (id, tenant_id, name, description, expression, state, created_at, updated_at) "
+ "values ('456', '"
+ TENANT_ID
+ "', 'Test Alarm4', 'Test Alarm4 Description', 'Not real expr', 'OK', NOW(), NOW())");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('111', '123', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('111', '123', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('111', 'instance_id', '555')");
handle.execute("insert into sub_alarm_dimension values ('111', 'az', '1')");
handle.execute("insert into sub_alarm_dimension values ('111', 'instance_uuid', '555')");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('222', '234', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('222', '234', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('222', 'instance_id', '666')");
handle.execute("insert into sub_alarm_dimension values ('222', 'az', '1')");
handle.execute("insert into sub_alarm_dimension values ('222', 'instance_uuid', '666')");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('333', '345', 'AVG', 'disk', 'GT', 10, 60, 1, NOW(), NOW())");
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('333', '345', 'AVG', 'disk', 'GT', 10, 60, 1, NOW(), NOW())");
handle.execute("insert into sub_alarm_dimension values ('333', 'instance_id', '777')");
handle.execute("insert into sub_alarm_dimension values ('333', 'az', '1')");
handle.execute("insert into sub_alarm_dimension values ('333', 'instance_uuid', '777')");
handle.execute("insert into sub_alarm_dimension values ('333', 'device', 'vda')");
handle.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('444', '456', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
handle
.execute("insert into sub_alarm (id, alarm_id, function, metric_name, operator, threshold, period, periods, created_at, updated_at) "
+ "values ('444', '456', 'AVG', 'cpu', 'GT', 10, 60, 1, NOW(), NOW())");
}
public void shouldFind() {
List<SubAlarm> expected = Arrays.asList(new SubAlarm("111", "123",
AlarmSubExpression.of("avg(cpu{instance_id=555,az=1}) > 10"),
AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms = dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression().getMetricDefinition(), TENANT_ID));
List<SubAlarm> expected =
Arrays.asList(new SubAlarm("111", "123", AlarmSubExpression
.of("avg(cpu{instance_id=555,az=1}) > 10"), AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms =
dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression()
.getMetricDefinition(), TENANT_ID));
assertEquals(subAlarms, expected);
expected = Arrays.asList(new SubAlarm("222", "234",
AlarmSubExpression.of("avg(cpu{instance_id=666,az=1}) > 10"),
AlarmState.UNDETERMINED));
subAlarms = dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression().getMetricDefinition(), TENANT_ID));
expected =
Arrays.asList(new SubAlarm("222", "234", AlarmSubExpression
.of("avg(cpu{instance_id=666,az=1}) > 10"), AlarmState.UNDETERMINED));
subAlarms =
dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression()
.getMetricDefinition(), TENANT_ID));
assertEquals(subAlarms, expected);
}
public void shouldNotFind() {
final String badTenantId = TENANT_ID + "42";
List<SubAlarm> subAlarms = dao.find(new MetricDefinitionAndTenantId(AlarmSubExpression.of("avg(cpu{instance_id=555,az=1}) > 10").getMetricDefinition(), badTenantId));
List<SubAlarm> subAlarms =
dao.find(new MetricDefinitionAndTenantId(AlarmSubExpression.of(
"avg(cpu{instance_id=555,az=1}) > 10").getMetricDefinition(), badTenantId));
assertEquals(subAlarms.size(), 0);
subAlarms = dao.find(new MetricDefinitionAndTenantId(AlarmSubExpression.of("avg(cpu{instance_id=666,az=1}) > 10").getMetricDefinition(), badTenantId));
subAlarms =
dao.find(new MetricDefinitionAndTenantId(AlarmSubExpression.of(
"avg(cpu{instance_id=666,az=1}) > 10").getMetricDefinition(), badTenantId));
assertEquals(subAlarms.size(), 0);
}
public void shouldFindWithSubject() {
List<SubAlarm> expected = Arrays.asList(new SubAlarm(
"333",
"345",
AlarmSubExpression.of("avg(disk{instance_id=777,az=1,device=vda}) > 10"),
AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms = dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression().getMetricDefinition(), TENANT_ID));
List<SubAlarm> expected =
Arrays.asList(new SubAlarm("333", "345", AlarmSubExpression
.of("avg(disk{instance_id=777,az=1,device=vda}) > 10"), AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms =
dao.find(new MetricDefinitionAndTenantId(expected.get(0).getExpression()
.getMetricDefinition(), TENANT_ID));
assertEquals(subAlarms, expected);
}
public void shouldFindForNullDimensions() {
List<SubAlarm> expected = Arrays.asList(new SubAlarm("444", "456",
AlarmSubExpression.of("avg(cpu{}) > 10"), AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms = dao.find(new MetricDefinitionAndTenantId(new MetricDefinition("cpu", null), TENANT_ID));
List<SubAlarm> expected =
Arrays.asList(new SubAlarm("444", "456", AlarmSubExpression.of("avg(cpu{}) > 10"),
AlarmState.UNDETERMINED));
List<SubAlarm> subAlarms =
dao.find(new MetricDefinitionAndTenantId(new MetricDefinition("cpu", null), TENANT_ID));
assertEquals(subAlarms, expected);
}
}

View File

@ -14,30 +14,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.times;
import static org.testng.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import com.hpcloud.mon.ThresholdingConfiguration;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
@ -48,208 +33,240 @@ import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.AlarmDAO;
import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@Test
public class AlarmThresholdingBoltTest {
private static final String ALERT_ROUTING_KEY = "Alert Routing Key";
private static final String ALERTS_EXCHANGE = "Alerts";
private static final String tenantId = "AAAAABBBBBBCCCCC";
private static final String ALERT_ROUTING_KEY = "Alert Routing Key";
private static final String ALERTS_EXCHANGE = "Alerts";
private static final String tenantId = "AAAAABBBBBBCCCCC";
private AlarmExpression alarmExpression;
private Alarm alarm;
private List<SubAlarm> subAlarms;
private AlarmExpression alarmExpression;
private Alarm alarm;
private List<SubAlarm> subAlarms;
private AlarmEventForwarder alarmEventForwarder;
private AlarmDAO alarmDAO;
private AlarmThresholdingBolt bolt;
private OutputCollector collector;
private final String[] subExpressions = {
"avg(cpu{instance_id=123,device=42}, 1) > 5",
"max(load{instance_id=123,device=42}, 1) > 8",
"sum(diskio{instance_id=123,device=42}, 1) > 5000" };
private AlarmEventForwarder alarmEventForwarder;
private AlarmDAO alarmDAO;
private AlarmThresholdingBolt bolt;
private OutputCollector collector;
private final String[] subExpressions = {"avg(cpu{instance_id=123,device=42}, 1) > 5",
"max(load{instance_id=123,device=42}, 1) > 8",
"sum(diskio{instance_id=123,device=42}, 1) > 5000"};
@BeforeMethod
protected void beforeMethod() {
final String alarmId = "111111112222222222233333333334";
final StringBuilder builder = new StringBuilder();
for (final String subExpression : subExpressions) {
if (builder.length() > 0)
builder.append(" or ");
builder.append(subExpression);
}
final String expression = builder.toString();
alarm = new Alarm();
alarm.setName("Test CPU Alarm");
alarm.setDescription("Description of Alarm");
alarm.setTenantId(tenantId);
alarm.setId(alarmId);
alarm.setExpression(expression);
alarm.setState(AlarmState.OK);
alarmExpression = new AlarmExpression(expression);
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
subAlarms = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final SubAlarm subAlarm = new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i));
subAlarms.add(subAlarm);
}
alarm.setSubAlarms(subAlarms);
@BeforeMethod
protected void beforeMethod() {
final String alarmId = "111111112222222222233333333334";
final StringBuilder builder = new StringBuilder();
for (final String subExpression : subExpressions) {
if (builder.length() > 0) {
builder.append(" or ");
}
builder.append(subExpression);
}
final String expression = builder.toString();
alarm = new Alarm();
alarm.setName("Test CPU Alarm");
alarm.setDescription("Description of Alarm");
alarm.setTenantId(tenantId);
alarm.setId(alarmId);
alarm.setExpression(expression);
alarm.setState(AlarmState.OK);
alarmExpression = new AlarmExpression(expression);
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
subAlarms = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final SubAlarm subAlarm =
new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i));
subAlarms.add(subAlarm);
}
alarm.setSubAlarms(subAlarms);
alarmEventForwarder = mock(AlarmEventForwarder.class);
alarmDAO = mock(AlarmDAO.class);
bolt = new MockAlarmThreshholdBolt(alarmDAO, alarmEventForwarder);
collector = mock(OutputCollector.class);
final Map<String, String> config = new HashMap<>();
config.put(ThresholdingConfiguration.ALERTS_EXCHANGE, ALERTS_EXCHANGE);
config.put(ThresholdingConfiguration.ALERTS_ROUTING_KEY, ALERT_ROUTING_KEY);
final TopologyContext context = mock(TopologyContext.class);
bolt.prepare(config, context, collector);
alarmEventForwarder = mock(AlarmEventForwarder.class);
alarmDAO = mock(AlarmDAO.class);
bolt = new MockAlarmThreshholdBolt(alarmDAO, alarmEventForwarder);
collector = mock(OutputCollector.class);
final Map<String, String> config = new HashMap<>();
config.put(ThresholdingConfiguration.ALERTS_EXCHANGE, ALERTS_EXCHANGE);
config.put(ThresholdingConfiguration.ALERTS_ROUTING_KEY, ALERT_ROUTING_KEY);
final TopologyContext context = mock(TopologyContext.class);
bolt.prepare(config, context, collector);
}
/**
* Create a simple Alarm with one sub expression. Send a SubAlarm with state set to ALARM. Ensure
 * that the Alarm was triggered and sent.
*/
public void simpleAlarmCreation() {
final SubAlarm subAlarm = subAlarms.get(0);
final String alarmId = alarm.getId();
when(alarmDAO.findById(alarmId)).thenReturn(alarm);
emitSubAlarmStateChange(alarmId, subAlarm, AlarmState.ALARM);
for (int i = 1; i < subAlarms.size(); i++) {
emitSubAlarmStateChange(alarmId, subAlarms.get(i), AlarmState.OK);
}
final String alarmJson =
"{\"alarm-transitioned\":{\"tenantId\":\""
+ tenantId
+ "\","
+ "\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\","
+ "\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"OK\",\"newState\":\"ALARM\","
+ "\"actionsEnabled\":true,"
+ "\"stateChangeReason\":\"Thresholds were exceeded for the sub-alarms: ["
+ subAlarm.getExpression().getExpression() + "]\"," + "\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, alarmJson);
verify(alarmDAO, times(1)).updateState(alarmId, AlarmState.ALARM);
// Now clear the alarm and ensure another notification gets sent out
subAlarm.setState(AlarmState.OK);
final Tuple clearTuple = createSubAlarmStateChangeTuple(alarmId, subAlarm);
bolt.execute(clearTuple);
verify(collector, times(1)).ack(clearTuple);
final String okJson =
"{\"alarm-transitioned\":{\"tenantId\":\""
+ tenantId
+ "\","
+ "\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\","
+ "\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"ALARM\",\"newState\":\"OK\","
+ "\"actionsEnabled\":true,"
+ "\"stateChangeReason\":\"The alarm threshold(s) have not been exceeded\",\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, okJson);
verify(alarmDAO, times(1)).updateState(alarmId, AlarmState.OK);
}
public void simpleAlarmUpdate() {
String alarmId = setUpInitialAlarm();
// Now send an AlarmUpdatedEvent
final Map<String, AlarmSubExpression> empty = new HashMap<>();
final String newName = "New Name";
final String newDescription = "New Description";
final AlarmState newState = AlarmState.OK;
boolean newEnabled = false;
final AlarmUpdatedEvent event =
new AlarmUpdatedEvent(tenantId, alarmId, newName, newDescription, alarm
.getAlarmExpression().getExpression(), alarm.getState(), newState, newEnabled, empty,
empty, empty, empty);
final Tuple updateTuple = createAlarmUpdateTuple(event);
bolt.execute(updateTuple);
verify(collector, times(1)).ack(updateTuple);
assertEquals(alarm.getName(), newName);
assertEquals(alarm.getState(), newState);
assertEquals(alarm.isActionsEnabled(), newEnabled);
}
public void complexAlarmUpdate() {
String alarmId = setUpInitialAlarm();
// Now send an AlarmUpdatedEvent
final Map<String, AlarmSubExpression> newSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> oldSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> changedSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> unchangedSubExpressions = new HashMap<>();
final String newExpression =
subExpressions[1] + " or " + subExpressions[2].replace("max", "avg") + " or "
+ "sum(diskio{instance_id=123,device=4242}, 1) > 5000";
final AlarmExpression newAlarmExpression = new AlarmExpression(newExpression);
final SubAlarm newSubAlarm =
new SubAlarm(UUID.randomUUID().toString(), alarmId, newAlarmExpression.getSubExpressions()
.get(2));
newSubExpressions.put(newSubAlarm.getId(), newSubAlarm.getExpression());
final SubAlarm deletedSubAlarm = subAlarms.get(0);
oldSubExpressions.put(deletedSubAlarm.getId(), deletedSubAlarm.getExpression());
final SubAlarm changedSubAlarm =
new SubAlarm(subAlarms.get(2).getId(), alarmId, newAlarmExpression.getSubExpressions().get(
1));
changedSubExpressions.put(changedSubAlarm.getId(), changedSubAlarm.getExpression());
final SubAlarm unChangedSubAlarm =
new SubAlarm(subAlarms.get(1).getId(), alarmId, subAlarms.get(1).getExpression());
unchangedSubExpressions.put(unChangedSubAlarm.getId(), unChangedSubAlarm.getExpression());
emitSubAlarmStateChange(alarmId, changedSubAlarm, AlarmState.OK);
emitSubAlarmStateChange(alarmId, unChangedSubAlarm, AlarmState.OK);
unChangedSubAlarm.setState(AlarmState.OK);
final AlarmUpdatedEvent event =
new AlarmUpdatedEvent(tenantId, alarmId, alarm.getName(), alarm.getDescription(),
newExpression, alarm.getState(), alarm.getState(), alarm.isActionsEnabled(),
oldSubExpressions, changedSubExpressions, unchangedSubExpressions, newSubExpressions);
final Tuple updateTuple = createAlarmUpdateTuple(event);
bolt.execute(updateTuple);
verify(collector, times(1)).ack(updateTuple);
final Alarm changedAlarm = bolt.alarms.get(alarmId);
assertEquals(changedAlarm.getAlarmExpression(), newAlarmExpression);
assertEquals(changedAlarm.getSubAlarms().size(), 3);
assertEquals(changedAlarm.getSubAlarm(unChangedSubAlarm.getId()), unChangedSubAlarm);
assertEquals(changedAlarm.getSubAlarm(newSubAlarm.getId()), newSubAlarm);
changedSubAlarm.setState(AlarmState.OK);
assertEquals(changedAlarm.getSubAlarm(changedSubAlarm.getId()), changedSubAlarm);
assertEquals(changedSubAlarm.isNoState(), false);
}
private String setUpInitialAlarm() {
final String alarmId = alarm.getId();
when(alarmDAO.findById(alarmId)).thenReturn(alarm);
// Load up the original Alarm
emitSubAlarmStateChange(alarmId, subAlarms.get(0), AlarmState.ALARM);
return alarmId;
}
private void emitSubAlarmStateChange(String alarmId, final SubAlarm subAlarm, AlarmState state) {
// Create a copy so changing the state doesn't directly update the ones in the bolt
final SubAlarm toEmit =
new SubAlarm(subAlarm.getId(), subAlarm.getAlarmId(), subAlarm.getExpression());
toEmit.setState(state);
final Tuple tuple = createSubAlarmStateChangeTuple(alarmId, toEmit);
bolt.execute(tuple);
verify(collector, times(1)).ack(tuple);
}
private Tuple createAlarmUpdateTuple(AlarmUpdatedEvent event) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.ALARM_EVENT_STREAM_ID);
final Tuple tuple =
Testing.testTuple(Arrays.asList(EventProcessingBolt.UPDATED, event.alarmId, event),
tupleParam);
return tuple;
}
private Tuple createSubAlarmStateChangeTuple(String alarmId, final SubAlarm subAlarm) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("alarmId", "subAlarm");
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(alarmId, subAlarm), tupleParam);
return tuple;
}
private class MockAlarmThreshholdBolt extends AlarmThresholdingBolt {
private static final long serialVersionUID = 1L;
public MockAlarmThreshholdBolt(AlarmDAO alarmDAO, AlarmEventForwarder alarmEventForwarder) {
super(alarmDAO, alarmEventForwarder);
}
/**
* Create a simple Alarm with one sub expression.
* Send a SubAlarm with state set to ALARM.
 * Ensure that the Alarm was triggered and sent.
*/
public void simpleAlarmCreation() {
final SubAlarm subAlarm = subAlarms.get(0);
final String alarmId = alarm.getId();
when(alarmDAO.findById(alarmId)).thenReturn(alarm);
emitSubAlarmStateChange(alarmId, subAlarm, AlarmState.ALARM);
for (int i = 1; i < subAlarms.size(); i++) {
emitSubAlarmStateChange(alarmId, subAlarms.get(i), AlarmState.OK);
}
final String alarmJson = "{\"alarm-transitioned\":{\"tenantId\":\"" + tenantId + "\"," +
"\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\"," +
"\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"OK\",\"newState\":\"ALARM\"," +
"\"actionsEnabled\":true," +
"\"stateChangeReason\":\"Thresholds were exceeded for the sub-alarms: [" + subAlarm.getExpression().getExpression() + "]\"," +
"\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, alarmJson);
verify(alarmDAO, times(1)).updateState(alarmId, AlarmState.ALARM);
// Now clear the alarm and ensure another notification gets sent out
subAlarm.setState(AlarmState.OK);
final Tuple clearTuple = createSubAlarmStateChangeTuple(alarmId, subAlarm);
bolt.execute(clearTuple);
verify(collector, times(1)).ack(clearTuple);
final String okJson = "{\"alarm-transitioned\":{\"tenantId\":\"" + tenantId + "\"," +
"\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\"," +
"\"alarmDescription\":\"Description of Alarm\",\"oldState\":\"ALARM\",\"newState\":\"OK\"," +
"\"actionsEnabled\":true," +
"\"stateChangeReason\":\"The alarm threshold(s) have not been exceeded\",\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, okJson);
verify(alarmDAO, times(1)).updateState(alarmId, AlarmState.OK);
}
public void simpleAlarmUpdate() {
String alarmId = setUpInitialAlarm();
// Now send an AlarmUpdatedEvent
final Map<String, AlarmSubExpression> empty = new HashMap<>();
final String newName = "New Name";
final String newDescription = "New Description";
final AlarmState newState = AlarmState.OK;
boolean newEnabled = false;
final AlarmUpdatedEvent event = new AlarmUpdatedEvent(tenantId, alarmId, newName, newDescription, alarm.getAlarmExpression().getExpression(),
alarm.getState(), newState, newEnabled, empty, empty, empty, empty);
final Tuple updateTuple = createAlarmUpdateTuple(event);
bolt.execute(updateTuple);
verify(collector, times(1)).ack(updateTuple);
assertEquals(alarm.getName(), newName);
assertEquals(alarm.getState(), newState);
assertEquals(alarm.isActionsEnabled(), newEnabled);
}
public void complexAlarmUpdate() {
String alarmId = setUpInitialAlarm();
// Now send an AlarmUpdatedEvent
final Map<String, AlarmSubExpression> newSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> oldSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> changedSubExpressions = new HashMap<>();
final Map<String, AlarmSubExpression> unchangedSubExpressions = new HashMap<>();
final String newExpression = subExpressions[1] + " or " +
subExpressions[2].replace("max", "avg") + " or " +
"sum(diskio{instance_id=123,device=4242}, 1) > 5000";
final AlarmExpression newAlarmExpression = new AlarmExpression(newExpression);
final SubAlarm newSubAlarm = new SubAlarm(UUID.randomUUID().toString(), alarmId, newAlarmExpression.getSubExpressions().get(2));
newSubExpressions.put(newSubAlarm.getId(), newSubAlarm.getExpression());
final SubAlarm deletedSubAlarm = subAlarms.get(0);
oldSubExpressions.put(deletedSubAlarm.getId(), deletedSubAlarm.getExpression());
final SubAlarm changedSubAlarm = new SubAlarm(subAlarms.get(2).getId(), alarmId, newAlarmExpression.getSubExpressions().get(1));
changedSubExpressions.put(changedSubAlarm.getId(), changedSubAlarm.getExpression());
final SubAlarm unChangedSubAlarm = new SubAlarm(subAlarms.get(1).getId(), alarmId, subAlarms.get(1).getExpression());
unchangedSubExpressions.put(unChangedSubAlarm.getId(), unChangedSubAlarm.getExpression());
emitSubAlarmStateChange(alarmId, changedSubAlarm, AlarmState.OK);
emitSubAlarmStateChange(alarmId, unChangedSubAlarm, AlarmState.OK);
unChangedSubAlarm.setState(AlarmState.OK);
final AlarmUpdatedEvent event = new AlarmUpdatedEvent(tenantId, alarmId, alarm.getName(), alarm.getDescription(), newExpression,
alarm.getState(), alarm.getState(), alarm.isActionsEnabled(), oldSubExpressions, changedSubExpressions, unchangedSubExpressions, newSubExpressions);
final Tuple updateTuple = createAlarmUpdateTuple(event);
bolt.execute(updateTuple);
verify(collector, times(1)).ack(updateTuple);
final Alarm changedAlarm = bolt.alarms.get(alarmId);
assertEquals(changedAlarm.getAlarmExpression(), newAlarmExpression);
assertEquals(changedAlarm.getSubAlarms().size(), 3);
assertEquals(changedAlarm.getSubAlarm(unChangedSubAlarm.getId()), unChangedSubAlarm);
assertEquals(changedAlarm.getSubAlarm(newSubAlarm.getId()), newSubAlarm);
changedSubAlarm.setState(AlarmState.OK);
assertEquals(changedAlarm.getSubAlarm(changedSubAlarm.getId()), changedSubAlarm);
assertEquals(changedSubAlarm.isNoState(), false);
}
private String setUpInitialAlarm() {
final String alarmId = alarm.getId();
when(alarmDAO.findById(alarmId)).thenReturn(alarm);
// Load up the original Alarm
emitSubAlarmStateChange(alarmId, subAlarms.get(0), AlarmState.ALARM);
return alarmId;
}
private void emitSubAlarmStateChange(String alarmId,
final SubAlarm subAlarm, AlarmState state) {
// Create a copy so changing the state doesn't directly update the ones in the bolt
final SubAlarm toEmit = new SubAlarm(subAlarm.getId(), subAlarm.getAlarmId(), subAlarm.getExpression());
toEmit.setState(state);
final Tuple tuple = createSubAlarmStateChangeTuple(alarmId, toEmit);
bolt.execute(tuple);
verify(collector, times(1)).ack(tuple);
}
private Tuple createAlarmUpdateTuple(AlarmUpdatedEvent event) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.ALARM_EVENT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(EventProcessingBolt.UPDATED, event.alarmId, event), tupleParam);
return tuple;
}
private Tuple createSubAlarmStateChangeTuple(String alarmId, final SubAlarm subAlarm) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("alarmId", "subAlarm");
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(alarmId, subAlarm), tupleParam);
return tuple;
}
private class MockAlarmThreshholdBolt extends AlarmThresholdingBolt {
private static final long serialVersionUID = 1L;
public MockAlarmThreshholdBolt(AlarmDAO alarmDAO,
AlarmEventForwarder alarmEventForwarder) {
super(alarmDAO, alarmEventForwarder);
}
@Override
protected long getTimestamp() {
// Have to keep the time stamp constant so JSON comparison works
return 1395587091;
}
@Override
protected long getTimestamp() {
// Have to keep the time stamp constant so JSON comparison works
return 1395587091;
}
}
}
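The JSON assertions in simpleAlarmCreation compare serialized strings, which only works because MockAlarmThreshholdBolt pins getTimestamp() to a constant and because field order and formatting never change. If a structural comparison were ever preferred, the expected and actual payloads could be parsed and compared as trees instead; a rough sketch, assuming jackson-databind is available on the test classpath (only the Jackson annotations are visible in this change set):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

final class JsonAssert {
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // Compares two JSON documents by structure and value rather than by raw text,
  // so key order and whitespace stop mattering (the pinned timestamp still must match).
  static boolean sameJson(String expected, String actual) throws Exception {
    final JsonNode expectedTree = MAPPER.readTree(expected);
    final JsonNode actualTree = MAPPER.readTree(actual);
    return expectedTree.equals(actualTree);
  }
}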

View File

@ -14,35 +14,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Sets;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
@ -55,212 +33,246 @@ import com.hpcloud.mon.domain.model.MetricDefinitionAndTenantId;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Sets;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
@Test
public class EventProcessingBoltTest {
private static final String TENANT_ID = "AAAAABBBBBBCCCCC";
private EventProcessingBolt bolt;
private OutputCollector collector;
private AlarmExpression alarmExpression;
private Alarm alarm;
private List<SubAlarm> subAlarms;
private static final String TENANT_ID = "AAAAABBBBBBCCCCC";
private EventProcessingBolt bolt;
private OutputCollector collector;
private AlarmExpression alarmExpression;
private Alarm alarm;
private List<SubAlarm> subAlarms;
@BeforeMethod
protected void beforeMethod() {
collector = mock(OutputCollector.class);
bolt = new EventProcessingBolt();
@BeforeMethod
protected void beforeMethod() {
collector = mock(OutputCollector.class);
bolt = new EventProcessingBolt();
final Map<String, String> config = new HashMap<>();
final TopologyContext context = mock(TopologyContext.class);
bolt.prepare(config, context, collector);
final Map<String, String> config = new HashMap<>();
final TopologyContext context = mock(TopologyContext.class);
bolt.prepare(config, context, collector);
final String alarmId = "111111112222222222233333333334";
final String name = "Test CPU Alarm";
final String description = "Description of " + name;
final String expression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " +
"and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 " +
"and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
alarmExpression = new AlarmExpression(expression);
subAlarms = createSubAlarms(alarmId, alarmExpression);
alarm = new Alarm(alarmId, TENANT_ID, name, description, alarmExpression, subAlarms,
AlarmState.UNDETERMINED, Boolean.TRUE);
final String alarmId = "111111112222222222233333333334";
final String name = "Test CPU Alarm";
final String description = "Description of " + name;
final String expression =
"avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 "
+ "and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 "
+ "and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
alarmExpression = new AlarmExpression(expression);
subAlarms = createSubAlarms(alarmId, alarmExpression);
alarm =
new Alarm(alarmId, TENANT_ID, name, description, alarmExpression, subAlarms,
AlarmState.UNDETERMINED, Boolean.TRUE);
}
private List<SubAlarm> createSubAlarms(final String alarmId,
final AlarmExpression alarmExpression, String... ids) {
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
final List<SubAlarm> subAlarms = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final String id;
if (i >= ids.length) {
id = UUID.randomUUID().toString();
} else {
id = ids[i];
}
final SubAlarm subAlarm = new SubAlarm(id, alarmId, subExpressions.get(i));
subAlarms.add(subAlarm);
}
return subAlarms;
}
public void testAlarmCreatedEvent() {
final Map<String, AlarmSubExpression> expressions = createAlarmSubExpressionMap(alarm);
final AlarmCreatedEvent event =
new AlarmCreatedEvent(alarm.getTenantId(), alarm.getId(), alarm.getName(), alarm
.getAlarmExpression().getExpression(), expressions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
for (final SubAlarm subAlarm : subAlarms) {
verifyAddedSubAlarm(subAlarm);
}
verify(collector, times(1)).ack(tuple);
}
private Tuple createTuple(final Object event) {
MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("event");
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(event), tupleParam);
return tuple;
}
public void testAlarmDeletedEvent() {
final Map<String, MetricDefinition> metricDefinitions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
metricDefinitions.put(subAlarm.getId(), subAlarm.getExpression().getMetricDefinition());
}
final AlarmDeletedEvent event =
new AlarmDeletedEvent(alarm.getTenantId(), alarm.getId(), metricDefinitions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
for (final SubAlarm subAlarm : subAlarms) {
verifyDeletedSubAlarm(subAlarm);
}
verify(collector, times(1)).emit(EventProcessingBolt.ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.DELETED, event.alarmId, event));
verify(collector, times(1)).ack(tuple);
}
private void verifyDeletedSubAlarm(final SubAlarm subAlarm) {
verify(collector, times(1)).emit(
EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.DELETED, new MetricDefinitionAndTenantId(subAlarm
.getExpression().getMetricDefinition(), TENANT_ID), subAlarm.getId()));
}
public static AlarmUpdatedEvent createAlarmUpdatedEvent(final Alarm alarm,
final AlarmState newState, final AlarmExpression updatedAlarmExpression,
List<SubAlarm> updatedSubAlarms) {
final Map<String, AlarmSubExpression> oldAlarmSubExpressions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
oldAlarmSubExpressions.put(subAlarm.getId(), subAlarm.getExpression());
}
BiMap<String, AlarmSubExpression> oldExpressions = HashBiMap.create(oldAlarmSubExpressions);
Set<AlarmSubExpression> oldSet = oldExpressions.inverse().keySet();
Set<AlarmSubExpression> newSet = new HashSet<>();
for (final SubAlarm subAlarm : updatedSubAlarms) {
newSet.add(subAlarm.getExpression());
}
private List<SubAlarm> createSubAlarms(final String alarmId,
final AlarmExpression alarmExpression,
String ... ids) {
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
final List<SubAlarm> subAlarms = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final String id;
if (i >= ids.length) {
id = UUID.randomUUID().toString();
}
else {
id = ids[i];
}
final SubAlarm subAlarm = new SubAlarm(id, alarmId, subExpressions.get(i));
subAlarms.add(subAlarm);
// Identify old or changed expressions
Set<AlarmSubExpression> oldOrChangedExpressions =
new HashSet<>(Sets.difference(oldSet, newSet));
// Identify new or changed expressions
Set<AlarmSubExpression> newOrChangedExpressions =
new HashSet<>(Sets.difference(newSet, oldSet));
// Find changed expressions
Map<String, AlarmSubExpression> changedExpressions = new HashMap<>();
for (Iterator<AlarmSubExpression> oldIt = oldOrChangedExpressions.iterator(); oldIt.hasNext();) {
AlarmSubExpression oldExpr = oldIt.next();
for (Iterator<AlarmSubExpression> newIt = newOrChangedExpressions.iterator(); newIt.hasNext();) {
AlarmSubExpression newExpr = newIt.next();
if (sameKeyFields(oldExpr, newExpr)) {
oldIt.remove();
newIt.remove();
changedExpressions.put(oldExpressions.inverse().get(oldExpr), newExpr);
break;
}
return subAlarms;
}
}
public void testAlarmCreatedEvent() {
final Map<String, AlarmSubExpression> expressions = createAlarmSubExpressionMap(alarm);
final AlarmCreatedEvent event = new AlarmCreatedEvent(alarm.getTenantId(), alarm.getId(),
alarm.getName(), alarm.getAlarmExpression().getExpression(), expressions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
for (final SubAlarm subAlarm : subAlarms) {
verifyAddedSubAlarm(subAlarm);
BiMap<String, AlarmSubExpression> unchangedExpressions = HashBiMap.create(oldExpressions);
unchangedExpressions.values().removeAll(oldOrChangedExpressions);
unchangedExpressions.keySet().removeAll(changedExpressions.keySet());
// Remove old sub expressions
oldExpressions.values().retainAll(oldOrChangedExpressions);
// Create IDs for new expressions
Map<String, AlarmSubExpression> newExpressions = new HashMap<>();
for (AlarmSubExpression expression : newOrChangedExpressions) {
for (final SubAlarm subAlarm : updatedSubAlarms) {
if (subAlarm.getExpression().equals(expression)) {
newExpressions.put(subAlarm.getId(), expression);
}
verify(collector, times(1)).ack(tuple);
}
}
private Tuple createTuple(final Object event) {
MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("event");
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(event), tupleParam);
return tuple;
final AlarmUpdatedEvent event =
new AlarmUpdatedEvent(alarm.getTenantId(), alarm.getId(), alarm.getName(),
alarm.getDescription(), updatedAlarmExpression.getExpression(), newState,
alarm.getState(), true, oldExpressions, changedExpressions, unchangedExpressions,
newExpressions);
return event;
}
/**
* Returns whether all of the fields of {@code a} and {@code b} are the same except the operator
* and threshold.
*/
private static boolean sameKeyFields(AlarmSubExpression a, AlarmSubExpression b) {
return a.getMetricDefinition().equals(b.getMetricDefinition())
&& a.getFunction().equals(b.getFunction()) && a.getPeriod() == b.getPeriod()
&& a.getPeriods() == b.getPeriods();
}
public void testAlarmUpdatedEvent() {
final String updatedExpression =
"avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 "
+ "and max(hpcs.compute.mem{instance_id=123,device=42}) > 90 "
+ "and max(hpcs.compute.newLoad{instance_id=123,device=42}) > 5";
final AlarmExpression updatedAlarmExpression = new AlarmExpression(updatedExpression);
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(subAlarms.get(0));
updatedSubAlarms.add(new SubAlarm(subAlarms.get(1).getId(), alarm.getId(),
updatedAlarmExpression.getSubExpressions().get(1)));
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), alarm.getId(),
updatedAlarmExpression.getSubExpressions().get(2)));
final AlarmUpdatedEvent event =
createAlarmUpdatedEvent(alarm, alarm.getState(), updatedAlarmExpression, updatedSubAlarms);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
verify(collector, times(1)).ack(tuple);
verifyDeletedSubAlarm(subAlarms.get(2));
verifyUpdatedSubAlarm(updatedSubAlarms.get(1));
verifyAddedSubAlarm(updatedSubAlarms.get(2));
verify(collector, times(1)).emit(EventProcessingBolt.ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.UPDATED, event.alarmId, event));
}
private void verifyAddedSubAlarm(final SubAlarm subAlarm) {
sendSubAlarm(subAlarm, EventProcessingBolt.CREATED);
}
private void verifyUpdatedSubAlarm(final SubAlarm subAlarm) {
sendSubAlarm(subAlarm, EventProcessingBolt.UPDATED);
}
private void sendSubAlarm(final SubAlarm subAlarm, String eventType) {
verify(collector, times(1)).emit(
EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID,
new Values(eventType, new MetricDefinitionAndTenantId(subAlarm.getExpression()
.getMetricDefinition(), TENANT_ID), subAlarm));
}
private static Map<String, AlarmSubExpression> createAlarmSubExpressionMap(Alarm alarm) {
final Map<String, AlarmSubExpression> oldAlarmSubExpressions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
oldAlarmSubExpressions.put(subAlarm.getId(), subAlarm.getExpression());
}
public void testAlarmDeletedEvent() {
final Map<String, MetricDefinition> metricDefinitions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
metricDefinitions.put(subAlarm.getId(), subAlarm.getExpression().getMetricDefinition());
}
final AlarmDeletedEvent event = new AlarmDeletedEvent(alarm.getTenantId(), alarm.getId(),
metricDefinitions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
for (final SubAlarm subAlarm : subAlarms) {
verifyDeletedSubAlarm(subAlarm);
}
verify(collector, times(1)).emit(EventProcessingBolt.ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.DELETED, event.alarmId, event));
verify(collector, times(1)).ack(tuple);
}
private void verifyDeletedSubAlarm(final SubAlarm subAlarm) {
verify(collector, times(1)).emit(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.DELETED,
new MetricDefinitionAndTenantId(
subAlarm.getExpression().getMetricDefinition(), TENANT_ID), subAlarm.getId()));
}
public static AlarmUpdatedEvent createAlarmUpdatedEvent(final Alarm alarm,
final AlarmState newState,
final AlarmExpression updatedAlarmExpression,
List<SubAlarm> updatedSubAlarms) {
final Map<String, AlarmSubExpression> oldAlarmSubExpressions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms())
oldAlarmSubExpressions.put(subAlarm.getId(), subAlarm.getExpression());
BiMap<String, AlarmSubExpression> oldExpressions = HashBiMap.create(oldAlarmSubExpressions);
Set<AlarmSubExpression> oldSet = oldExpressions.inverse().keySet();
Set<AlarmSubExpression> newSet = new HashSet<>();
for (final SubAlarm subAlarm : updatedSubAlarms)
newSet.add(subAlarm.getExpression());
// Identify old or changed expressions
Set<AlarmSubExpression> oldOrChangedExpressions = new HashSet<>(Sets.difference(oldSet, newSet));
// Identify new or changed expressions
Set<AlarmSubExpression> newOrChangedExpressions = new HashSet<>(Sets.difference(newSet, oldSet));
// Find changed expressions
Map<String, AlarmSubExpression> changedExpressions = new HashMap<>();
for (Iterator<AlarmSubExpression> oldIt = oldOrChangedExpressions.iterator(); oldIt.hasNext();) {
AlarmSubExpression oldExpr = oldIt.next();
for (Iterator<AlarmSubExpression> newIt = newOrChangedExpressions.iterator(); newIt.hasNext();) {
AlarmSubExpression newExpr = newIt.next();
if (sameKeyFields(oldExpr, newExpr)) {
oldIt.remove();
newIt.remove();
changedExpressions.put(oldExpressions.inverse().get(oldExpr), newExpr);
break;
}
}
}
BiMap<String, AlarmSubExpression> unchangedExpressions = HashBiMap.create(oldExpressions);
unchangedExpressions.values().removeAll(oldOrChangedExpressions);
unchangedExpressions.keySet().removeAll(changedExpressions.keySet());
// Remove old sub expressions
oldExpressions.values().retainAll(oldOrChangedExpressions);
// Create IDs for new expressions
Map<String, AlarmSubExpression> newExpressions = new HashMap<>();
for (AlarmSubExpression expression : newOrChangedExpressions)
for (final SubAlarm subAlarm : updatedSubAlarms)
if (subAlarm.getExpression().equals(expression))
newExpressions.put(subAlarm.getId(), expression);
final AlarmUpdatedEvent event = new AlarmUpdatedEvent(alarm.getTenantId(), alarm.getId(),
alarm.getName(), alarm.getDescription(), updatedAlarmExpression.getExpression(), newState, alarm.getState(),
true, oldExpressions,
changedExpressions, unchangedExpressions, newExpressions);
return event;
}
/**
* Returns whether all of the fields of {@code a} and {@code b} are the same except the operator
* and threshold.
*/
private static boolean sameKeyFields(AlarmSubExpression a, AlarmSubExpression b) {
return a.getMetricDefinition().equals(b.getMetricDefinition())
&& a.getFunction().equals(b.getFunction()) && a.getPeriod() == b.getPeriod()
&& a.getPeriods() == b.getPeriods();
}
public void testAlarmUpdatedEvent() {
final String updatedExpression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " +
"and max(hpcs.compute.mem{instance_id=123,device=42}) > 90 " +
"and max(hpcs.compute.newLoad{instance_id=123,device=42}) > 5";
final AlarmExpression updatedAlarmExpression = new AlarmExpression(updatedExpression);
final List<SubAlarm> updatedSubAlarms = new ArrayList<>();
updatedSubAlarms.add(subAlarms.get(0));
updatedSubAlarms.add(new SubAlarm(subAlarms.get(1).getId(), alarm.getId(), updatedAlarmExpression.getSubExpressions().get(1)));
updatedSubAlarms.add(new SubAlarm(UUID.randomUUID().toString(), alarm.getId(), updatedAlarmExpression.getSubExpressions().get(2)));
final AlarmUpdatedEvent event = createAlarmUpdatedEvent(alarm, alarm.getState(), updatedAlarmExpression,
updatedSubAlarms);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
verify(collector, times(1)).ack(tuple);
verifyDeletedSubAlarm(subAlarms.get(2));
verifyUpdatedSubAlarm(updatedSubAlarms.get(1));
verifyAddedSubAlarm(updatedSubAlarms.get(2));
verify(collector, times(1)).emit(EventProcessingBolt.ALARM_EVENT_STREAM_ID,
new Values(EventProcessingBolt.UPDATED, event.alarmId, event));
}
private void verifyAddedSubAlarm(final SubAlarm subAlarm) {
sendSubAlarm(subAlarm, EventProcessingBolt.CREATED);
}
private void verifyUpdatedSubAlarm(final SubAlarm subAlarm) {
sendSubAlarm(subAlarm, EventProcessingBolt.UPDATED);
}
private void sendSubAlarm(final SubAlarm subAlarm, String eventType) {
verify(collector, times(1)).emit(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID,
new Values(eventType,
new MetricDefinitionAndTenantId(
subAlarm.getExpression().getMetricDefinition(), TENANT_ID), subAlarm));
}
private static Map<String, AlarmSubExpression> createAlarmSubExpressionMap(
Alarm alarm) {
final Map<String, AlarmSubExpression> oldAlarmSubExpressions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
oldAlarmSubExpressions.put(subAlarm.getId(), subAlarm.getExpression());
}
return oldAlarmSubExpressions;
}
return oldAlarmSubExpressions;
}
}
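The createAlarmUpdatedEvent helper above classifies sub-expressions into old, changed, unchanged and new buckets using two Guava features: Sets.difference to split the old and new expression sets, and a BiMap's inverse view to map an expression back to its sub-alarm id. A standalone illustration of just those two calls, with toy string values rather than the test's real AlarmSubExpression objects:

import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import java.util.Set;

final class GuavaDiffDemo {
  public static void main(String[] args) {
    final Set<String> oldSet = ImmutableSet.of("avg(cpu) > 5", "max(load) > 8");
    final Set<String> newSet = ImmutableSet.of("max(load) > 8", "sum(diskio) > 5000");

    // Removed or changed: present in the old set but not the new one.
    final Set<String> oldOrChanged = Sets.difference(oldSet, newSet);
    // Added or changed: present in the new set but not the old one.
    final Set<String> newOrChanged = Sets.difference(newSet, oldSet);

    // The BiMap lets the helper recover the sub-alarm id that owned an old expression.
    final BiMap<String, String> idToExpression = HashBiMap.create();
    idToExpression.put("sub-alarm-1", "avg(cpu) > 5");
    idToExpression.put("sub-alarm-2", "max(load) > 8");
    final String changedId = idToExpression.inverse().get("avg(cpu) > 5"); // "sub-alarm-1"

    System.out.println(oldOrChanged + " / " + newOrChanged + " / " + changedId);
  }
}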

View File

@ -14,38 +14,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.reset;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.assertFalse;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Constants;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmState;
@ -59,6 +42,24 @@ import com.hpcloud.mon.domain.service.SubAlarmDAO;
import com.hpcloud.mon.domain.service.SubAlarmStatsRepository;
import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Constants;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@Test
public class MetricAggregationBoltTest {
private static final String TENANT_ID = "42";
@ -103,11 +104,15 @@ public class MetricAggregationBoltTest {
when(dao.find(any(MetricDefinitionAndTenantId.class))).thenAnswer(new Answer<List<SubAlarm>>() {
@Override
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
final MetricDefinitionAndTenantId metricDefinitionAndTenantId = (MetricDefinitionAndTenantId) invocation.getArguments()[0];
final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
(MetricDefinitionAndTenantId) invocation.getArguments()[0];
final List<SubAlarm> result = new ArrayList<>();
for (final SubAlarm subAlarm : subAlarms)
if (subAlarm.getExpression().getMetricDefinition().equals(metricDefinitionAndTenantId.metricDefinition))
for (final SubAlarm subAlarm : subAlarms) {
if (subAlarm.getExpression().getMetricDefinition()
.equals(metricDefinitionAndTenantId.metricDefinition)) {
result.add(subAlarm);
}
}
return result;
}
});
@ -121,15 +126,23 @@ public class MetricAggregationBoltTest {
public void shouldAggregateValues() {
long t1 = System.currentTimeMillis() / 1000;
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(metricDef1.name, metricDef1.dimensions, t1, 100));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(metricDef1.name, metricDef1.dimensions, t1, 80));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(metricDef2.name, metricDef2.dimensions, t1, 50));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(metricDef2.name, metricDef2.dimensions, t1, 40));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(
metricDef1.name, metricDef1.dimensions, t1, 100));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID), new Metric(
metricDef1.name, metricDef1.dimensions, t1, 80));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(
metricDef2.name, metricDef2.dimensions, t1, 50));
bolt.aggregateValues(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), new Metric(
metricDef2.name, metricDef2.dimensions, t1, 40));
SubAlarmStats alarmData = bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID)).get("123");
SubAlarmStats alarmData =
bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID))
.get("123");
assertEquals(alarmData.getStats().getValue(t1), 90.0);
alarmData = bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID)).get("456");
alarmData =
bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef2, TENANT_ID))
.get("456");
assertEquals(alarmData.getStats().getValue(t1), 45.0);
}
@ -154,13 +167,15 @@ public class MetricAggregationBoltTest {
assertEquals(subAlarm3.getState(), AlarmState.UNDETERMINED);
verify(collector, times(1)).emit(new Values(subAlarm1.getAlarmId(), subAlarm1));
// Have to reset the mock so it can tell the difference when subAlarm2 and subAlarm3 are emitted again.
// Have to reset the mock so it can tell the difference when subAlarm2 and subAlarm3 are emitted
// again.
reset(collector);
// Drive subAlarm1 to ALARM
bolt.execute(createMetricTuple(metricDef1, new Metric(metricDef1, t1, 99)));
// Drive subAlarm2 to ALARM and subAlarm3 to OK since they use the same MetricDefinition
bolt.execute(createMetricTuple(metricDef2, new Metric(metricDef2, System.currentTimeMillis() / 1000, 94)));
bolt.execute(createMetricTuple(metricDef2, new Metric(metricDef2,
System.currentTimeMillis() / 1000, 94)));
bolt.execute(tickTuple);
verify(collector, times(1)).ack(tickTuple);
@ -192,8 +207,9 @@ public class MetricAggregationBoltTest {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
final Tuple resendTuple = Testing.testTuple(Arrays.asList(EventProcessingBolt.RESEND,
new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), subAlarm2), tupleParam);
final Tuple resendTuple =
Testing.testTuple(Arrays.asList(EventProcessingBolt.RESEND,
new MetricDefinitionAndTenantId(metricDef2, TENANT_ID), subAlarm2), tupleParam);
bolt.execute(resendTuple);
bolt.execute(createMetricTuple(metricDef2, new Metric(metricDef2, t1, 100)));
@ -241,7 +257,8 @@ public class MetricAggregationBoltTest {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setStream(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM);
final Tuple lagTuple = Testing.testTuple(Arrays.asList(MetricAggregationBolt.METRICS_BEHIND), tupleParam);
final Tuple lagTuple =
Testing.testTuple(Arrays.asList(MetricAggregationBolt.METRICS_BEHIND), tupleParam);
bolt.execute(lagTuple);
verify(collector, times(1)).ack(lagTuple);
@ -274,27 +291,30 @@ public class MetricAggregationBoltTest {
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = new MetricDefinitionAndTenantId(metricDef1, TENANT_ID);
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(metricDef1, TENANT_ID);
assertNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId));
bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED,
metricDefinitionAndTenantId, new SubAlarm("123", "1", subExpr1)), tupleParam));
metricDefinitionAndTenantId, new SubAlarm("123", "1", subExpr1)), tupleParam));
assertNotNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123"));
}
public void validateMetricDefUpdatedThreshold() {
final SubAlarmStats stats = updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80");
final SubAlarmStats stats =
updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80");
assertEquals(stats.getSubAlarm().getExpression().getThreshold(), 80.0);
}
public void validateMetricDefUpdatedOperator() {
final SubAlarmStats stats = updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) < 80");
final SubAlarmStats stats =
updateEnsureMeasurementsKept(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) < 80");
assertEquals(stats.getSubAlarm().getExpression().getOperator(), AlarmOperator.LT);
}
private SubAlarmStats updateEnsureMeasurementsKept(AlarmSubExpression subExpr,
String newSubExpression) {
String newSubExpression) {
final SubAlarmStats stats = updateSubAlarmsStats(subExpr, newSubExpression);
final double[] values = stats.getStats().getWindowValues();
assertFalse(Double.isNaN(values[0])); // Ensure old measurements weren't flushed
@ -302,49 +322,53 @@ public class MetricAggregationBoltTest {
}
public void validateMetricDefReplacedFunction() {
final SubAlarmStats stats = updateEnsureMeasurementsFlushed(subExpr2, "max(hpcs.compute.mem{id=5}, 60) < 80");
final SubAlarmStats stats =
updateEnsureMeasurementsFlushed(subExpr2, "max(hpcs.compute.mem{id=5}, 60) < 80");
assertEquals(stats.getSubAlarm().getExpression().getOperator(), AlarmOperator.LT);
}
public void validateMetricDefReplacedPeriods() {
final SubAlarmStats stats = updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80 times 7");
final SubAlarmStats stats =
updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 60) >= 80 times 7");
assertEquals(stats.getSubAlarm().getExpression().getPeriods(), 7);
}
public void validateMetricDefReplacedPeriod() {
final SubAlarmStats stats = updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 120) >= 80");
final SubAlarmStats stats =
updateEnsureMeasurementsFlushed(subExpr2, "avg(hpcs.compute.mem{id=5}, 120) >= 80");
assertEquals(stats.getSubAlarm().getExpression().getPeriod(), 120);
}
private SubAlarmStats updateEnsureMeasurementsFlushed(AlarmSubExpression subExpr,
String newSubExpression) {
String newSubExpression) {
final SubAlarmStats stats = updateSubAlarmsStats(subExpr, newSubExpression);
final double[] values = stats.getStats().getWindowValues();
assertTrue(Double.isNaN(values[0])); // Ensure old measurements were flushed
return stats;
}
private SubAlarmStats updateSubAlarmsStats(AlarmSubExpression subExpr,
String newSubExpression) {
private SubAlarmStats updateSubAlarmsStats(AlarmSubExpression subExpr, String newSubExpression) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
final MetricDefinitionAndTenantId metricDefinitionAndTenantId = new MetricDefinitionAndTenantId(subExpr.getMetricDefinition(), TENANT_ID);
final MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(subExpr.getMetricDefinition(), TENANT_ID);
assertNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId));
bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED,
metricDefinitionAndTenantId, new SubAlarm("123", "1", subExpr)), tupleParam));
final SubAlarmStats oldStats = bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123");
metricDefinitionAndTenantId, new SubAlarm("123", "1", subExpr)), tupleParam));
final SubAlarmStats oldStats =
bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123");
assertEquals(oldStats.getSubAlarm().getExpression().getThreshold(), 90.0);
assertTrue(oldStats.getStats().addValue(80.0, System.currentTimeMillis()/1000));
assertTrue(oldStats.getStats().addValue(80.0, System.currentTimeMillis() / 1000));
assertFalse(Double.isNaN(oldStats.getStats().getWindowValues()[0]));
assertNotNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123"));
final AlarmSubExpression newExpr = AlarmSubExpression.of(newSubExpression);
bolt.execute(Testing.testTuple(Arrays.asList(EventProcessingBolt.UPDATED,
metricDefinitionAndTenantId, new SubAlarm("123", "1", newExpr)), tupleParam));
metricDefinitionAndTenantId, new SubAlarm("123", "1", newExpr)), tupleParam));
return bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123");
}
@ -353,7 +377,8 @@ public class MetricAggregationBoltTest {
MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID);
MetricDefinitionAndTenantId metricDefinitionAndTenantId = new MetricDefinitionAndTenantId(metricDef1, TENANT_ID);
MetricDefinitionAndTenantId metricDefinitionAndTenantId =
new MetricDefinitionAndTenantId(metricDef1, TENANT_ID);
bolt.getOrCreateSubAlarmStatsRepo(metricDefinitionAndTenantId);
assertNotNull(bolt.subAlarmStatsRepos.get(metricDefinitionAndTenantId).get("123"));
@ -365,17 +390,20 @@ public class MetricAggregationBoltTest {
}
public void shouldGetOrCreateSameMetricData() {
SubAlarmStatsRepository data = bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID));
SubAlarmStatsRepository data =
bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID));
assertNotNull(data);
assertEquals(bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID)), data);
assertEquals(
bolt.getOrCreateSubAlarmStatsRepo(new MetricDefinitionAndTenantId(metricDef1, TENANT_ID)),
data);
}
private Tuple createMetricTuple(final MetricDefinition metricDef,
final Metric metric) {
private Tuple createMetricTuple(final MetricDefinition metricDef, final Metric metric) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(MetricFilteringBolt.FIELDS);
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
return Testing.testTuple(Arrays.asList(new MetricDefinitionAndTenantId(metricDef, TENANT_ID), metric), tupleParam);
return Testing.testTuple(
Arrays.asList(new MetricDefinitionAndTenantId(metricDef, TENANT_ID), metric), tupleParam);
}
private static class MockMetricAggregationBolt extends MetricAggregationBolt {
@ -384,13 +412,14 @@ public class MetricAggregationBoltTest {
private long currentTime;
public MockMetricAggregationBolt(SubAlarmDAO subAlarmDAO) {
super(subAlarmDAO);
super(subAlarmDAO);
}
@Override
protected long currentTimeSeconds() {
if (currentTime != 0)
if (currentTime != 0) {
return currentTime;
}
return super.currentTimeSeconds();
}
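These tests hand-build every input with backtype.storm.Testing.testTuple and MkTupleParam, as createMetricTuple shows; the tick tuple driven through the bolt is built the same way elsewhere in this file, outside the hunks shown here. As a sketch of that pattern (assuming MkTupleParam's setComponent/setStream setters and that testTuple accepts an empty value list, as in storm-core of this era), a tick tuple can be emulated like this:

import backtype.storm.Constants;
import backtype.storm.Testing;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;

import java.util.Collections;

final class TickTupleFactory {
  // Builds a tuple that looks like Storm's periodic tick: it carries no values and
  // originates from the system component on the system tick stream.
  static Tuple createTickTuple() {
    final MkTupleParam param = new MkTupleParam();
    param.setComponent(Constants.SYSTEM_COMPONENT_ID);
    param.setStream(Constants.SYSTEM_TICK_STREAM_ID);
    return Testing.testTuple(Collections.<Object>emptyList(), param);
  }
}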

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock;
@ -23,24 +24,6 @@ import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.mockito.verification.VerificationMode;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.Metric;
@ -51,277 +34,324 @@ import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmMetricDefinition;
import com.hpcloud.streaming.storm.Streams;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import org.mockito.verification.VerificationMode;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@Test
public class MetricFilteringBoltTest {
private List<SubAlarm> subAlarms;
private List<SubAlarm> duplicateMetricSubAlarms;
private final static String TEST_TENANT_ID = "42";
private long metricTimestamp = System.currentTimeMillis()/1000; // Make sure the metric timestamp is always unique
private List<SubAlarm> subAlarms;
private List<SubAlarm> duplicateMetricSubAlarms;
private final static String TEST_TENANT_ID = "42";
private long metricTimestamp = System.currentTimeMillis() / 1000; // Make sure the metric
// timestamp is always unique
@BeforeMethod
protected void beforeMethod() {
@BeforeMethod
protected void beforeMethod() {
final String expression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " +
"and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 " +
"and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
subAlarms = createSubAlarmsForAlarm("111111112222222222233333333334", expression);
final String expression =
"avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 "
+ "and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 "
+ "and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
subAlarms = createSubAlarmsForAlarm("111111112222222222233333333334", expression);
duplicateMetricSubAlarms = createSubAlarmsForAlarm(UUID.randomUUID().toString(),
"max(hpcs.compute.load{instance_id=123,device=42}) > 8");
subAlarms.addAll(duplicateMetricSubAlarms);
duplicateMetricSubAlarms =
createSubAlarmsForAlarm(UUID.randomUUID().toString(),
"max(hpcs.compute.load{instance_id=123,device=42}) > 8");
subAlarms.addAll(duplicateMetricSubAlarms);
}
private List<SubAlarm> createSubAlarmsForAlarm(final String alarmId, final String expression) {
final AlarmExpression alarmExpression = new AlarmExpression(expression);
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
final List<SubAlarm> result = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final SubAlarm subAlarm =
new SubAlarm(UUID.randomUUID().toString(), alarmId, subExpressions.get(i));
result.add(subAlarm);
}
return result;
}
private MockMetricFilteringBolt createBolt(
List<SubAlarmMetricDefinition> initialMetricDefinitions, final OutputCollector collector,
boolean willEmit) {
final MetricDefinitionDAO dao = mock(MetricDefinitionDAO.class);
when(dao.findForAlarms()).thenReturn(initialMetricDefinitions);
MockMetricFilteringBolt bolt = new MockMetricFilteringBolt(dao);
final Map<String, String> config = new HashMap<>();
final TopologyContext context = mock(TopologyContext.class);
bolt.prepare(config, context, collector);
if (willEmit) {
// Validate the prepare emits the initial Metric Definitions
for (final SubAlarmMetricDefinition metricDefinition : initialMetricDefinitions) {
verify(collector, times(1)).emit(
new Values(metricDefinition.getMetricDefinitionAndTenantId(), null));
}
}
return bolt;
}
public void testLagging() {
final OutputCollector collector = mock(OutputCollector.class);
final MockMetricFilteringBolt bolt =
createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector, true);
final long prepareTime = bolt.getCurrentTime();
final MetricDefinition metricDefinition =
subAlarms.get(0).getExpression().getMetricDefinition();
final long oldestTimestamp = prepareTime - MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT;
final Tuple lateMetricTuple =
createMetricTuple(metricDefinition, oldestTimestamp, new Metric(metricDefinition,
oldestTimestamp, 42.0));
bolt.execute(lateMetricTuple);
verify(collector, times(1)).ack(lateMetricTuple);
bolt.setCurrentTime(prepareTime + MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT);
final Tuple lateMetricTuple2 =
createMetricTuple(metricDefinition, prepareTime, new Metric(metricDefinition, prepareTime,
42.0));
bolt.execute(lateMetricTuple2);
verify(collector, times(1)).ack(lateMetricTuple2);
verify(collector, times(1)).emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
bolt.setCurrentTime(prepareTime + 2 * MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT);
long caughtUpTimestamp = bolt.getCurrentTime() - MetricFilteringBolt.MIN_LAG_VALUE_DEFAULT;
final Tuple metricTuple =
createMetricTuple(metricDefinition, caughtUpTimestamp, new Metric(metricDefinition,
caughtUpTimestamp, 42.0));
bolt.execute(metricTuple);
// Metrics are caught up so there should not be another METRICS_BEHIND message
verify(collector, times(1)).ack(metricTuple);
verify(collector, times(1)).emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
}
public void testLaggingTooLong() {
final OutputCollector collector = mock(OutputCollector.class);
final MockMetricFilteringBolt bolt =
createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector, true);
long prepareTime = bolt.getCurrentTime();
final MetricDefinition metricDefinition =
subAlarms.get(0).getExpression().getMetricDefinition();
// Fake sending metrics for MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT *
// MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT seconds
boolean first = true;
// Need to send MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT + 1 metrics because the lag
// message is not output on the first one.
for (int i = 0; i < MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT + 1; i++) {
final Tuple lateMetricTuple =
createMetricTuple(metricDefinition, prepareTime, new Metric(metricDefinition,
prepareTime, 42.0));
bolt.setCurrentTime(prepareTime + MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT);
bolt.execute(lateMetricTuple);
verify(collector, times(1)).ack(lateMetricTuple);
if (!first) {
verify(collector, times(i)).emit(MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
}
first = false;
prepareTime = bolt.getCurrentTime();
}
// One more
long timestamp = bolt.getCurrentTime() - MetricFilteringBolt.LAG_MESSAGE_PERIOD_DEFAULT;
final Tuple metricTuple =
createMetricTuple(metricDefinition, timestamp,
new Metric(metricDefinition, timestamp, 42.0));
bolt.execute(metricTuple);
verify(collector, times(1)).ack(metricTuple);
// Won't be any more of these
verify(collector, times(MetricFilteringBolt.MAX_LAG_MESSAGES_DEFAULT)).emit(
MetricAggregationBolt.METRIC_AGGREGATION_CONTROL_STREAM,
new Values(MetricAggregationBolt.METRICS_BEHIND));
}
private static class MockMetricFilteringBolt extends MetricFilteringBolt {
private static final long serialVersionUID = 1L;
private long currentTimeMillis = System.currentTimeMillis();
public MockMetricFilteringBolt(MetricDefinitionDAO metricDefDAO) {
super(metricDefDAO);
}
@Override
protected long getCurrentTime() {
return currentTimeMillis;
}
public void setCurrentTime(final long currentTimeMillis) {
this.currentTimeMillis = currentTimeMillis;
}
}
public void testNoInitial() {
MetricFilteringBolt.clearMetricDefinitions();
final OutputCollector collector1 = mock(OutputCollector.class);
final MetricFilteringBolt bolt1 =
createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector1, true);
final OutputCollector collector2 = mock(OutputCollector.class);
final MetricFilteringBolt bolt2 =
createBolt(new ArrayList<SubAlarmMetricDefinition>(0), collector2, false);
// First ensure metrics don't pass the filter
verifyMetricFiltered(collector1, bolt1);
verifyMetricFiltered(collector2, bolt2);
sendMetricCreation(collector1, bolt1);
sendMetricCreation(collector2, bolt2);
testDeleteSubAlarms(bolt1, collector1, bolt2, collector2);
}
private void sendMetricCreation(final OutputCollector collector1,
final MetricFilteringBolt bolt1) {
for (final SubAlarm subAlarm : subAlarms) {
final Tuple tuple = createMetricDefinitionTuple(subAlarm);
bolt1.execute(tuple);
verify(collector1, times(1)).ack(tuple);
}
}
private void verifyMetricFiltered(final OutputCollector collector1,
final MetricFilteringBolt bolt1) {
sendMetricsAndVerify(collector1, bolt1, never());
}
private void verifyMetricPassed(final OutputCollector collector1,
final MetricFilteringBolt bolt1) {
sendMetricsAndVerify(collector1, bolt1, times(1));
}
private void sendMetricsAndVerify(final OutputCollector collector1,
final MetricFilteringBolt bolt1, VerificationMode howMany) {
for (final SubAlarm subAlarm : subAlarms) {
// First do a MetricDefinition that is an exact match
final MetricDefinition metricDefinition = subAlarm.getExpression().getMetricDefinition();
final Tuple exactTuple =
createMetricTuple(metricDefinition, metricTimestamp++, new Metric(metricDefinition,
metricTimestamp, 42.0));
bolt1.execute(exactTuple);
verify(collector1, times(1)).ack(exactTuple);
verify(collector1, howMany).emit(new Values(exactTuple.getValue(0), exactTuple.getValue(2)));
// Now do a MetricDefinition with an extra dimension that should still match the SubAlarm
final Map<String, String> extraDimensions = new HashMap<>(metricDefinition.dimensions);
extraDimensions.put("group", "group_a");
final MetricDefinition inexactMetricDef =
new MetricDefinition(metricDefinition.name, extraDimensions);
Metric inexactMetric = new Metric(inexactMetricDef, metricTimestamp, 42.0);
final Tuple inexactTuple =
createMetricTuple(metricDefinition, metricTimestamp++, inexactMetric);
bolt1.execute(inexactTuple);
verify(collector1, times(1)).ack(inexactTuple);
// We want the MetricDefinitionAndTenantId from the exact tuple, but the inexactMetric
verify(collector1, howMany).emit(new Values(exactTuple.getValue(0), inexactMetric));
}
}
public void testAllInitial() {
MetricFilteringBolt.clearMetricDefinitions();
final List<SubAlarmMetricDefinition> initialMetricDefinitions =
new ArrayList<>(subAlarms.size());
for (final SubAlarm subAlarm : subAlarms) {
initialMetricDefinitions.add(new SubAlarmMetricDefinition(subAlarm.getId(),
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
TEST_TENANT_ID)));
}
final OutputCollector collector1 = mock(OutputCollector.class);
final MetricFilteringBolt bolt1 = createBolt(initialMetricDefinitions, collector1, true);
final OutputCollector collector2 = mock(OutputCollector.class);
final MetricFilteringBolt bolt2 = createBolt(initialMetricDefinitions, collector2, false);
testDeleteSubAlarms(bolt1, collector1, bolt2, collector2);
}
private void testDeleteSubAlarms(MetricFilteringBolt bolt1, OutputCollector collector1,
MetricFilteringBolt bolt2, OutputCollector collector2) {
// Now ensure metrics pass the filter
verifyMetricPassed(collector1, bolt1);
verifyMetricPassed(collector2, bolt2);
// Now delete the SubAlarm that duplicated a MetricDefinition
deleteSubAlarms(bolt1, collector1, duplicateMetricSubAlarms);
deleteSubAlarms(bolt2, collector2, duplicateMetricSubAlarms);
// Ensure metrics still pass the filter
verifyMetricPassed(collector1, bolt1);
verifyMetricPassed(collector2, bolt2);
deleteSubAlarms(bolt1, collector1, subAlarms);
// All MetricDefinitions should be deleted
assertEquals(MetricFilteringBolt.sizeMetricDefinitions(), 0);
deleteSubAlarms(bolt2, collector2, subAlarms);
verifyMetricFiltered(collector1, bolt1);
verifyMetricFiltered(collector2, bolt2);
}
private void deleteSubAlarms(MetricFilteringBolt bolt, OutputCollector collector,
final List<SubAlarm> otherSubAlarms) {
for (final SubAlarm subAlarm : otherSubAlarms) {
final Tuple tuple = createMetricDefinitionDeletionTuple(subAlarm);
bolt.execute(tuple);
verify(collector, times(1)).ack(tuple);
}
}
private Tuple createMetricDefinitionTuple(final SubAlarm subAlarm) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
final Tuple tuple =
Testing.testTuple(Arrays.asList(EventProcessingBolt.CREATED,
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
TEST_TENANT_ID), subAlarm), tupleParam);
return tuple;
}
private Tuple createMetricDefinitionDeletionTuple(final SubAlarm subAlarm) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_FIELDS);
tupleParam.setStream(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID);
final Tuple tuple =
Testing.testTuple(Arrays.asList(EventProcessingBolt.DELETED,
new MetricDefinitionAndTenantId(subAlarm.getExpression().getMetricDefinition(),
TEST_TENANT_ID), subAlarm.getId()), tupleParam);
return tuple;
}
private Tuple createMetricTuple(final MetricDefinition metricDefinition, final long timestamp,
final Metric metric) {
final MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields(MetricSpout.FIELDS);
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple =
Testing.testTuple(Arrays.asList(new MetricDefinitionAndTenantId(metricDefinition,
TEST_TENANT_ID), timestamp, metric), tupleParam);
return tuple;
}
}

View File

@ -14,6 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding;
import static org.testng.Assert.assertEquals;
@ -24,39 +25,43 @@ import org.testng.annotations.Test;
@Test
public class PropertyFinderTest {
private static String PROPERTY_NAME = "com.hpcloud.mon.infrastructure.thresholding.Prop";
@BeforeMethod
public void beforeMethod() {
System.clearProperty(PROPERTY_NAME);
}
public void shouldUseNewValue() {
final int expectedValue = 45;
System.setProperty(PROPERTY_NAME, String.valueOf(expectedValue));
assertEquals(expectedValue,
PropertyFinder.getIntProperty(PROPERTY_NAME, 30, 0, Integer.MAX_VALUE));
}
public void shouldUseDefaultValueBecausePropertyNotSet() {
final int defaultValue = 45;
assertEquals(defaultValue,
PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 0, Integer.MAX_VALUE));
}
public void shouldUseDefaultValueBecausePropertyNotANumber() {
final int defaultValue = 45;
System.setProperty(PROPERTY_NAME, "AAA");
assertEquals(defaultValue,
PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 0, Integer.MAX_VALUE));
}
public void shouldUseDefaultValueBecausePropertyTooSmall() {
final int defaultValue = 45;
System.setProperty(PROPERTY_NAME, "0");
assertEquals(defaultValue,
PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 1, Integer.MAX_VALUE));
}
public void shouldUseDefaultValueBecausePropertyTooLarge() {
final int defaultValue = 45;
System.setProperty(PROPERTY_NAME, "10");
assertEquals(defaultValue, PropertyFinder.getIntProperty(PROPERTY_NAME, defaultValue, 9, 9));
}
}

View File

@ -14,24 +14,26 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpcloud.mon.infrastructure.thresholding.deserializer;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.util.Serialization;
import org.testng.annotations.Test;
import java.util.Collections;
@Test
public class EventDeserializerTest {
private static final String ALARM_EXPRESSION =
"avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3";
private static final String ALARM_NAME = "An Alarm";
private static final String ALARM_DESCRIPTION = "An Alarm Description";
private static final String ALARM_ID = "123";
@ -47,8 +49,8 @@ public class EventDeserializerTest {
}
public void shouldDeserializeAlarmUpdatedEvent() {
roundTrip(new AlarmUpdatedEvent(TENANT_ID, ALARM_ID, ALARM_NAME, ALARM_DESCRIPTION,
ALARM_EXPRESSION, AlarmState.OK, AlarmState.OK, false, null, null, null, null));
}
private void roundTrip(Object event) {
@ -57,7 +59,7 @@ public class EventDeserializerTest {
Object expected = Collections.singletonList(Collections.singletonList(event));
assertEquals(deserialized, expected);
}
public void shouldReturnNullOnDeserializeUnknownEvent() {
String unknownEventJson = "{\"alarm-foo-deleted\":{\"tenantId\":\"abc\",\"alarmId\":\"123\"}}";
assertNull(deserializer.deserialize(unknownEventJson.getBytes()));