Added handling for AlarmUpdatedEvent. This is not yet complete because it depends on a decision about the Alarm state after an API update, and may require changes in other places depending on that decision.

Added more unit tests.

Fixed ThresholdingEngineTest to work and produce meaningful results.
This commit is contained in:
Craig Bryant 2014-03-27 16:01:09 -06:00
parent 79962d4a57
commit 021ad77f21
14 changed files with 643 additions and 68 deletions

View File

@ -14,7 +14,7 @@
</prerequisites>
<properties>
<mon.common.version>1.0.0.16</mon.common.version>
<mon.common.version>1.0.0.22</mon.common.version>
<dropwizard.version>0.7.0-rc1</dropwizard.version>
<skipITs>true</skipITs>

View File

@ -10,9 +10,11 @@ import backtype.storm.tuple.Fields;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.hpcloud.mon.infrastructure.thresholding.AlarmEventForwarder;
import com.hpcloud.mon.infrastructure.thresholding.AlarmThresholdingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventProcessingBolt;
import com.hpcloud.mon.infrastructure.thresholding.EventSpout;
import com.hpcloud.mon.infrastructure.thresholding.KafkaAlarmEventForwarder;
import com.hpcloud.mon.infrastructure.thresholding.MetricAggregationBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricFilteringBolt;
import com.hpcloud.mon.infrastructure.thresholding.MetricSpout;
@ -29,17 +31,19 @@ public class TopologyModule extends AbstractModule {
private Config stormConfig;
private IRichSpout metricSpout;
private IRichSpout eventSpout;
private AlarmEventForwarder alarmEventForwarder;
public TopologyModule(ThresholdingConfiguration config) {
this.config = config;
}
public TopologyModule(ThresholdingConfiguration threshConfig, Config stormConfig,
IRichSpout metricSpout, IRichSpout eventSpout) {
IRichSpout metricSpout, IRichSpout eventSpout, AlarmEventForwarder alarmEventForwarder) {
this(threshConfig);
this.stormConfig = stormConfig;
this.metricSpout = metricSpout;
this.eventSpout = eventSpout;
this.alarmEventForwarder = alarmEventForwarder;
}
@Override
@ -59,6 +63,11 @@ public class TopologyModule extends AbstractModule {
return stormConfig;
}
/**
 * Provides the {@code AlarmEventForwarder} for injection. Uses the forwarder passed to
 * the constructor when one was supplied (tests inject a mock this way); otherwise
 * builds a real Kafka-backed forwarder from the configured producer settings.
 */
@Provides
AlarmEventForwarder alarmEventForwarder() {
return alarmEventForwarder == null ? new KafkaAlarmEventForwarder(config.kafkaProducerConfig) : alarmEventForwarder;
}
@Provides
@Named("metrics")
IRichSpout metricSpout() {
@ -110,7 +119,7 @@ public class TopologyModule extends AbstractModule {
// Aggregation / Event -> Thresholding
builder.setBolt("thresholding-bolt",
new AlarmThresholdingBolt(config.database, config.kafkaProducerConfig),
new AlarmThresholdingBolt(config.database),
config.thresholdingBoltThreads)
.fieldsGrouping("aggregation-bolt", new Fields("alarmId"))
.fieldsGrouping("event-bolt", EventProcessingBolt.ALARM_EVENT_STREAM_ID,

View File

@ -1,13 +1,20 @@
package com.hpcloud.mon.infrastructure.thresholding;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Tuple;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.ThresholdingConfiguration;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.domain.model.Alarm;
import com.hpcloud.mon.domain.model.AlarmStateTransitionEvent;
@ -18,11 +25,6 @@ import com.hpcloud.streaming.storm.Logging;
import com.hpcloud.streaming.storm.Streams;
import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
/**
* Determines whether an alarm threshold has been exceeded.
@ -42,7 +44,6 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
private transient Logger LOG;
private DataSourceFactory dbConfig;
private KafkaProducerConfiguration kafkaConfig;
private final Map<String, Alarm> alarms = new HashMap<String, Alarm>();
private String alertExchange;
private String alertRoutingKey;
@ -50,9 +51,8 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
private transient AlarmEventForwarder alarmEventForwarder;
private OutputCollector collector;
public AlarmThresholdingBolt(DataSourceFactory dbConfig, KafkaProducerConfiguration rabbitConfig) {
public AlarmThresholdingBolt(DataSourceFactory dbConfig) {
this.dbConfig = dbConfig;
this.kafkaConfig = rabbitConfig;
}
public AlarmThresholdingBolt(final AlarmDAO alarmDAO,
@ -84,7 +84,9 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
if (AlarmDeletedEvent.class.getSimpleName().equals(eventType))
handleAlarmDeleted(alarmId);
}
else if (AlarmUpdatedEvent.class.getSimpleName().equals(eventType))
handleAlarmUpdated(alarmId);
}
} catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e);
} finally {
@ -105,8 +107,9 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
Injector.registerIfNotBound(AlarmDAO.class, new PersistenceModule(dbConfig));
alarmDAO = Injector.getInstance(AlarmDAO.class);
}
if (alarmEventForwarder == null)
alarmEventForwarder = new KafkaAlarmEventForwarder(kafkaConfig);
if (alarmEventForwarder == null) {
alarmEventForwarder = Injector.getInstance(AlarmEventForwarder.class);
}
}
void evaluateThreshold(Alarm alarm, SubAlarm subAlarm) {
@ -115,17 +118,22 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
AlarmState initialState = alarm.getState();
if (alarm.evaluate()) {
alarmDAO.updateState(alarm.getId(), alarm.getState());
changeAlarmState(alarm, initialState, alarm.getStateChangeReason());
}
}
LOG.debug("Alarm {} transitioned from {} to {}", alarm, initialState, alarm.getState());
AlarmStateTransitionEvent event = new AlarmStateTransitionEvent(alarm.getTenantId(),
alarm.getId(), alarm.getName(), initialState, alarm.getState(),
alarm.getStateChangeReason(), getTimestamp());
try {
alarmEventForwarder.send(alertExchange, alertRoutingKey, Serialization.toJson(event));
} catch (Exception ignore) {
LOG.debug("Failure sending alarm", ignore);
}
/**
 * Persists the alarm's current state via the DAO and forwards a JSON-serialized
 * {@code AlarmStateTransitionEvent} describing the transition.
 *
 * @param alarm the alarm whose (already mutated) state should be persisted
 * @param initialState the state the alarm held before the transition, for the event payload
 * @param stateChangeReason human-readable reason recorded in the transition event
 */
private void changeAlarmState(Alarm alarm, AlarmState initialState,
String stateChangeReason) {
alarmDAO.updateState(alarm.getId(), alarm.getState());
LOG.debug("Alarm {} transitioned from {} to {}", alarm, initialState, alarm.getState());
AlarmStateTransitionEvent event = new AlarmStateTransitionEvent(alarm.getTenantId(),
alarm.getId(), alarm.getName(), initialState, alarm.getState(),
stateChangeReason, getTimestamp());
try {
// Best-effort send: a forwarding failure is logged and swallowed so a broker
// outage cannot stop threshold evaluation. State was already persisted above.
alarmEventForwarder.send(alertExchange, alertRoutingKey, Serialization.toJson(event));
} catch (Exception ignore) {
LOG.debug("Failure sending alarm", ignore);
}
}
@ -138,6 +146,25 @@ public class AlarmThresholdingBolt extends BaseRichBolt {
alarms.remove(alarmId);
}
/**
 * Handles an AlarmUpdatedEvent: evicts the cached Alarm, reloads it from the
 * database, and forces its state to UNDETERMINED (emitting a state-transition
 * event) before re-caching it, since a changed expression invalidates prior
 * evaluation state.
 *
 * @param alarmId id of the alarm that was updated
 */
void handleAlarmUpdated(String alarmId) {
// Flush the Alarm from our cache so it gets read fresh from the database
// when a sub alarm is received
LOG.debug("Received AlarmUpdatedEvent for alarm id {}", alarmId);
alarms.remove(alarmId);
final Alarm alarm = alarmDAO.findById(alarmId);
if (alarm == null)
// Nothing to re-cache; the alarm may have been deleted between the update and now
LOG.error("Failed to locate alarm for id {}", alarmId);
else {
// TODO - Should the API be doing this? If so, does it also do the kafka event?
if (alarm.getState() != AlarmState.UNDETERMINED) {
final AlarmState initialState = alarm.getState();
alarm.setState(AlarmState.UNDETERMINED);
changeAlarmState(alarm, initialState, "Alarm updated by User");
}
alarms.put(alarmId, alarm);
}
}
// NOTE(review): placeholder — always returns null. Presumably intended to build a
// human-readable state-change reason once the Alarm-state-after-update decision
// (see commit message) is made; confirm before relying on this.
String buildStateChangeReason() {
return null;
}

View File

@ -15,6 +15,7 @@ import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.SubAlarm;
@ -65,6 +66,8 @@ public class EventProcessingBolt extends BaseRichBolt {
handle((AlarmCreatedEvent) event);
else if (event instanceof AlarmDeletedEvent)
handle((AlarmDeletedEvent) event);
else if (event instanceof AlarmUpdatedEvent)
handle((AlarmUpdatedEvent) event);
} catch (Exception e) {
LOG.error("Error processing tuple {}", tuple, e);
} finally {
@ -83,19 +86,37 @@ public class EventProcessingBolt extends BaseRichBolt {
void handle(AlarmCreatedEvent event) {
String eventType = event.getClass().getSimpleName();
for (Map.Entry<String, AlarmSubExpression> subExpressionEntry : event.alarmSubExpressions.entrySet()) {
MetricDefinition metricDef = subExpressionEntry.getValue().getMetricDefinition();
collector.emit(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Values(eventType, metricDef,
new SubAlarm(subExpressionEntry.getKey(), event.alarmId, subExpressionEntry.getValue())));
sendAddSubAlarm(eventType, event.alarmId, subExpressionEntry.getKey(), subExpressionEntry.getValue());
}
}
/**
 * Emits a new SubAlarm for the given sub-expression on the metric-sub-alarm event
 * stream, keyed by the sub-expression's metric definition. Shared by the
 * created- and updated-alarm handlers.
 */
private void sendAddSubAlarm(String eventType, String alarmId, String subAlarmId, AlarmSubExpression alarmSubExpression) {
MetricDefinition metricDef = alarmSubExpression.getMetricDefinition();
collector.emit(METRIC_SUB_ALARM_EVENT_STREAM_ID, new Values(eventType, metricDef,
new SubAlarm(subAlarmId, alarmId, alarmSubExpression)));
}
void handle(AlarmDeletedEvent event) {
String eventType = event.getClass().getSimpleName();
for (Map.Entry<String, MetricDefinition> entry : event.subAlarmMetricDefinitions.entrySet()) {
MetricDefinition metricDef = entry.getValue();
collector.emit(METRIC_ALARM_EVENT_STREAM_ID, new Values(eventType, metricDef, entry.getKey()));
sendDeletedSubAlarm(eventType, entry.getKey(), entry.getValue());
}
collector.emit(ALARM_EVENT_STREAM_ID, new Values(eventType, event.alarmId));
}
/**
 * Emits a sub-alarm removal on the metric-alarm event stream so downstream bolts
 * drop state for the given sub-alarm id. Shared by the deleted- and updated-alarm
 * handlers.
 */
private void sendDeletedSubAlarm(String eventType, String subAlarmId, MetricDefinition metricDef) {
collector.emit(METRIC_ALARM_EVENT_STREAM_ID, new Values(eventType, metricDef, subAlarmId));
}
/**
 * Handles an AlarmUpdatedEvent by treating the update as delete-then-add:
 * first emits removals for every old sub-expression, then additions for every
 * new one, and finally notifies the alarm event stream of the update itself.
 */
void handle(AlarmUpdatedEvent event) {
String eventType = event.getClass().getSimpleName();
// Retire all sub-alarms from the pre-update expression
for (Map.Entry<String, AlarmSubExpression> entry : event.oldAlarmSubExpressions.entrySet()) {
sendDeletedSubAlarm(eventType, entry.getKey(), entry.getValue().getMetricDefinition());
}
// Register sub-alarms for the post-update expression
for (Map.Entry<String, AlarmSubExpression> entry : event.newAlarmSubExpressions.entrySet()) {
sendAddSubAlarm(eventType, event.alarmId, entry.getKey(), entry.getValue());
}
collector.emit(ALARM_EVENT_STREAM_ID, new Values(eventType, event.alarmId));
}
}

View File

@ -20,6 +20,7 @@ import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.metric.Metric;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.SubAlarm;
@ -95,11 +96,13 @@ public class MetricAggregationBolt extends BaseRichBolt {
if (EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
String subAlarmId = tuple.getString(2);
if (AlarmDeletedEvent.class.getSimpleName().equals(eventType))
if (AlarmDeletedEvent.class.getSimpleName().equals(eventType) ||
AlarmUpdatedEvent.class.getSimpleName().equals(eventType))
handleAlarmDeleted(metricDefinition, subAlarmId);
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
SubAlarm subAlarm = (SubAlarm) tuple.getValue(2);
if (AlarmCreatedEvent.class.getSimpleName().equals(eventType))
if (AlarmCreatedEvent.class.getSimpleName().equals(eventType) ||
AlarmUpdatedEvent.class.getSimpleName().equals(eventType))
handleAlarmCreated(metricDefinition, subAlarm);
}
}

View File

@ -16,6 +16,7 @@ import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.mon.domain.service.SubAlarmDAO;
@ -45,7 +46,7 @@ public class MetricFilteringBolt extends BaseRichBolt {
private static final Object SENTINAL = new Object();
private transient Logger LOG;
private final DataSourceFactory dbConfig;
private DataSourceFactory dbConfig;
private transient MetricDefinitionDAO metricDefDAO;
private OutputCollector collector;
@ -53,6 +54,10 @@ public class MetricFilteringBolt extends BaseRichBolt {
this.dbConfig = dbConfig;
}
public MetricFilteringBolt(MetricDefinitionDAO metricDefDAO) {
this.metricDefDAO = metricDefDAO;
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields("metricDefinition", "metric"));
@ -74,10 +79,12 @@ public class MetricFilteringBolt extends BaseRichBolt {
LOG.debug("Received {} for {}", eventType, metricDefinition);
if (EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
if (AlarmDeletedEvent.class.getSimpleName().equals(eventType))
if (AlarmDeletedEvent.class.getSimpleName().equals(eventType) ||
AlarmUpdatedEvent.class.getSimpleName().equals(eventType))
METRIC_DEFS.remove(metricDefinition);
} else if (EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID.equals(tuple.getSourceStreamId())) {
if (AlarmCreatedEvent.class.getSimpleName().equals(eventType))
if (AlarmCreatedEvent.class.getSimpleName().equals(eventType) ||
AlarmUpdatedEvent.class.getSimpleName().equals(eventType))
METRIC_DEFS.put(metricDefinition, SENTINAL);
}
}
@ -113,4 +120,11 @@ public class MetricFilteringBolt extends BaseRichBolt {
}
}
}
/**
 * Clears the static METRIC_DEFS cache shared across bolt instances.
 * Only use for testing — production code must never reset this cache.
 */
void clearMetricDefinitions() {
METRIC_DEFS.clear();
}
}

View File

@ -8,6 +8,7 @@ import backtype.storm.tuple.Fields;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.streaming.storm.TupleDeserializer;
import com.hpcloud.util.Serialization;
@ -27,6 +28,7 @@ public class EventDeserializer implements TupleDeserializer, Serializable {
static {
// Register event types
Serialization.registerTarget(AlarmCreatedEvent.class);
Serialization.registerTarget(AlarmUpdatedEvent.class);
Serialization.registerTarget(AlarmDeletedEvent.class);
}

View File

@ -4,26 +4,32 @@ import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.doAnswer;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import backtype.storm.Config;
import backtype.storm.testing.FeederSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import com.google.inject.AbstractModule;
import com.hpcloud.configuration.KafkaProducerConfiguration;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.metric.Metric;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.Alarm;
import com.hpcloud.mon.domain.model.AlarmStateTransitionEvent;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.AlarmDAO;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
@ -32,6 +38,7 @@ import com.hpcloud.mon.infrastructure.thresholding.AlarmEventForwarder;
import com.hpcloud.mon.infrastructure.thresholding.MetricAggregationBolt;
import com.hpcloud.streaming.storm.TopologyTestCase;
import com.hpcloud.util.Injector;
import com.hpcloud.util.Serialization;
/**
* Simulates a real'ish run of the thresholding engine, using seconds instead of minutes for the
@ -41,6 +48,9 @@ import com.hpcloud.util.Injector;
*/
@Test(groups = "integration")
public class ThresholdingEngineTest extends TopologyTestCase {
private static final String TEST_ALARM_TENANT_ID = "bob";
private static final String TEST_ALARM_ID = "1";
private static final String TEST_ALARM_NAME = "test-alarm";
private FeederSpout metricSpout;
private FeederSpout eventSpout;
private AlarmDAO alarmDAO;
@ -48,11 +58,16 @@ public class ThresholdingEngineTest extends TopologyTestCase {
private MetricDefinition cpuMetricDef;
private MetricDefinition memMetricDef;
private MetricDefinitionDAO metricDefinitionDAO;
private final AlarmEventForwarder alarmEventForwarder;
private AlarmState previousState = AlarmState.UNDETERMINED;
private AlarmState expectedState = AlarmState.ALARM;
private volatile int alarmsSent = 0;
public ThresholdingEngineTest() {
// Fixtures
final AlarmExpression expression = new AlarmExpression(
"avg(hpcs.compute.cpu{id=5}, 3) >= 3 times 2 and avg(hpcs.compute.mem{id=5}, 3) >= 5 times 2");
"max(hpcs.compute.cpu{id=5}) >= 3 or max(hpcs.compute.mem{id=5}) >= 5 times 2");
cpuMetricDef = expression.getSubExpressions().get(0).getMetricDefinition();
memMetricDef = expression.getSubExpressions().get(1).getMetricDefinition();
@ -62,8 +77,8 @@ public class ThresholdingEngineTest extends TopologyTestCase {
when(alarmDAO.findById(anyString())).thenAnswer(new Answer<Alarm>() {
@Override
public Alarm answer(InvocationOnMock invocation) throws Throwable {
return new Alarm("1", "bob", "test-alarm", expression, subAlarmsFor(expression),
AlarmState.OK);
return new Alarm(TEST_ALARM_ID, TEST_ALARM_TENANT_ID, TEST_ALARM_NAME, expression, subAlarmsFor(expression),
AlarmState.UNDETERMINED);
}
});
@ -73,9 +88,9 @@ public class ThresholdingEngineTest extends TopologyTestCase {
public List<SubAlarm> answer(InvocationOnMock invocation) throws Throwable {
MetricDefinition metricDef = (MetricDefinition) invocation.getArguments()[0];
if (metricDef.equals(cpuMetricDef))
return Arrays.asList(new SubAlarm("123", "1", expression.getSubExpressions().get(0)));
return Arrays.asList(new SubAlarm("123", TEST_ALARM_ID, expression.getSubExpressions().get(0)));
else if (metricDef.equals(memMetricDef))
return Arrays.asList(new SubAlarm("456", "1", expression.getSubExpressions().get(1)));
return Arrays.asList(new SubAlarm("456", TEST_ALARM_ID, expression.getSubExpressions().get(1)));
return Collections.emptyList();
}
});
@ -84,8 +99,6 @@ public class ThresholdingEngineTest extends TopologyTestCase {
List<MetricDefinition> metricDefs = Arrays.asList(cpuMetricDef, memMetricDef);
when(metricDefinitionDAO.findForAlarms()).thenReturn(metricDefs);
final AlarmEventForwarder alarmEventForwarder = mock(AlarmEventForwarder.class);
// Bindings
Injector.reset();
Injector.registerModules(new AbstractModule() {
@ -93,38 +106,65 @@ public class ThresholdingEngineTest extends TopologyTestCase {
bind(AlarmDAO.class).toInstance(alarmDAO);
bind(SubAlarmDAO.class).toInstance(subAlarmDAO);
bind(MetricDefinitionDAO.class).toInstance(metricDefinitionDAO);
bind(AlarmEventForwarder.class).toInstance(alarmEventForwarder);
}
});
// Config
ThresholdingConfiguration threshConfig = new ThresholdingConfiguration();
threshConfig.sporadicMetricNamespaces = new HashSet<String>();
Serialization.registerTarget(KafkaProducerConfiguration.class);
threshConfig.kafkaProducerConfig = Serialization.fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
Config stormConfig = new Config();
stormConfig.setMaxTaskParallelism(1);
metricSpout = new FeederSpout(new Fields("metricDefinition", "metric"));
eventSpout = new FeederSpout(new Fields("event"));
alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig,
metricSpout, eventSpout));
metricSpout, eventSpout, alarmEventForwarder));
// Evaluate alarm stats every 1 seconds
System.setProperty(MetricAggregationBolt.TICK_TUPLE_SECONDS_KEY, "1");
}
private List<SubAlarm> subAlarmsFor(AlarmExpression expression) {
SubAlarm subAlarm1 = new SubAlarm("123", "1", expression.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", "1", expression.getSubExpressions().get(1));
SubAlarm subAlarm1 = new SubAlarm("123", TEST_ALARM_ID, expression.getSubExpressions().get(0));
SubAlarm subAlarm2 = new SubAlarm("456", TEST_ALARM_ID, expression.getSubExpressions().get(1));
return Arrays.asList(subAlarm1, subAlarm2);
}
public void shouldThreshold() throws Exception {
doAnswer(new Answer<Object>() {
public Object answer(InvocationOnMock invocation) {
final Object[] args = invocation.getArguments();
AlarmStateTransitionEvent event = Serialization.fromJson((String)args[2]);
alarmsSent++;
System.out.printf("Alarm transitioned from %s to %s%n", event.oldState, event.newState);
assertEquals(event.alarmName, TEST_ALARM_NAME);
assertEquals(event.alarmId, TEST_ALARM_ID);
assertEquals(event.tenantId, TEST_ALARM_TENANT_ID);
assertEquals(event.oldState, previousState);
assertEquals(event.newState, expectedState);
previousState = event.newState;
if (event.newState == AlarmState.UNDETERMINED) {
expectedState = AlarmState.ALARM;
}
else if (event.newState == AlarmState.ALARM) {
expectedState = AlarmState.UNDETERMINED;
}
return null;
}
}
)
.when(alarmEventForwarder).send(anyString(), anyString(), anyString());
int waitCount = 0;
int feedCount = 5;
int goodValueCount = 0;
for (int i = 1; i < 40; i++) {
for (int i = 1; i < 40 && alarmsSent == 0; i++) {
if (feedCount > 0) {
System.out.println("Feeding metrics...");
long time = System.currentTimeMillis();
long time = System.currentTimeMillis() / 1000;
metricSpout.feed(new Values(cpuMetricDef, new Metric(cpuMetricDef.name,
cpuMetricDef.dimensions, time, (double) (++goodValueCount == 15 ? 1 : 555))));
metricSpout.feed(new Values(memMetricDef, new Metric(memMetricDef.name,
@ -147,5 +187,15 @@ public class ThresholdingEngineTest extends TopologyTestCase {
e.printStackTrace();
}
}
// Give it some extra time if it needs it for the alarm to come out
for (int i = 0; i < 30 && alarmsSent == 0; i++) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
assertTrue(alarmsSent > 0, "Not enough alarms");
}
}

View File

@ -103,8 +103,6 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
List<MetricDefinition> metricDefs = Arrays.asList(cpuMetricDef, memMetricDef, customMetricDef);
when(metricDefinitionDAO.findForAlarms()).thenReturn(metricDefs);
final AlarmEventForwarder alarmEventForwarder = mock(AlarmEventForwarder.class);
// Bindings
Injector.reset();
Injector.registerModules(new AbstractModule() {
@ -112,7 +110,6 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
bind(AlarmDAO.class).toInstance(alarmDAO);
bind(SubAlarmDAO.class).toInstance(subAlarmDAO);
bind(MetricDefinitionDAO.class).toInstance(metricDefinitionDAO);
bind(AlarmEventForwarder.class).toInstance(alarmEventForwarder);
}
});
@ -123,8 +120,10 @@ public class ThresholdingEngineTest1 extends TopologyTestCase {
metricSpout = new FeederSpout(new Fields("metricDefinition", "metric"));
eventSpout = new FeederSpout(new Fields("event"));
final AlarmEventForwarder alarmEventForwarder = mock(AlarmEventForwarder.class);
Injector.registerModules(new TopologyModule(threshConfig, stormConfig,
metricSpout, eventSpout));
metricSpout, eventSpout, alarmEventForwarder));
// Evaluate alarm stats every 1 seconds
System.setProperty(MetricAggregationBolt.TICK_TUPLE_SECONDS_KEY, "1");

View File

@ -6,6 +6,7 @@ import static org.mockito.Mockito.when;
import static org.mockito.Mockito.times;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -14,8 +15,10 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import com.hpcloud.mon.ThresholdingConfiguration;
@ -70,7 +73,7 @@ public class AlarmThresholdingBoltTest {
alarmDAO = mock(AlarmDAO.class);
bolt = new MockAlarmThreshholdBolt(alarmDAO, alarmEventForwarder);
collector = mock(OutputCollector.class);
final Map<String, String> config = new HashMap<String, String>();
final Map<String, String> config = new HashMap<>();
config.put(ThresholdingConfiguration.ALERTS_EXCHANGE, ALERTS_EXCHANGE);
config.put(ThresholdingConfiguration.ALERTS_ROUTING_KEY, ALERT_ROUTING_KEY);
final TopologyContext context = mock(TopologyContext.class);
@ -82,16 +85,14 @@ public class AlarmThresholdingBoltTest {
* Send a SubAlarm with state set to ALARM.
* Ensure that the Alarm was triggered and sent
*/
@Test
public void simpleAlarmCreation() {
final Tuple tuple = mock(Tuple.class);
when(tuple.getSourceStreamId()).thenReturn(Streams.DEFAULT_STREAM_ID);
when(tuple.getString(0)).thenReturn(alarm.getId());
when(tuple.toString()).thenReturn("Test Alarm Tuple");
final SubAlarm subAlarm = subAlarms.get(0);
subAlarm.setState(AlarmState.ALARM);
when(tuple.getValue(1)).thenReturn(subAlarm);
when(alarmDAO.findById(alarm.getId())).thenReturn(alarm);
MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("alarmId", "subAlarm");
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(alarm.getId(), subAlarm), tupleParam);
bolt.execute(tuple);
bolt.execute(tuple);
verify(collector, times(2)).ack(tuple);
@ -106,9 +107,9 @@ public class AlarmThresholdingBoltTest {
// Now clear the alarm and ensure another notification gets sent out
subAlarm.setState(AlarmState.OK);
when(tuple.getValue(1)).thenReturn(subAlarm);
bolt.execute(tuple);
verify(collector, times(3)).ack(tuple);
final Tuple clearTuple = Testing.testTuple(Arrays.asList(alarm.getId(), subAlarm), tupleParam);
bolt.execute(clearTuple);
verify(collector, times(1)).ack(clearTuple);
final String okJson = "{\"alarm-transitioned\":{\"tenantId\":\"AAAAABBBBBBCCCCC\",\"alarmId\":\"111111112222222222233333333334\",\"alarmName\":\"Test CPU Alarm\",\"oldState\":\"ALARM\",\"newState\":\"OK\",\"stateChangeReason\":\"The alarm threshold(s) have not been exceeded\",\"timestamp\":1395587091}}";
verify(alarmEventForwarder, times(1)).send(ALERTS_EXCHANGE, ALERT_ROUTING_KEY, okJson);
verify(alarmDAO, times(1)).updateState(alarm.getId(), AlarmState.OK);

View File

@ -0,0 +1,226 @@
package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.google.common.collect.Sets;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.Alarm;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.streaming.storm.Streams;
@Test
public class EventProcessingBoltTest {
private EventProcessingBolt bolt;
private OutputCollector collector;
private AlarmExpression alarmExpression;
private Alarm alarm;
private List<SubAlarm> subAlarms;
@BeforeMethod
protected void beforeMethod() {
collector = mock(OutputCollector.class);
bolt = new EventProcessingBolt();
final Map<String, String> config = new HashMap<>();
final TopologyContext context = mock(TopologyContext.class);
bolt.prepare(config, context, collector);
final String alarmId = "111111112222222222233333333334";
final String tenantId = "AAAAABBBBBBCCCCC";
final String name = "Test CPU Alarm";
final String expression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " +
"and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 " +
"and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
alarmExpression = new AlarmExpression(expression);
subAlarms = createSubAlarms(alarmId, alarmExpression);
alarm = new Alarm(alarmId, tenantId, name, alarmExpression, subAlarms, AlarmState.UNDETERMINED);
}
/**
 * Builds one SubAlarm per sub-expression of the given alarm expression.
 *
 * @param alarmId alarm the sub-alarms belong to
 * @param alarmExpression expression whose sub-expressions are wrapped
 * @param ids optional explicit sub-alarm ids, positionally matched to the
 *        sub-expressions; any sub-expression beyond the supplied ids gets a
 *        random UUID instead
 * @return sub-alarms in sub-expression order
 */
private List<SubAlarm> createSubAlarms(final String alarmId,
final AlarmExpression alarmExpression,
String ... ids) {
final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
final List<SubAlarm> subAlarms = new ArrayList<SubAlarm>(subExpressions.size());
for (int i = 0; i < subExpressions.size(); i++) {
final String id;
if (i >= ids.length) {
// No caller-supplied id for this position — generate one
id = UUID.randomUUID().toString();
}
else {
id = ids[i];
}
final SubAlarm subAlarm = new SubAlarm(id, alarmId, subExpressions.get(i));
subAlarms.add(subAlarm);
}
return subAlarms;
}
/**
 * Verifies that executing an AlarmCreatedEvent tuple emits one add-sub-alarm
 * message per sub-expression and acks the tuple exactly once.
 */
public void testAlarmCreatedEvent() {
final Map<String, AlarmSubExpression> expressions = createAlarmSubExpressionMap(alarm);
final AlarmCreatedEvent event = new AlarmCreatedEvent(alarm.getTenantId(), alarm.getId(),
alarm.getName(), alarm.getAlarmExpression().getExpression(), expressions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
final String eventType = event.getClass().getSimpleName();
for (final SubAlarm subAlarm : subAlarms) {
verifyAddedSubAlarm(eventType, subAlarm);
}
verify(collector, times(1)).ack(tuple);
}
/**
 * Wraps an event object in a single-field ("event") Storm test tuple on the
 * default stream, matching what EventProcessingBolt.execute expects.
 */
private Tuple createTuple(final Object event) {
MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("event");
tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
final Tuple tuple = Testing.testTuple(Arrays.asList(event), tupleParam);
return tuple;
}
/**
 * Verifies that executing an AlarmDeletedEvent tuple emits one delete-sub-alarm
 * message per sub-alarm, announces the deletion on the alarm event stream, and
 * acks the tuple exactly once.
 */
public void testAlarmDeletedEvent() {
// Map sub-alarm id -> metric definition, the payload AlarmDeletedEvent carries
final Map<String, MetricDefinition> metricDefinitions = new HashMap<>();
for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
metricDefinitions.put(subAlarm.getId(), subAlarm.getExpression().getMetricDefinition());
}
final AlarmDeletedEvent event = new AlarmDeletedEvent(alarm.getTenantId(), alarm.getId(),
metricDefinitions);
final Tuple tuple = createTuple(event);
bolt.execute(tuple);
final String eventType = event.getClass().getSimpleName();
for (final SubAlarm subAlarm : subAlarms) {
verifyDeletedSubAlarm(eventType, subAlarm);
}
verify(collector, times(1)).emit(EventProcessingBolt.ALARM_EVENT_STREAM_ID,
new Values(event.getClass().getSimpleName(), event.alarmId));
verify(collector, times(1)).ack(tuple);
}
/**
 * Asserts a single delete emission for the given sub-alarm on the
 * metric-alarm event stream.
 */
private void verifyDeletedSubAlarm(final String eventType,
final SubAlarm subAlarm) {
verify(collector, times(1)).emit(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID,
new Values(eventType, subAlarm.getExpression().getMetricDefinition(), subAlarm.getId()));
}
/**
 * Builds an AlarmUpdatedEvent describing the change from the given alarm to the
 * updated expression: unchanged sub expressions are dropped, replaced ones are
 * reported as old, and brand new ones get freshly generated ids.
 */
public static AlarmUpdatedEvent createAlarmUpdatedEvent(final Alarm alarm,
    final AlarmExpression updatedAlarmExpression) {
    // Copy the alarm identity, swapping in the new expression.
    final Alarm updated = new Alarm();
    updated.setId(alarm.getId());
    updated.setTenantId(alarm.getTenantId());
    updated.setName(alarm.getName());
    updated.setExpression(updatedAlarmExpression.getExpression());
    // Partition sub expressions into replaced ("old") and newly added ones.
    final Entry<Map<String, AlarmSubExpression>, Map<String, AlarmSubExpression>> oldAndNew =
        oldAndNewSubExpressionsFor(createAlarmSubExpressionMap(alarm), updatedAlarmExpression);
    return new AlarmUpdatedEvent(updated.getTenantId(), updated.getId(), updated.getName(),
        updated.getAlarmExpression().getExpression(), oldAndNew.getKey(), oldAndNew.getValue());
}
public void testAlarmUpdatedEvent() {
    // Update the alarm so the second and third sub expressions change; the bolt
    // must delete the replaced sub alarms and add the new ones.
    final String updatedExpression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " +
        "and max(hpcs.compute.newMem{instance_id=123,device=42}) > 80 " +
        "and max(hpcs.compute.newLoad{instance_id=123,device=42}) > 5";
    final AlarmExpression updatedAlarmExpression = new AlarmExpression(updatedExpression);
    final AlarmUpdatedEvent updatedEvent = createAlarmUpdatedEvent(alarm, updatedAlarmExpression);
    final Tuple eventTuple = createTuple(updatedEvent);
    bolt.execute(eventTuple);
    verify(collector, times(1)).emit(EventProcessingBolt.ALARM_EVENT_STREAM_ID,
        new Values(updatedEvent.getClass().getSimpleName(), updatedEvent.alarmId));
    // Rebuild the expected sub alarms: unchanged expressions keep their old id,
    // new expressions take the id assigned inside the event.
    final Map<String, AlarmSubExpression> oldSubExpressionMap = createAlarmSubExpressionMap(alarm);
    final List<SubAlarm> expectedSubAlarms = new ArrayList<>();
    for (final AlarmSubExpression subExpression : updatedAlarmExpression.getSubExpressions()) {
        String subAlarmId = find(oldSubExpressionMap, subExpression);
        if (subAlarmId == null) {
            subAlarmId = find(updatedEvent.newAlarmSubExpressions, subExpression);
        }
        expectedSubAlarms.add(new SubAlarm(subAlarmId, alarm.getId(), subExpression));
    }
    final String eventTypeName = updatedEvent.getClass().getSimpleName();
    verifyDeletedSubAlarm(eventTypeName, subAlarms.get(1));
    verifyDeletedSubAlarm(eventTypeName, subAlarms.get(2));
    verifyAddedSubAlarm(eventTypeName, expectedSubAlarms.get(1));
    verifyAddedSubAlarm(eventTypeName, expectedSubAlarms.get(2));
    verify(collector, times(1)).ack(eventTuple);
}
/**
 * Reverse lookup: returns the id mapped to the given sub expression, or null
 * when no entry's value equals it.
 */
private String find(
    final Map<String, AlarmSubExpression> subExpressionsById,
    final AlarmSubExpression target) {
    for (final Entry<String, AlarmSubExpression> candidate : subExpressionsById.entrySet()) {
        if (candidate.getValue().equals(target)) {
            return candidate.getKey();
        }
    }
    return null;
}
/**
 * Splits the update into (old, new) sub expression maps.
 *
 * NOTE: {@code oldSubAlarms} is modified in place — entries whose expression
 * still appears in {@code alarmExpression} are removed, leaving only the
 * replaced ("old") ones, and the same map is returned as the entry key.
 */
private static Entry<Map<String, AlarmSubExpression>, Map<String, AlarmSubExpression>> oldAndNewSubExpressionsFor(
    Map<String, AlarmSubExpression> oldSubAlarms,
    final AlarmExpression alarmExpression) {
    final Set<AlarmSubExpression> previous = new HashSet<>(oldSubAlarms.values());
    final Set<AlarmSubExpression> current = new HashSet<>(alarmExpression.getSubExpressions());
    // Keep only the sub expressions that disappeared in the update
    oldSubAlarms.values().retainAll(Sets.difference(previous, current));
    // Sub expressions present only in the new expression get freshly generated ids
    final Map<String, AlarmSubExpression> added = new HashMap<>();
    for (final AlarmSubExpression expression : Sets.difference(current, previous)) {
        added.put(UUID.randomUUID().toString(), expression);
    }
    return new SimpleEntry<>(oldSubAlarms, added);
}
/** Asserts an addition was emitted as (eventType, metricDefinition, subAlarm). */
private void verifyAddedSubAlarm(final String eventType, final SubAlarm subAlarm) {
    final Values expected =
        new Values(eventType, subAlarm.getExpression().getMetricDefinition(), subAlarm);
    verify(collector, times(1)).emit(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID,
        expected);
}
/** Indexes each sub alarm's expression by its sub alarm id. */
private static Map<String, AlarmSubExpression> createAlarmSubExpressionMap(
    Alarm alarm) {
    final Map<String, AlarmSubExpression> expressionsById = new HashMap<>();
    for (final SubAlarm subAlarm : alarm.getSubAlarms()) {
        expressionsById.put(subAlarm.getId(), subAlarm.getExpression());
    }
    return expressionsById;
}
}

View File

@ -27,6 +27,7 @@ import backtype.storm.testing.MkTupleParam;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.Metric;
@ -67,7 +68,7 @@ public class MetricAggregationBoltTest {
// Fixtures
subAlarm1 = new SubAlarm("123", "1", subExpr1, AlarmState.OK);
subAlarm2 = new SubAlarm("456", "1", subExpr2, AlarmState.OK);
subAlarms = new HashMap<MetricDefinition, SubAlarm>();
subAlarms = new HashMap<>();
subAlarms.put(subExpr1.getMetricDefinition(), subAlarm1);
subAlarms.put(subExpr2.getMetricDefinition(), subAlarm2);
@ -122,19 +123,35 @@ public class MetricAggregationBoltTest {
}
// Verifies a sub alarm is added to the stats repository for an AlarmCreatedEvent.
public void shouldHandleAlarmCreated() {
validateMetricDefAdded(AlarmCreatedEvent.class.getSimpleName());
}
// Verifies a sub alarm is also added when the event type is AlarmUpdatedEvent,
// i.e. updates are handled the same way as creations for new sub expressions.
public void shouldHandleAlarmUpdatedAdd() {
validateMetricDefAdded(AlarmUpdatedEvent.class.getSimpleName());
}
private void validateMetricDefAdded(String eventName) {
MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("eventType", "metricDefinition", "subAlarm");
tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
assertNull(bolt.subAlarmStatsRepos.get(metricDef1));
bolt.execute(Testing.testTuple(Arrays.asList(AlarmCreatedEvent.class.getSimpleName(),
bolt.execute(Testing.testTuple(Arrays.asList(eventName,
metricDef1, new SubAlarm("123", "1", subExpr1)), tupleParam));
assertNotNull(bolt.subAlarmStatsRepos.get(metricDef1).get("123"));
}
// Verifies a sub alarm is removed from the stats repository for an AlarmDeletedEvent.
public void shouldHandleAlarmDeleted() {
validateMetricDefDeleted(AlarmDeletedEvent.class.getSimpleName());
}
// Verifies a sub alarm is also removed when the event type is AlarmUpdatedEvent,
// i.e. updates are handled the same way as deletions for removed sub expressions.
public void shouldHandleAlarmUpdatedDelete() {
validateMetricDefDeleted(AlarmUpdatedEvent.class.getSimpleName());
}
private void validateMetricDefDeleted(String eventName) {
MkTupleParam tupleParam = new MkTupleParam();
tupleParam.setFields("eventType", "metricDefinition", "alarmId");
tupleParam.setStream(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID);
@ -143,7 +160,7 @@ public class MetricAggregationBoltTest {
assertNotNull(bolt.subAlarmStatsRepos.get(metricDef1).get("123"));
bolt.execute(Testing.testTuple(
Arrays.asList(AlarmDeletedEvent.class.getSimpleName(), metricDef1, "123"), tupleParam));
Arrays.asList(eventName, metricDef1, "123"), tupleParam));
assertNull(bolt.subAlarmStatsRepos.get(metricDef1));
}

View File

@ -0,0 +1,189 @@
package com.hpcloud.mon.infrastructure.thresholding;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import backtype.storm.Testing;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.testing.MkTupleParam;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.Metric;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.SubAlarm;
import com.hpcloud.mon.domain.service.MetricDefinitionDAO;
import com.hpcloud.streaming.storm.Streams;
/**
 * Tests that {@link MetricFilteringBolt} only passes metrics through once a matching
 * metric definition has been registered (initially via the DAO, later via alarm
 * created/updated events) and blocks them again after the definition is deleted.
 */
@Test
public class MetricFilteringBoltTest {
    private OutputCollector collector;
    private AlarmExpression alarmExpression;
    private List<SubAlarm> subAlarms;
    private MetricFilteringBolt bolt;

    @BeforeMethod
    protected void beforeMethod() {
        collector = mock(OutputCollector.class);

        final String alarmId = "111111112222222222233333333334";
        final String expression = "avg(hpcs.compute.cpu{instance_id=123,device=42}, 1) > 5 " +
            "and max(hpcs.compute.mem{instance_id=123,device=42}) > 80 " +
            "and max(hpcs.compute.load{instance_id=123,device=42}) > 5";
        alarmExpression = new AlarmExpression(expression);
        subAlarms = createSubAlarms(alarmId, alarmExpression);
    }

    /**
     * Creates the bolt under test backed by a DAO that returns the given metric
     * definitions, and verifies prepare() emits each initial definition.
     */
    private void createBolt(List<MetricDefinition> initialMetricDefinitions) {
        final MetricDefinitionDAO dao = mock(MetricDefinitionDAO.class);
        when(dao.findForAlarms()).thenReturn(initialMetricDefinitions);
        bolt = new MetricFilteringBolt(dao);
        // NOTE(review): clearMetricDefinitions() is called before prepare(), which
        // presumably resets state shared across tests — confirm it is static state.
        bolt.clearMetricDefinitions();
        final Map<String, String> config = new HashMap<>();
        final TopologyContext context = mock(TopologyContext.class);
        bolt.prepare(config, context, collector);

        // Validate the prepare emits the initial Metric Definitions
        for (final MetricDefinition metricDefinition : initialMetricDefinitions) {
            verify(collector, times(1)).emit(new Values(metricDefinition, null));
        }
    }

    /** Builds one SubAlarm per sub expression, using the given ids where supplied. */
    private List<SubAlarm> createSubAlarms(final String alarmId,
        final AlarmExpression alarmExpression,
        String... ids) {
        final List<AlarmSubExpression> subExpressions = alarmExpression.getSubExpressions();
        final List<SubAlarm> subAlarms = new ArrayList<SubAlarm>(subExpressions.size());
        // Arbitrary starting id for sub alarms without an explicit id
        int subAlarmId = 4242;
        for (int i = 0; i < subExpressions.size(); i++) {
            final String id;
            if (i >= ids.length) {
                id = String.valueOf(subAlarmId++);
            } else {
                id = ids[i];
            }
            subAlarms.add(new SubAlarm(id, alarmId, subExpressions.get(i)));
        }
        return subAlarms;
    }

    public void testAlarmCreatedEvent() {
        testAddFilter(new AlarmCreatedEvent());
    }

    public void testAlarmUpdatedEvent() {
        testAddFilter(new AlarmUpdatedEvent());
    }

    /** Metrics must be blocked before their definition is added and pass afterwards. */
    private void testAddFilter(final Object event) {
        createBolt(new ArrayList<MetricDefinition>(0));

        // First ensure metrics don't pass the filter
        for (final SubAlarm subAlarm : subAlarms) {
            final Tuple tuple = createMetricTuple(subAlarm);
            bolt.execute(tuple);
            verify(collector, times(1)).ack(tuple);
            verify(collector, never()).emit(tuple, tuple.getValues());
        }

        // Register each sub alarm's metric definition via the event stream
        for (final SubAlarm subAlarm : subAlarms) {
            final Tuple tuple = createMetricDefinitionTuple(event, subAlarm);
            bolt.execute(tuple);
            verify(collector, times(1)).ack(tuple);
        }

        // Now ensure metrics pass the filter
        for (final SubAlarm subAlarm : subAlarms) {
            final Tuple tuple = createMetricTuple(subAlarm);
            bolt.execute(tuple);
            verify(collector, times(1)).ack(tuple);
            verify(collector, times(1)).emit(tuple, tuple.getValues());
        }
    }

    public void testAlarmDeletedEvent() {
        testDeleteFilter(new AlarmDeletedEvent());
    }

    public void testAlarmUpdatedEventDeletions() {
        testDeleteFilter(new AlarmUpdatedEvent());
    }

    /** Metrics must pass while their definition exists and be blocked after deletion. */
    private void testDeleteFilter(final Object event) {
        final List<MetricDefinition> initialMetricDefinitions =
            new ArrayList<MetricDefinition>(subAlarms.size());
        for (final SubAlarm subAlarm : subAlarms) {
            initialMetricDefinitions.add(subAlarm.getExpression().getMetricDefinition());
        }
        createBolt(initialMetricDefinitions);

        // First ensure metrics pass the filter
        for (final SubAlarm subAlarm : subAlarms) {
            final Tuple tuple = createMetricTuple(subAlarm);
            bolt.execute(tuple);
            verify(collector, times(1)).ack(tuple);
            verify(collector, times(1)).emit(tuple, tuple.getValues());
        }

        // Remove each sub alarm's metric definition via the event stream
        for (final SubAlarm subAlarm : subAlarms) {
            final Tuple tuple = createMetricDefinitionDeletionTuple(event, subAlarm);
            bolt.execute(tuple);
            verify(collector, times(1)).ack(tuple);
        }

        // Now ensure metrics don't pass the filter
        for (final SubAlarm subAlarm : subAlarms) {
            final Tuple tuple = createMetricTuple(subAlarm);
            bolt.execute(tuple);
            verify(collector, times(1)).ack(tuple);
            verify(collector, never()).emit(tuple, tuple.getValues());
        }
    }

    /** Tuple announcing a new sub alarm on the metric/sub-alarm event stream. */
    private Tuple createMetricDefinitionTuple(final Object event,
        final SubAlarm subAlarm) {
        final MkTupleParam tupleParam = new MkTupleParam();
        tupleParam.setFields("eventType", "metricDefinition", "subAlarm");
        tupleParam.setStream(EventProcessingBolt.METRIC_SUB_ALARM_EVENT_STREAM_ID);
        return Testing.testTuple(Arrays.asList(event.getClass().getSimpleName(),
            subAlarm.getExpression().getMetricDefinition(), subAlarm), tupleParam);
    }

    /** Tuple announcing a sub alarm deletion on the metric/alarm event stream. */
    private Tuple createMetricDefinitionDeletionTuple(final Object event,
        final SubAlarm subAlarm) {
        final MkTupleParam tupleParam = new MkTupleParam();
        // Fixed: the third value is the sub alarm id (a String); other producers of
        // METRIC_ALARM_EVENT_STREAM_ID declare this field as "alarmId", not "subAlarm"
        // (see MetricAggregationBoltTest), so use the same field name here.
        tupleParam.setFields("eventType", "metricDefinition", "alarmId");
        tupleParam.setStream(EventProcessingBolt.METRIC_ALARM_EVENT_STREAM_ID);
        return Testing.testTuple(Arrays.asList(event.getClass().getSimpleName(),
            subAlarm.getExpression().getMetricDefinition(), subAlarm.getId()), tupleParam);
    }

    /** Metric tuple on the default stream matching the sub alarm's metric definition. */
    private Tuple createMetricTuple(final SubAlarm subAlarm) {
        final MkTupleParam tupleParam = new MkTupleParam();
        tupleParam.setFields("metricDefinition", "metric");
        tupleParam.setStream(Streams.DEFAULT_STREAM_ID);
        final MetricDefinition metricDefinition = subAlarm.getExpression().getMetricDefinition();
        final Metric metric = new Metric(metricDefinition, System.currentTimeMillis() / 1000, 42.0);
        return Testing.testTuple(Arrays.asList(metricDefinition, metric), tupleParam);
    }
}

View File

@ -7,7 +7,9 @@ import java.util.Collections;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.event.AlarmCreatedEvent;
import com.hpcloud.mon.common.event.AlarmDeletedEvent;
import com.hpcloud.mon.common.event.AlarmUpdatedEvent;
import com.hpcloud.util.Serialization;
/**
@ -15,10 +17,25 @@ import com.hpcloud.util.Serialization;
*/
@Test
public class EventDeserializerTest {
private EventDeserializer deserializer = new EventDeserializer();
private static final String ALARM_EXPRESSION = "avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 OR avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3";
private static final String ALARM_NAME = "An Alarm";
private static final String ALARM_ID = "123";
private static final String TENANT_ID = "abc";
private EventDeserializer deserializer = new EventDeserializer();
public void shouldDeserialize() {
Object event = new AlarmDeletedEvent("abc", "123", null);
// Round-trips an AlarmDeletedEvent through JSON and checks the result (null sub
// alarm metric definitions are allowed).
public void shouldDeserializeAlarmDeletedEvent() {
roundTrip(new AlarmDeletedEvent(TENANT_ID, ALARM_ID, null));
}
// Round-trips an AlarmCreatedEvent through JSON and checks the result (null sub
// expression map is allowed).
public void shouldDeserializeAlarmCreatedEvent() {
roundTrip(new AlarmCreatedEvent(TENANT_ID, ALARM_ID, ALARM_NAME, ALARM_EXPRESSION, null));
}
// Round-trips an AlarmUpdatedEvent through JSON and checks the result (null old
// and new sub expression maps are allowed).
public void shouldDeserializeAlarmUpdatedEvent() {
roundTrip(new AlarmUpdatedEvent(TENANT_ID, ALARM_ID, ALARM_NAME, ALARM_EXPRESSION, null, null));
}
private void roundTrip(Object event) {
String serialized = Serialization.toJson(event);
Object deserialized = deserializer.deserialize(serialized.getBytes());
Object expected = Collections.singletonList(Collections.singletonList(event));