Initial commit

This commit is contained in:
Jonathan Halterman 2014-02-18 11:58:15 -08:00
commit 13c126dcfe
79 changed files with 6188 additions and 0 deletions

10
.gitignore vendored Executable file
View File

@ -0,0 +1,10 @@
*.cache
*.classpath
*.project
*.target/
*.settings/
target/
test-output/
test-config.yml
*.swp
*.iml

1
java/mon-collectd/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

25
java/mon-collectd/pom.xml Normal file
View File

@ -0,0 +1,25 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.hpcloud</groupId>
<artifactId>mon-common</artifactId>
<version>${computedVersion}</version>
</parent>
<artifactId>mon-collectd</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-util</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>collectd-api</artifactId>
<version>5.1.0</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,182 @@
package com.hpcloud.collectd;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;
import org.collectd.api.DataSource;
import org.collectd.api.DataSet;
import org.collectd.api.ValueList;
/**
 * Parses packets in the collectd binary network protocol. A packet is a sequence of "parts", each
 * prefixed by a 2-byte type code and a 2-byte total length; state parts (host, plugin, time, ...)
 * accumulate into a reusable {@link ValueList}, and each TYPE_VALUES part emits one metric.
 *
 * @author Cindy O'Neill
 */
public class CollectdBinaryPacket {
  /** Part type codes defined by the collectd binary protocol. */
  public static final int TYPE_HOST = 0x0000;
  public static final int TYPE_TIME = 0x0001;
  public static final int TYPE_PLUGIN = 0x0002;
  public static final int TYPE_PLUGIN_INSTANCE = 0x0003;
  public static final int TYPE_TYPE = 0x0004;
  public static final int TYPE_TYPE_INSTANCE = 0x0005;
  public static final int TYPE_VALUES = 0x0006;
  public static final int TYPE_INTERVAL = 0x0007;
  public static final int TYPE_HIGH_RES_TIME = 0x0008;
  public static final int TYPE_HIGH_RES_INTERVAL = 0x0009;
  public static final int TYPE_MESSAGE = 0x0100;
  public static final int TYPE_SEVERITY = 0x0101;

  /** Wire field widths, in bytes. */
  public static final int UINT8_LEN = 1;
  public static final int UINT16_LEN = UINT8_LEN * 2;
  public static final int UINT32_LEN = UINT16_LEN * 2;
  public static final int UINT64_LEN = UINT32_LEN * 2;
  /** Each part header is a uint16 type followed by a uint16 length. */
  public static final int HEADER_LEN = UINT16_LEN * 2;
  public static final int BUFFER_SIZE = 1500;
  /** 2^30 — multiplier converting seconds to collectd high-resolution time units. */
  public static final long HIGH_RES_TIME_2_30TH = 1073741824;

  public static CollectdTypesDB cbTypes = CollectdTypesDB.getInstance();

  /**
   * Parses a DatagramPacket received from a collectd agent (via UDP or Multicast).
   *
   * @return a List of ValueList objects; each ValueList represents one collectd metric
   * @throws Exception if a part header declares a length larger than the remaining packet data
   */
  public List<ValueList> parse(DatagramPacket packet) throws Exception {
    byte[] pdata = packet.getData();
    // Use the received datagram's length, not pdata.length: getData() returns the whole
    // receive buffer (typically BUFFER_SIZE), which may be longer than the bytes received.
    int total = packet.getLength();
    ByteArrayInputStream buffer = new ByteArrayInputStream(pdata);
    DataInputStream is = new DataInputStream(buffer);
    List<ValueList> metrics = new ArrayList<ValueList>();
    ValueList vl = new ValueList();
    while (total > CollectdBinaryPacket.HEADER_LEN) {
      int type = is.readUnsignedShort();
      int len = is.readUnsignedShort();
      if (len < CollectdBinaryPacket.HEADER_LEN) {
        // Malformed part header; stop parsing rather than loop forever.
        break;
      }
      total -= len;
      len -= CollectdBinaryPacket.HEADER_LEN; // len is now the payload length
      if (len > total) {
        throw new Exception("Can't parse. Invalid header len = " + len
            + " is greater than packet data total = " + total);
      }
      if (type == CollectdBinaryPacket.TYPE_VALUES) {
        // Read values into the reusable ValueList, then snapshot it as one metric.
        readValues(is, vl);
        ValueList valueList = new ValueList(vl);
        metrics.add(valueList);
        // Clear only the values; host/plugin/type state carries over to the next part.
        vl.clearValues();
      } else if (type == CollectdBinaryPacket.TYPE_TIME) {
        long ltime = is.readLong();
        // Convert seconds to collectd high-resolution format.
        vl.setTime(ltime * CollectdBinaryPacket.HIGH_RES_TIME_2_30TH);
        // When outputting, shift right by 2^30 to get seconds since UTC (Jan 1, 1970).
      } else if (type == CollectdBinaryPacket.TYPE_HIGH_RES_TIME) {
        vl.setTime(is.readLong());
      } else if (type == CollectdBinaryPacket.TYPE_HIGH_RES_INTERVAL) {
        vl.setInterval(is.readLong());
      } else if (type == CollectdBinaryPacket.TYPE_INTERVAL) {
        long linterval = is.readLong();
        // Convert seconds to collectd high-resolution format.
        vl.setInterval(linterval * CollectdBinaryPacket.HIGH_RES_TIME_2_30TH);
      } else if (type == CollectdBinaryPacket.TYPE_HOST) {
        vl.setHost(readString(is, len));
      } else if (type == CollectdBinaryPacket.TYPE_PLUGIN) {
        vl.setPlugin(readString(is, len));
      } else if (type == CollectdBinaryPacket.TYPE_PLUGIN_INSTANCE) {
        vl.setPluginInstance(readString(is, len));
      } else if (type == CollectdBinaryPacket.TYPE_TYPE) {
        String mtype = readString(is, len);
        vl.setType(mtype);
        // Resolve the DataSources for this metric type from the loaded types.db files.
        List<DataSource> ds = cbTypes.getType(mtype);
        if (ds == null) {
          System.out.println("Failed to parse: Metric type was not found in types.db file(s):"
              + mtype);
        }
        vl.setDataSet(new DataSet(mtype, ds));
      } else if (type == CollectdBinaryPacket.TYPE_TYPE_INSTANCE) {
        vl.setTypeInstance(readString(is, len));
      } else {
        System.out.println("Unsupported collectd type received: " + type);
        // Skip over the unknown part's payload; readFully guarantees all bytes are consumed.
        is.readFully(new byte[len]);
      }
    } // end while
    return metrics;
  }

  /** Returns a human-readable name for a DataSource type code, or "unknown". */
  public static String dataSourceTypeToString(int dstype) {
    switch (dstype) {
      case DataSource.TYPE_COUNTER:
        return "counter";
      case DataSource.TYPE_ABSOLUTE:
        return "absolute";
      case DataSource.TYPE_DERIVE:
        return "derive";
      case DataSource.TYPE_GAUGE:
        return "gauge";
      default:
        return "unknown";
    }
  }

  /** Reads a NUL-terminated string of {@code len} bytes (including the terminator). */
  private String readString(DataInputStream is, int len) throws IOException {
    byte[] buf = new byte[len];
    // readFully guarantees all len bytes are consumed; read() may return short.
    is.readFully(buf);
    return new String(buf, 0, len - 1); // -1 -> skip \0
  }

  /**
   * Reads the value part(s) of a TYPE_VALUES part into the given ValueList.
   */
  private void readValues(DataInputStream is, ValueList vl) throws IOException {
    int nvalues = is.readUnsignedShort();
    int[] types = new int[nvalues];
    for (int i = 0; i < nvalues; i++) {
      // Data source type codes are unsigned bytes on the wire.
      types[i] = is.readUnsignedByte();
    }
    for (int i = 0; i < nvalues; i++) {
      Number val;
      if (types[i] == DataSource.TYPE_COUNTER || types[i] == DataSource.TYPE_ABSOLUTE
          || types[i] == DataSource.TYPE_DERIVE) {
        // 64-bit network-byte-order integer. NOTE(review): collectd counters are unsigned
        // 64-bit; values above Long.MAX_VALUE would be misread as negative here — confirm
        // whether such magnitudes can occur in practice.
        val = Double.valueOf(is.readLong());
      } else if (types[i] == DataSource.TYPE_GAUGE) {
        // Gauges are sent as little-endian (x86 host order) IEEE-754 doubles
        // per the collectd protocol documentation.
        byte[] dbuff = new byte[8];
        is.readFully(dbuff);
        ByteBuffer bb = ByteBuffer.wrap(dbuff);
        bb.order(ByteOrder.LITTLE_ENDIAN);
        val = Double.valueOf(bb.getDouble());
      } else {
        System.out.println("Unknown DataSource type found: " + types[i]);
        val = Double.valueOf(0);
      }
      vl.addValue(val);
    } // end for
  }
}

View File

@ -0,0 +1,62 @@
package com.hpcloud.collectd;
import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.collectd.api.DataSet;
import org.collectd.api.DataSource;
import com.google.common.base.Charsets;
import com.google.common.io.Resources;
import com.hpcloud.util.Exceptions;
/**
 * CollectdTypesDB is a singleton that loads the types.db and hpcs_types.db resources into a map.
 * types.db contains number of values, dsnames, and dstypes, accessed by metric "type".
 */
public class CollectdTypesDB {
  private static volatile CollectdTypesDB TYPES_DB;

  // Populated once at construction time via load(); read-only afterwards.
  private Map<String, List<DataSource>> typesMap = new HashMap<String, List<DataSource>>();

  /**
   * Returns the singleton instance, loading the bundled type definitions on first use.
   */
  public static CollectdTypesDB getInstance() {
    if (TYPES_DB == null) {
      synchronized (CollectdTypesDB.class) {
        if (TYPES_DB == null) {
          // Build and fully load a local instance first, and publish it via the volatile
          // field only after loading completes. Assigning TYPES_DB before load() (as the
          // original code did) lets a racing reader observe a partially-loaded singleton.
          CollectdTypesDB instance = new CollectdTypesDB();
          try {
            instance.load(Resources.getResource("collectd/types.db"));
            instance.load(Resources.getResource("collectd/hpcs_types.db"));
          } catch (IOException e) {
            throw Exceptions.uncheck(e, "Error while loading collectd types");
          }
          TYPES_DB = instance;
        }
      }
    }
    return TYPES_DB;
  }

  /** Returns the data sources for the named metric type, or null if unknown. */
  public List<DataSource> getType(String name) {
    return typesMap.get(name);
  }

  /** Returns the full type-name to data-sources map. */
  public Map<String, List<DataSource>> getTypes() {
    return typesMap;
  }

  /**
   * Loads type definitions from a types.db-format resource, one DataSet per line.
   *
   * @throws IOException if the resource cannot be read
   */
  public void load(URL url) throws IOException {
    List<String> lines = Resources.readLines(url, Charsets.UTF_8);
    for (String line : lines) {
      DataSet dataSet = DataSet.parseDataSet(line);
      // parseDataSet returns null for comments/blank lines; skip those.
      if (dataSet != null) {
        String type = dataSet.getType();
        List<DataSource> dsrc = dataSet.getDataSources();
        this.typesMap.put(type, dsrc);
      }
    }
  }
}

View File

@ -0,0 +1,9 @@
tail_counter_default value:COUNTER:U:U
tail_counter_io value:COUNTER:U:U
tail_counter_stacked value:COUNTER:U:U
tail_derive_default value:DERIVE:0:U
tail_derive_io value:DERIVE:0:U
tail_derive_stacked value:DERIVE:0:U
tail_gauge_default value:GAUGE:0:U
tail_gauge_io value:GAUGE:0:U
tail_gauge_stacked value:GAUGE:0:U

View File

@ -0,0 +1,191 @@
absolute value:ABSOLUTE:0:U
apache_bytes value:DERIVE:0:U
apache_connections value:GAUGE:0:65535
apache_idle_workers value:GAUGE:0:65535
apache_requests value:DERIVE:0:U
apache_scoreboard value:GAUGE:0:65535
ath_nodes value:GAUGE:0:65535
ath_stat value:DERIVE:0:U
bitrate value:GAUGE:0:4294967295
bytes value:GAUGE:0:U
cache_eviction value:DERIVE:0:U
cache_operation value:DERIVE:0:U
cache_ratio value:GAUGE:0:100
cache_result value:DERIVE:0:U
cache_size value:GAUGE:0:4294967295
charge value:GAUGE:0:U
compression_ratio value:GAUGE:0:2
compression uncompressed:DERIVE:0:U, compressed:DERIVE:0:U
connections value:DERIVE:0:U
conntrack value:GAUGE:0:4294967295
contextswitch value:DERIVE:0:U
counter value:COUNTER:U:U
cpufreq value:GAUGE:0:U
cpu value:DERIVE:0:U
current_connections value:GAUGE:0:U
current_sessions value:GAUGE:0:U
current value:GAUGE:U:U
delay value:GAUGE:-1000000:1000000
derive value:DERIVE:0:U
df_complex value:GAUGE:0:U
df_inodes value:GAUGE:0:U
df used:GAUGE:0:1125899906842623, free:GAUGE:0:1125899906842623
disk_latency read:GAUGE:0:U, write:GAUGE:0:U
disk_merged read:DERIVE:0:U, write:DERIVE:0:U
disk_octets read:DERIVE:0:U, write:DERIVE:0:U
disk_ops_complex value:DERIVE:0:U
disk_ops read:DERIVE:0:U, write:DERIVE:0:U
disk_time read:DERIVE:0:U, write:DERIVE:0:U
dns_answer value:DERIVE:0:U
dns_notify value:DERIVE:0:U
dns_octets queries:DERIVE:0:U, responses:DERIVE:0:U
dns_opcode value:DERIVE:0:U
dns_qtype_cached value:GAUGE:0:4294967295
dns_qtype value:DERIVE:0:U
dns_query value:DERIVE:0:U
dns_question value:DERIVE:0:U
dns_rcode value:DERIVE:0:U
dns_reject value:DERIVE:0:U
dns_request value:DERIVE:0:U
dns_resolver value:DERIVE:0:U
dns_response value:DERIVE:0:U
dns_transfer value:DERIVE:0:U
dns_update value:DERIVE:0:U
dns_zops value:DERIVE:0:U
email_check value:GAUGE:0:U
email_count value:GAUGE:0:U
email_size value:GAUGE:0:U
entropy value:GAUGE:0:4294967295
fanspeed value:GAUGE:0:U
file_size value:GAUGE:0:U
files value:GAUGE:0:U
fork_rate value:DERIVE:0:U
frequency value:GAUGE:0:U
frequency_offset value:GAUGE:-1000000:1000000
fscache_stat value:DERIVE:0:U
gauge value:GAUGE:U:U
hash_collisions value:DERIVE:0:U
http_request_methods value:DERIVE:0:U
http_requests value:DERIVE:0:U
http_response_codes value:DERIVE:0:U
humidity value:GAUGE:0:100
if_collisions value:DERIVE:0:U
if_dropped rx:DERIVE:0:U, tx:DERIVE:0:U
if_errors rx:DERIVE:0:U, tx:DERIVE:0:U
if_multicast value:DERIVE:0:U
if_octets rx:DERIVE:0:U, tx:DERIVE:0:U
if_packets rx:DERIVE:0:U, tx:DERIVE:0:U
if_rx_errors value:DERIVE:0:U
if_tx_errors value:DERIVE:0:U
invocations value:DERIVE:0:U
io_octets rx:DERIVE:0:U, tx:DERIVE:0:U
io_packets rx:DERIVE:0:U, tx:DERIVE:0:U
ipt_bytes value:DERIVE:0:U
ipt_packets value:DERIVE:0:U
irq value:DERIVE:0:U
latency value:GAUGE:0:65535
links value:GAUGE:0:U
load shortterm:GAUGE:0:100, midterm:GAUGE:0:100, longterm:GAUGE:0:100
md_disks value:GAUGE:0:U
memcached_command value:DERIVE:0:U
memcached_connections value:GAUGE:0:U
memcached_items value:GAUGE:0:U
memcached_octets rx:DERIVE:0:U, tx:DERIVE:0:U
memcached_ops value:DERIVE:0:U
memory value:GAUGE:0:281474976710656
multimeter value:GAUGE:U:U
mutex_operations value:DERIVE:0:U
mysql_commands value:DERIVE:0:U
mysql_handler value:DERIVE:0:U
mysql_locks value:DERIVE:0:U
mysql_log_position value:DERIVE:0:U
mysql_octets rx:DERIVE:0:U, tx:DERIVE:0:U
nfs_procedure value:DERIVE:0:U
nginx_connections value:GAUGE:0:U
nginx_requests value:DERIVE:0:U
node_octets rx:DERIVE:0:U, tx:DERIVE:0:U
node_rssi value:GAUGE:0:255
node_stat value:DERIVE:0:U
node_tx_rate value:GAUGE:0:127
operations value:DERIVE:0:U
percent value:GAUGE:0:100.1
pg_blks value:DERIVE:0:U
pg_db_size value:GAUGE:0:U
pg_n_tup_c value:DERIVE:0:U
pg_n_tup_g value:GAUGE:0:U
pg_numbackends value:GAUGE:0:U
pg_scan value:DERIVE:0:U
pg_xact value:DERIVE:0:U
ping_droprate value:GAUGE:0:100
ping value:GAUGE:0:65535
ping_stddev value:GAUGE:0:65535
players value:GAUGE:0:1000000
power value:GAUGE:0:U
protocol_counter value:DERIVE:0:U
ps_code value:GAUGE:0:9223372036854775807
ps_count processes:GAUGE:0:1000000, threads:GAUGE:0:1000000
ps_cputime user:DERIVE:0:U, syst:DERIVE:0:U
ps_data value:GAUGE:0:9223372036854775807
ps_disk_octets read:DERIVE:0:U, write:DERIVE:0:U
ps_disk_ops read:DERIVE:0:U, write:DERIVE:0:U
ps_pagefaults minflt:DERIVE:0:U, majflt:DERIVE:0:U
ps_rss value:GAUGE:0:9223372036854775807
ps_stacksize value:GAUGE:0:9223372036854775807
ps_state value:GAUGE:0:65535
ps_vm value:GAUGE:0:9223372036854775807
queue_length value:GAUGE:0:U
records value:GAUGE:0:U
requests value:GAUGE:0:U
response_time value:GAUGE:0:U
route_etx value:GAUGE:0:U
route_metric value:GAUGE:0:U
routes value:GAUGE:0:U
serial_octets rx:DERIVE:0:U, tx:DERIVE:0:U
signal_noise value:GAUGE:U:0
signal_power value:GAUGE:U:0
signal_quality value:GAUGE:0:U
snr value:GAUGE:0:U
spam_check value:GAUGE:0:U
spam_score value:GAUGE:U:U
swap_io value:DERIVE:0:U
swap value:GAUGE:0:1099511627776
tcp_connections value:GAUGE:0:4294967295
temperature value:GAUGE:-273.15:U
threads value:GAUGE:0:U
time_dispersion value:GAUGE:-1000000:1000000
timeleft value:GAUGE:0:3600
time_offset value:GAUGE:-1000000:1000000
total_bytes value:DERIVE:0:U
total_connections value:DERIVE:0:U
total_operations value:DERIVE:0:U
total_requests value:DERIVE:0:U
total_sessions value:DERIVE:0:U
total_threads value:DERIVE:0:U
total_time_in_ms value:DERIVE:0:U
total_values value:DERIVE:0:U
uptime value:GAUGE:0:4294967295
users value:GAUGE:0:65535
vcpu value:GAUGE:0:U
virt_cpu_total value:DERIVE:0:U
virt_vcpu value:DERIVE:0:U
vmpage_action value:DERIVE:0:U
vmpage_faults minflt:DERIVE:0:U, majflt:DERIVE:0:U
vmpage_io in:DERIVE:0:U, out:DERIVE:0:U
vmpage_number value:GAUGE:0:4294967295
volatile_changes value:GAUGE:0:U
voltage_threshold value:GAUGE:U:U, threshold:GAUGE:U:U
voltage value:GAUGE:U:U
vs_memory value:GAUGE:0:9223372036854775807
vs_processes value:GAUGE:0:65535
vs_threads value:GAUGE:0:65535
#
# Legacy types
# (required for the v5 upgrade target)
#
arc_counts demand_data:COUNTER:0:U, demand_metadata:COUNTER:0:U, prefetch_data:COUNTER:0:U, prefetch_metadata:COUNTER:0:U
arc_l2_bytes read:COUNTER:0:U, write:COUNTER:0:U
arc_l2_size value:GAUGE:0:U
arc_ratio value:GAUGE:0:U
arc_size current:GAUGE:0:U, target:GAUGE:0:U, minlimit:GAUGE:0:U, maxlimit:GAUGE:0:U
mysql_qcache hits:COUNTER:0:U, inserts:COUNTER:0:U, not_cached:COUNTER:0:U, lowmem_prunes:COUNTER:0:U, queries_in_cache:GAUGE:0:U
mysql_threads running:GAUGE:0:U, connected:GAUGE:0:U, cached:GAUGE:0:U, created:COUNTER:0:U

1
java/mon-dropwizard/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

View File

@ -0,0 +1,49 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.hpcloud</groupId>
<artifactId>mon-common</artifactId>
<version>${computedVersion}</version>
</parent>
<artifactId>mon-dropwizard</artifactId>
<packaging>jar</packaging>
<properties>
<dropwizard.version>0.6.2</dropwizard.version>
</properties>
<dependencies>
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-util</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.yammer.dropwizard</groupId>
<artifactId>dropwizard-core</artifactId>
<version>${dropwizard.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.yammer.dropwizard</groupId>
<artifactId>dropwizard-db</artifactId>
<version>${dropwizard.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.yammer.dropwizard</groupId>
<artifactId>dropwizard-jdbi</artifactId>
<version>${dropwizard.version}</version>
<scope>provided</scope>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>ps-testing</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,42 @@
package com.hpcloud.dropwizard.persistence;
import org.skife.jdbi.v2.DBI;
import com.google.inject.AbstractModule;
import com.google.inject.Provider;
import com.google.inject.ProvisionException;
import com.google.inject.Scopes;
import com.yammer.dropwizard.config.Environment;
import com.yammer.dropwizard.db.DatabaseConfiguration;
import com.yammer.dropwizard.jdbi.DBIFactory;
/**
 * Guice module that binds DBI types to a single database instance created for the given
 * environment and database configuration.
 *
 * @author Jonathan Halterman
 */
public class DatabaseModule extends AbstractModule {
  private final Environment environment;
  private final DatabaseConfiguration config;

  public DatabaseModule(Environment environment, DatabaseConfiguration config) {
    this.environment = environment;
    this.config = config;
  }

  @Override
  protected void configure() {
    bind(DatabaseConfiguration.class).toInstance(config);
    // DBI creation is deferred to a provider and singleton-scoped so that exactly one
    // database instance is built and shared.
    bind(DBI.class).toProvider(dbiProvider()).in(Scopes.SINGLETON);
  }

  /** Builds the provider that lazily constructs the shared DBI instance. */
  private Provider<DBI> dbiProvider() {
    return new Provider<DBI>() {
      @Override
      public DBI get() {
        try {
          return new DBIFactory().build(environment, config, "platform");
        } catch (ClassNotFoundException e) {
          throw new ProvisionException("Failed to provision DBI", e);
        }
      }
    };
  }
}

View File

@ -0,0 +1,79 @@
package com.hpcloud.dropwizard;
import java.io.File;
import java.net.URL;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Test;
import com.yammer.dropwizard.Service;
import com.yammer.dropwizard.cli.Cli;
import com.yammer.dropwizard.config.Bootstrap;
import com.yammer.dropwizard.config.Configuration;
/**
 * Extend and have at it. Note, this class is in the "integration" group. So the server will only
 * start when running "integration" grouped tests.
 *
 * @author Jonathan Halterman
 * @param <S> service type
 * @param <C> configuration type
 */
@Test(groups = "integration")
public abstract class DropwizardTestCase<S extends Service<C>, C extends Configuration> {
  // Shared across every subclass in the suite so the server is started at most once per JVM.
  protected static volatile Service<?> service;
  // The stoppable command that actually runs the server; used by stopService().
  private static TestableServerCommand<?> command;

  private final Class<S> serviceType;
  private final Class<C> configurationType;
  // Filesystem path or classpath resource path of the YAML config file.
  private final String configPath;

  protected DropwizardTestCase(Class<S> serviceType, Class<C> configurationType, String configPath) {
    this.serviceType = serviceType;
    this.configurationType = configurationType;
    this.configPath = configPath;
  }

  /**
   * Starts the service once per suite, using double-checked locking on the static
   * {@code service} field so concurrent subclasses don't start it twice.
   */
  @BeforeSuite
  public void startService() throws Exception {
    if (service == null) {
      synchronized (DropwizardTestCase.class) {
        if (service == null) {
          try {
            // Resolve the config: first as a plain file path, then as a classpath resource.
            File configFile = new File(configPath);
            if (!configFile.exists()) {
              URL configURL = DropwizardTestCase.class.getResource(configPath);
              if (configURL != null)
                configFile = new File(configURL.getFile());
              if (!configFile.exists()) {
                // NOTE(review): message typo — "file" should presumably be "find".
                System.err.println("Could not file config file: " + configPath);
                System.exit(1);
              }
            }
            S localService = serviceType.newInstance();
            service = localService;
            // Bootstrap the service with the stoppable test-server command and run it
            // through the standard dropwizard CLI.
            final Bootstrap<C> bootstrap = new Bootstrap<C>(localService);
            command = new TestableServerCommand<C>(localService, configurationType);
            bootstrap.addCommand(command);
            localService.initialize(bootstrap);
            final Cli cli = new Cli(this.getClass(), bootstrap);
            cli.run(new String[] { "test-server", configFile.getAbsolutePath() });
          } catch (Exception e) {
            throw new RuntimeException(e);
          }
        }
      }
    }
  }

  /** Stops the server (if one was started) after the whole suite completes. */
  @AfterSuite
  public static void stopService() throws Throwable {
    if (command != null) {
      synchronized (DropwizardTestCase.class) {
        command.stop();
      }
    }
  }
}

View File

@ -0,0 +1,122 @@
package com.hpcloud.dropwizard;
import static com.google.common.base.Preconditions.checkArgument;
import java.lang.management.ManagementFactory;
import java.net.URI;
import java.net.URISyntaxException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import net.sourceforge.argparse4j.inf.Namespace;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Server;
import com.yammer.dropwizard.Service;
import com.yammer.dropwizard.cli.ConfiguredCommand;
import com.yammer.dropwizard.config.Bootstrap;
import com.yammer.dropwizard.config.Configuration;
import com.yammer.dropwizard.config.Environment;
import com.yammer.dropwizard.config.ServerFactory;
import com.yammer.dropwizard.lifecycle.ServerLifecycleListener;
import com.yammer.dropwizard.validation.Validator;
/**
 * Normally ServerCommand is in charge of starting the service, but that's not particularly well
 * suited for integration testing as it joins the current thread and keeps the Server instance to
 * itself.
 *
 * This implementation is based on the original ServerCommand, but in addition to being stoppable it
 * provides a few convenience methods for tests.
 *
 * @author Kim A. Betti <kim@developer-b.com>
 * @author Jonathan Halterman
 */
public class TestableServerCommand<C extends Configuration> extends ConfiguredCommand<C> {
  private final Service<C> service;
  private final Class<C> configurationType;
  // The running Jetty server; assigned in run() and used by stop()/isRunning()/connector lookups.
  private Server server;

  public TestableServerCommand(Service<C> service, Class<C> configurationType) {
    super("test-server", "Starts an HTTP test-server running the service");
    this.service = service;
    this.configurationType = configurationType;
  }

  @Override
  protected Class<C> getConfigurationClass() {
    return configurationType;
  }

  /**
   * Builds the environment and Jetty server for the configuration and starts it without
   * joining the current thread, so the test harness retains control.
   */
  @Override
  protected void run(Bootstrap<C> bootstrap, Namespace namespace, C configuration) throws Exception {
    Environment environment = new Environment(bootstrap.getName(), configuration,
        bootstrap.getObjectMapperFactory().copy(), new Validator());
    bootstrap.runWithBundles(configuration, environment);
    ServerFactory serverFactory = new ServerFactory(configuration.getHttpConfiguration(),
        environment.getName());
    // Assign the field — the original declared a shadowing local `Server server` here,
    // which left this.server null so stop() silently did nothing and isRunning() /
    // getRootUriForConnector() threw NullPointerException.
    server = serverFactory.buildServer(environment);
    service.run(configuration, environment);
    try {
      for (ServerLifecycleListener listener : environment.getServerListeners())
        listener.serverStarted(server);
      server.start();
    } catch (Exception e) {
      System.out.println("Unable to start test-server, shutting down");
      e.printStackTrace();
      server.stop();
    }
  }

  /** Stops Jetty, then unregisters the Dropwizard logging MBean even if the stop fails. */
  public void stop() throws Exception {
    try {
      stopJetty();
    } finally {
      unRegisterLoggingMBean();
    }
  }

  /**
   * We won't be able to run more then a single test in the same JVM instance unless we do some
   * tidying and un-register a logging m-bean added by Dropwizard.
   */
  private void unRegisterLoggingMBean() throws Exception {
    MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
    ObjectName loggerObjectName = new ObjectName("com.yammer:type=Logging");
    if (mBeanServer.isRegistered(loggerObjectName))
      mBeanServer.unregisterMBean(loggerObjectName);
  }

  private void stopJetty() throws Exception {
    if (server != null) {
      server.stop();
      checkArgument(server.isStopped());
    }
  }

  /** Returns true if the underlying Jetty server reports it is running. */
  public boolean isRunning() {
    return server.isRunning();
  }

  /**
   * Returns the root HTTP URI for the named Jetty connector, defaulting the host to
   * "localhost" when the connector has no configured host.
   */
  public URI getRootUriForConnector(String connectorName) {
    try {
      Connector connector = getConnectorNamed(connectorName);
      String host = connector.getHost() != null ? connector.getHost() : "localhost";
      return new URI("http://" + host + ":" + connector.getPort());
    } catch (URISyntaxException e) {
      throw new IllegalStateException(e);
    }
  }

  private Connector getConnectorNamed(String name) {
    Connector[] connectors = server.getConnectors();
    for (Connector connector : connectors)
      if (connector.getName().equals(name))
        return connector;
    throw new IllegalStateException("No connector named " + name);
  }
}

View File

@ -0,0 +1,22 @@
package com.hpcloud.dropwizard.test;
import static org.testng.Assert.assertEquals;
import org.testng.annotations.Test;
import com.hpcloud.dropwizard.DropwizardTestCase;
@Test(groups = "integration")
public class DropwizardTestCaseTest extends DropwizardTestCase<TestService, TestConfiguration> {
  public DropwizardTestCaseTest() {
    super(TestService.class, TestConfiguration.class, "test-config.yml");
  }

  public void test1() {
    assertServiceInitializedExactlyOnce();
  }

  public void test2() {
    assertServiceInitializedExactlyOnce();
  }

  // Both tests check the same invariant: the suite-wide service was started exactly once,
  // regardless of how many tests run against it.
  private void assertServiceInitializedExactlyOnce() {
    assertEquals(((TestService) service).initialized.get(), 1);
  }
}

View File

@ -0,0 +1,7 @@
package com.hpcloud.dropwizard.test;
import com.yammer.dropwizard.config.Configuration;
// Minimal dropwizard configuration for the integration-test fixture; TestService.run()
// asserts that `name` is populated (from test-config.yml) with the value "test".
public class TestConfiguration extends Configuration {
  public String name;
}

View File

@ -0,0 +1,39 @@
package com.hpcloud.dropwizard.test;
import static org.testng.Assert.assertEquals;
import java.util.concurrent.atomic.AtomicInteger;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import com.yammer.dropwizard.Service;
import com.yammer.dropwizard.config.Bootstrap;
import com.yammer.dropwizard.config.Environment;
// Dropwizard service fixture for DropwizardTestCaseTest. Counts how many times run() is
// invoked so tests can assert the suite starts the service exactly once.
public class TestService extends Service<TestConfiguration> {
  // Incremented on each run() invocation; read by DropwizardTestCaseTest.
  public AtomicInteger initialized = new AtomicInteger();

  public static void main(String[] args) throws Exception {
    new TestService().run(args);
  }

  // Trivial root resource so the environment has something to serve.
  @Path("/")
  @Produces(MediaType.APPLICATION_JSON)
  public static class TestResource {
  }

  @Override
  public void initialize(Bootstrap<TestConfiguration> bootstrap) {
    bootstrap.setName("test-as-a-service");
  }

  @Override
  public void run(TestConfiguration config, Environment environment) throws Exception {
    environment.addResource(new TestResource());
    initialized.incrementAndGet();
    // Sanity-checks that the YAML config was actually parsed into the configuration object.
    assertEquals(config.name, "test");
  }
}

1
java/mon-model/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

149
java/mon-model/pom.xml Normal file
View File

@ -0,0 +1,149 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.hpcloud</groupId>
<artifactId>mon-common</artifactId>
<version>${computedVersion}</version>
</parent>
<artifactId>mon-model</artifactId>
<packaging>jar</packaging>
<properties>
<antlr.version>4.2</antlr.version>
<jackson.version>2.3.1</jackson.version>
</properties>
<dependencies>
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-util</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr4-runtime</artifactId>
<version>${antlr.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.2.1</version>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-testing</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr4-maven-plugin</artifactId>
<version>${antlr.version}</version>
<configuration>
<sourceDirectory>src/main/resources</sourceDirectory>
<outputDirectory>${project.build.directory}/generated-sources</outputDirectory>
<grammars>AlarmParser.g4</grammars>
</configuration>
<executions>
<execution>
<id>antlr</id>
<phase>generate-sources</phase>
<goals>
<goal>antlr4</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<executions>
<execution>
<id>add-source</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>${project.build.directory}/generated-sources</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludedGroups>performance,functional,integration,database,slow</excludedGroups>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration>
<groups>performance,functional,integration,database,slow</groups>
<skipTests>${skipITs}</skipTests>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
</goals>
<configuration>
<includes>
<include>**/*.class</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-scm-plugin</artifactId>
<configuration>
<tag>${project.version}</tag>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,91 @@
package com.hpcloud.mon.common.event;
import java.io.Serializable;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonRootName;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
/**
 * Represents an alarm having been created.
 *
 * @author Jonathan Halterman
 */
@JsonRootName(value = "alarm-created")
public class AlarmCreatedEvent implements Serializable {
  private static final long serialVersionUID = -2971178340115415059L;

  public String tenantId;
  public String alarmId;
  public String alarmName;
  public String alarmExpression;
  public Map<String, AlarmSubExpression> alarmSubExpressions;

  /** Required for deserialization. */
  public AlarmCreatedEvent() {
  }

  public AlarmCreatedEvent(String tenantId, String alarmId, String alarmName,
      String alarmExpression, Map<String, AlarmSubExpression> alarmSubExpressions) {
    this.tenantId = tenantId;
    this.alarmId = alarmId;
    this.alarmName = alarmName;
    this.alarmExpression = alarmExpression;
    this.alarmSubExpressions = alarmSubExpressions;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null || getClass() != obj.getClass())
      return false;
    AlarmCreatedEvent other = (AlarmCreatedEvent) obj;
    // Null-safe field-by-field comparison, equivalent to the hand-rolled null checks.
    return java.util.Objects.equals(alarmExpression, other.alarmExpression)
        && java.util.Objects.equals(alarmId, other.alarmId)
        && java.util.Objects.equals(alarmName, other.alarmName)
        && java.util.Objects.equals(alarmSubExpressions, other.alarmSubExpressions)
        && java.util.Objects.equals(tenantId, other.tenantId);
  }

  @Override
  public int hashCode() {
    // Objects.hash applies the same 31-based accumulation (starting at 1, null -> 0) in the
    // same field order as the previous hand-rolled version, so hash values are unchanged.
    return java.util.Objects.hash(alarmExpression, alarmId, alarmName, alarmSubExpressions,
        tenantId);
  }

  @Override
  public String toString() {
    return String.format(
        "AlarmCreatedEvent [tenantId=%s, alarmId=%s, alarmName=%s, expression=%s]", tenantId,
        alarmId, alarmName, alarmExpression);
  }
}

View File

@ -0,0 +1,75 @@
package com.hpcloud.mon.common.event;
import java.io.Serializable;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonRootName;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
/**
 * Represents an alarm having been deleted.
 *
 * @author Jonathan Halterman
 */
@JsonRootName(value = "alarm-deleted")
public class AlarmDeletedEvent implements Serializable {
  private static final long serialVersionUID = -845914476456541787L;

  public String tenantId;
  public String alarmId;
  public Map<String, MetricDefinition> subAlarmMetricDefinitions;

  public AlarmDeletedEvent() {
  }

  public AlarmDeletedEvent(String tenantId, String alarmId,
      Map<String, MetricDefinition> subAlarmMetricDefinitions) {
    this.tenantId = tenantId;
    this.alarmId = alarmId;
    this.subAlarmMetricDefinitions = subAlarmMetricDefinitions;
  }

  /** Null-safe equality check for a pair of field values. */
  private static boolean eq(Object a, Object b) {
    return a == null ? b == null : a.equals(b);
  }

  /** Null-safe hash for a single field value (null hashes to 0). */
  private static int hashOf(Object o) {
    return o == null ? 0 : o.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null || getClass() != obj.getClass())
      return false;
    AlarmDeletedEvent other = (AlarmDeletedEvent) obj;
    return eq(alarmId, other.alarmId)
        && eq(subAlarmMetricDefinitions, other.subAlarmMetricDefinitions)
        && eq(tenantId, other.tenantId);
  }

  @Override
  public int hashCode() {
    // Same accumulation and field order as the previous hand-rolled version.
    final int prime = 31;
    int result = 1;
    result = prime * result + hashOf(alarmId);
    result = prime * result + hashOf(subAlarmMetricDefinitions);
    result = prime * result + hashOf(tenantId);
    return result;
  }

  @Override
  public String toString() {
    return String.format("AlarmDeletedEvent [tenantId=%s, alarmId=%s, subAlarmIds=%s]", tenantId,
        alarmId, subAlarmMetricDefinitions);
  }
}

View File

@ -0,0 +1,60 @@
package com.hpcloud.mon.common.event;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonRootName;
/**
 * Represents an endpoint having been deleted.
 *
 * @author Jonathan Halterman
 */
@JsonRootName(value = "endpoint-deleted")
public class EndpointDeletedEvent implements Serializable {
  private static final long serialVersionUID = -2100681808766534155L;

  public String tenantId;
  public String endpointId;

  public EndpointDeletedEvent() {
  }

  public EndpointDeletedEvent(String tenantId, String endpointId) {
    this.tenantId = tenantId;
    this.endpointId = endpointId;
  }

  /** Null-safe equality check for a pair of field values. */
  private static boolean eq(Object a, Object b) {
    return a == null ? b == null : a.equals(b);
  }

  /** Null-safe hash for a single field value (null hashes to 0). */
  private static int hashOf(Object o) {
    return o == null ? 0 : o.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null || getClass() != obj.getClass())
      return false;
    EndpointDeletedEvent other = (EndpointDeletedEvent) obj;
    return eq(endpointId, other.endpointId) && eq(tenantId, other.tenantId);
  }

  @Override
  public int hashCode() {
    // Same accumulation and field order as the previous hand-rolled version.
    final int prime = 31;
    int result = 1;
    result = prime * result + hashOf(endpointId);
    result = prime * result + hashOf(tenantId);
    return result;
  }

  @Override
  public String toString() {
    return String.format("EndpointDeletedEvent [tenantId=%s, endpointId=%s]", tenantId, endpointId);
  }
}

View File

@ -0,0 +1,61 @@
package com.hpcloud.mon.common.event;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonRootName;
/**
 * Represents a change to a tenant's metering/monitoring level.
 *
 * @author Todd Walk
 */
@JsonRootName(value = "metering-change")
public class MeteringChangeEvent implements Serializable {
  private static final long serialVersionUID = 4380444000660995888L;

  public String tenantId;
  public String monitoringLevel;

  public MeteringChangeEvent() {
  }

  public MeteringChangeEvent(String tenantId, String monitoringLevel) {
    this.tenantId = tenantId;
    this.monitoringLevel = monitoringLevel;
  }

  /** Null-safe equality check for a pair of field values. */
  private static boolean eq(Object a, Object b) {
    return a == null ? b == null : a.equals(b);
  }

  /** Null-safe hash for a single field value (null hashes to 0). */
  private static int hashOf(Object o) {
    return o == null ? 0 : o.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null || getClass() != obj.getClass())
      return false;
    MeteringChangeEvent other = (MeteringChangeEvent) obj;
    return eq(monitoringLevel, other.monitoringLevel) && eq(tenantId, other.tenantId);
  }

  @Override
  public int hashCode() {
    // Same accumulation and field order as the previous hand-rolled version.
    final int prime = 31;
    int result = 1;
    result = prime * result + hashOf(monitoringLevel);
    result = prime * result + hashOf(tenantId);
    return result;
  }

  @Override
  public String toString() {
    return String.format("MeteringChangeEvent [tenantId=%s, monitoringLevel=%s]", tenantId,
        monitoringLevel);
  }
}

View File

@ -0,0 +1,82 @@
package com.hpcloud.mon.common.event;
import java.io.Serializable;
import java.util.Map;
import javax.annotation.Nullable;
import com.fasterxml.jackson.annotation.JsonRootName;
/**
 * Represents a subscription having been created.
 *
 * @author Jonathan Halterman
 */
@JsonRootName(value = "subscription-created")
public class SubscriptionCreatedEvent implements Serializable {
  private static final long serialVersionUID = -9048721263249931364L;

  public String tenantId;
  public String endpointId;
  public String namespace;
  public Map<String, String> dimensions;

  public SubscriptionCreatedEvent() {
  }

  public SubscriptionCreatedEvent(String tenantId, String endpointId, String namespace,
      @Nullable Map<String, String> dimensions) {
    this.tenantId = tenantId;
    this.endpointId = endpointId;
    this.namespace = namespace;
    this.dimensions = dimensions;
  }

  /** Null-safe equality check for a pair of field values. */
  private static boolean eq(Object a, Object b) {
    return a == null ? b == null : a.equals(b);
  }

  /** Null-safe hash for a single field value (null hashes to 0). */
  private static int hashOf(Object o) {
    return o == null ? 0 : o.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null || getClass() != obj.getClass())
      return false;
    SubscriptionCreatedEvent other = (SubscriptionCreatedEvent) obj;
    return eq(dimensions, other.dimensions) && eq(endpointId, other.endpointId)
        && eq(namespace, other.namespace) && eq(tenantId, other.tenantId);
  }

  @Override
  public int hashCode() {
    // Same accumulation and field order as the previous hand-rolled version.
    final int prime = 31;
    int result = 1;
    result = prime * result + hashOf(dimensions);
    result = prime * result + hashOf(endpointId);
    result = prime * result + hashOf(namespace);
    result = prime * result + hashOf(tenantId);
    return result;
  }

  @Override
  public String toString() {
    return String.format(
        "SubscriptionCreatedEvent [tenantId=%s, endpointId=%s, namespace=%s, dimensions=%s]",
        tenantId, endpointId, namespace, dimensions);
  }
}

View File

@ -0,0 +1,88 @@
package com.hpcloud.mon.common.event;
import java.io.Serializable;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonRootName;
/**
 * Represents a subscription having been deleted.
 *
 * @author Jonathan Halterman
 */
@JsonRootName(value = "subscription-deleted")
public class SubscriptionDeletedEvent implements Serializable {
  private static final long serialVersionUID = 4380444000660995762L;

  public String tenantId;
  public String subscriptionId;
  public String endpointId;
  public String namespace;
  public Map<String, String> dimensions;

  public SubscriptionDeletedEvent() {
  }

  public SubscriptionDeletedEvent(String tenantId, String subscriptionId, String endpointId,
      String namespace, Map<String, String> dimensions) {
    this.tenantId = tenantId;
    this.subscriptionId = subscriptionId;
    this.endpointId = endpointId;
    this.namespace = namespace;
    this.dimensions = dimensions;
  }

  /** Null-safe equality check for a pair of field values. */
  private static boolean eq(Object a, Object b) {
    return a == null ? b == null : a.equals(b);
  }

  /** Null-safe hash for a single field value (null hashes to 0). */
  private static int hashOf(Object o) {
    return o == null ? 0 : o.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null || getClass() != obj.getClass())
      return false;
    SubscriptionDeletedEvent other = (SubscriptionDeletedEvent) obj;
    return eq(dimensions, other.dimensions) && eq(endpointId, other.endpointId)
        && eq(namespace, other.namespace) && eq(subscriptionId, other.subscriptionId)
        && eq(tenantId, other.tenantId);
  }

  @Override
  public int hashCode() {
    // Same accumulation and field order as the previous hand-rolled version.
    final int prime = 31;
    int result = 1;
    result = prime * result + hashOf(dimensions);
    result = prime * result + hashOf(endpointId);
    result = prime * result + hashOf(namespace);
    result = prime * result + hashOf(subscriptionId);
    result = prime * result + hashOf(tenantId);
    return result;
  }

  @Override
  public String toString() {
    return String.format(
        "SubscriptionDeletedEvent [tenantId=%s, subscriptionId=%s, endpointId=%s, namespace=%s, dimensions=%s]",
        tenantId, subscriptionId, endpointId, namespace, dimensions);
  }
}

View File

@ -0,0 +1,305 @@
package com.hpcloud.mon.common.model;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import com.google.common.primitives.Ints;
/**
 * Utilities for working with Namespaces: per-namespace registries of supported/required
 * dimensions, known metric names, and dimension-value validation rules.
 *
 * @author Jonathan Halterman
 * @author Todd Walk
 */
public class Namespaces {
  public static final String COMPUTE_NAMESPACE = "hpcs.compute";
  public static final String VOLUME_NAMESPACE = "hpcs.volume";
  public static final String RESERVED_NAMESPACE_PREFIX = "hpcs.";
  public static final String OBJECT_STORE_NAMESPACE = "hpcs.object-store";

  // Canonical 8-4-4-4-12 UUID layout. NOTE(review): \w matches [A-Za-z0-9_], which is
  // slightly looser than strict hexadecimal.
  private static final Pattern UUID_PATTERN = Pattern.compile("\\w{8}-\\w{4}-\\w{4}-\\w{4}-\\w{12}");

  // Per-namespace registries, populated once in the static initializer below.
  private static final Map<String, List<String>> NAMESPACE_SUPPORTED_DIMENSIONS;
  private static final Map<String, List<String>> NAMESPACE_REQUIRED_DIMENSIONS;
  private static final Map<String, List<String>> NAMESPACE_METRICS;
  private static final Map<String, NamespaceInfo> NAMESPACE_INFO;

  private static final NamespaceInfo COMPUTE_NAMESPACE_INFO = new NamespaceInfo() {
    // US West
    private final List<String> requiredDimsFor11 = Arrays.asList("instance_id", "instance_uuid",
        "az");
    // US East
    private final List<String> requiredDimsFor2 = Arrays.asList("instance_id");

    @Override
    public List<String> getRequiredDimensions(String serviceVersion) {
      if ("2".equals(serviceVersion))
        return requiredDimsFor2;
      else
        return requiredDimsFor11;
    }

    @Override
    public String getResourceIdDimension(String serviceVersion) {
      if ("2".equals(serviceVersion))
        return "instance_id";
      else
        return "instance_uuid";
    }

    @Override
    public String getSecondaryResourceIdDimension(String serviceVersion) {
      if ("1.1".equals(serviceVersion))
        return "instance_id";
      return null;
    }

    @Override
    public boolean isValid(String dimensionName, String dimensionValue) {
      // instance_id values of UUID length (36) must match the UUID pattern; any other
      // length is accepted as-is.
      if ("instance_id".equals(dimensionName))
        return dimensionValue.length() != 36 || UUID_PATTERN.matcher(dimensionValue).matches();
      if ("instance_uuid".equals(dimensionName))
        return UUID_PATTERN.matcher(dimensionValue).matches();
      // az must parse as an integer
      if ("az".equals(dimensionName))
        return Ints.tryParse(dimensionValue) != null;
      return true;
    }
  };

  private static final NamespaceInfo VOLUME_NAMESPACE_INFO = new NamespaceInfo() {
    // US West is 1.1
    private final List<String> requiredDimsFor11 = Arrays.asList("instance_id", "instance_uuid",
        "az");
    // US East is 1.0
    private final List<String> requiredDimsFor10 = Arrays.asList("instance_id");
    // US East & West (nova 13.5+)
    private final List<String> requiredDimsFor2 = Arrays.asList("instance_id", "disk");

    @Override
    public List<String> getRequiredDimensions(String serviceVersion) {
      if ("1.0".equals(serviceVersion))
        return requiredDimsFor10;
      else if ("1.1".equals(serviceVersion))
        return requiredDimsFor11;
      else
        return requiredDimsFor2;
    }

    @Override
    public String getResourceIdDimension(String serviceVersion) {
      if ("1.1".equals(serviceVersion))
        return "instance_uuid";
      else
        return "instance_id";
    }

    @Override
    public String getSecondaryResourceIdDimension(String serviceVersion) {
      if ("1.1".equals(serviceVersion))
        return "instance_id";
      return null;
    }

    @Override
    public boolean isValid(String dimensionName, String dimensionValue) {
      // Same rules as compute: UUID-length instance_ids must look like UUIDs, az is numeric.
      if ("instance_id".equals(dimensionName))
        return dimensionValue.length() != 36 || UUID_PATTERN.matcher(dimensionValue).matches();
      if ("instance_uuid".equals(dimensionName))
        return UUID_PATTERN.matcher(dimensionValue).matches();
      if ("az".equals(dimensionName))
        return Ints.tryParse(dimensionValue) != null;
      return true;
    }
  };

  private static final NamespaceInfo OBJECT_STORE_NAMESPACE_INFO = new NamespaceInfo() {
    // Loosen restrictions for swift (until we re-write all of this code)
    private final List<String> requiredDims = Collections.emptyList();

    @Override
    public List<String> getRequiredDimensions(String serviceVersion) {
      return requiredDims;
    }

    @Override
    public String getResourceIdDimension(String serviceVersion) {
      return "container";
    }

    @Override
    public String getSecondaryResourceIdDimension(String serviceVersion) {
      return null;
    }

    @Override
    public boolean isValid(String dimensionName, String dimensionValue) {
      // Commented out to loosen that restriction for swift (until we re-write all of this code)
      // if ("container".equals(dimensionName))
      // return dimensionValue.length() < 256 || !dimensionValue.contains("/");
      return true;
    }
  };

  static {
    NAMESPACE_SUPPORTED_DIMENSIONS = new HashMap<String, List<String>>();
    NAMESPACE_REQUIRED_DIMENSIONS = new HashMap<String, List<String>>();
    NAMESPACE_METRICS = new HashMap<String, List<String>>();
    NAMESPACE_INFO = new HashMap<String, NamespaceInfo>();

    // Compute
    NAMESPACE_SUPPORTED_DIMENSIONS.put(COMPUTE_NAMESPACE,
        Arrays.asList("instance_uuid", "instance_id", "az"));
    NAMESPACE_REQUIRED_DIMENSIONS.put(COMPUTE_NAMESPACE,
        Arrays.asList("instance_id", "instance_uuid", "az"));
    NAMESPACE_METRICS.put(COMPUTE_NAMESPACE, Arrays.asList("cpu_total_time",
        "cpu_total_utilization", "disk_read_ops", "disk_read_ops_count", "disk_write_ops",
        "disk_write_ops_count", "disk_read_bytes", "disk_read_bytes_count", "disk_write_bytes",
        "disk_write_bytes_count", "net_in_bytes", "net_in_bytes_count", "net_out_bytes",
        "net_out_bytes_count", "net_in_packets", "net_in_packets_count", "net_out_packets",
        "net_out_packets_count", "net_in_dropped", "net_in_dropped_count", "net_out_dropped",
        "net_out_dropped_count", "net_in_errors", "net_in_errors_count", "net_out_errors",
        "net_out_errors_count"));
    NAMESPACE_INFO.put(COMPUTE_NAMESPACE, COMPUTE_NAMESPACE_INFO);

    // Volume
    NAMESPACE_SUPPORTED_DIMENSIONS.put(VOLUME_NAMESPACE,
        Arrays.asList("instance_uuid", "instance_id", "az", "disk"));
    NAMESPACE_REQUIRED_DIMENSIONS.put(VOLUME_NAMESPACE,
        Arrays.asList("instance_id", "instance_uuid", "az"));
    NAMESPACE_METRICS.put(VOLUME_NAMESPACE, Arrays.asList("volume_read_ops", "volume_write_ops",
        "volume_read_bytes", "volume_write_bytes", "volume_read_time", "volume_write_time",
        "volume_idle_time"));
    NAMESPACE_INFO.put(VOLUME_NAMESPACE, VOLUME_NAMESPACE_INFO);

    // Object Store
    // Commented out to loosen that restrictions for swift (until we re-write all of this code)
    // NAMESPACE_SUPPORTED_DIMENSIONS.put(OBJECT_STORE_NAMESPACE, Arrays.asList("container"));
    // NAMESPACE_REQUIRED_DIMENSIONS.put(OBJECT_STORE_NAMESPACE, Arrays.asList("container"));
    NAMESPACE_METRICS.put(OBJECT_STORE_NAMESPACE,
        Arrays.asList("project_write_bytes", "project_read_bytes", "project_put_ops",
            "project_get_ops", "container_write_bytes", "container_read_bytes",
            "container_put_ops", "container_get_ops", "container_write_bytes_proxy",
            "container_read_bytes_proxy", "container_put_ops_proxy", "container_get_ops_proxy",
            "project_bytes_used", "container_bytes_used", "number_of_objects",
            "number_of_containers", "projects_bytes_used_replica", "container_bytes_used_replica",
            "number_of_objects_replica", "number_of_containers_replica"));
    NAMESPACE_INFO.put(OBJECT_STORE_NAMESPACE, OBJECT_STORE_NAMESPACE_INFO);
  }

  /**
   * Provides information for a namespace.
   */
  private interface NamespaceInfo {
    /**
     * Returns the required dimensions for the {@code serviceVersion}.
     */
    List<String> getRequiredDimensions(String serviceVersion);

    /**
     * Returns the dimension name that represents the resource id for the {@code serviceVersion},
     * else {@code null}.
     */
    String getResourceIdDimension(String serviceVersion);

    /**
     * Returns the dimension name that represents the secondary resource id for the
     * {@code serviceVersion}, else {@code null}.
     */
    String getSecondaryResourceIdDimension(String serviceVersion);

    /**
     * Returns whether the {@code dimensionValue} is valid for the {@code dimensionName}.
     */
    boolean isValid(String dimensionName, String dimensionValue);
  }

  /**
   * Returns the required dimensions for the {@code namespace} and {@code serviceVersion}, else
   * empty list.
   */
  public static List<String> getRequiredDimensions(String namespace, String serviceVersion) {
    NamespaceInfo info = NAMESPACE_INFO.get(namespace);
    if (info != null)
      return info.getRequiredDimensions(serviceVersion);
    return Collections.emptyList();
  }

  /**
   * Returns the dimension name that represents the resource id for the {@code namespace}, else
   * {@code null}.
   */
  public static String getResourceIdDimension(String namespace, String serviceVersion) {
    NamespaceInfo info = NAMESPACE_INFO.get(namespace);
    if (info != null)
      return info.getResourceIdDimension(serviceVersion);
    return null;
  }

  /**
   * Returns the dimension name that represents the secondary resource id for the {@code namespace},
   * else {@code null}.
   */
  public static String getSecondaryResourceIdDimension(String namespace, String serviceVersion) {
    NamespaceInfo info = NAMESPACE_INFO.get(namespace);
    if (info != null)
      return info.getSecondaryResourceIdDimension(serviceVersion);
    return null;
  }

  /**
   * Returns whether the {@code namespace} is "core", which should correspond to metrics containing
   * an instance_id dimension. Core membership is defined by presence in the NAMESPACE_METRICS
   * registry.
   */
  public static boolean isCore(String namespace) {
    return NAMESPACE_METRICS.get(namespace) != null;
  }

  /**
   * Returns whether the {@code namespace} is reserved (hpcs).
   */
  public static boolean isReserved(String namespace) {
    // NOTE(review): toLowerCase() uses the default locale; the "hpcs." prefix contains no
    // locale-sensitive letters, but consider Locale.ROOT if the prefix ever changes.
    return namespace.toLowerCase().startsWith(RESERVED_NAMESPACE_PREFIX);
  }

  /**
   * Returns whether the {@code dimensionName} is supported for the {@code namespace}. Namespaces
   * with no registered dimension list accept any dimension name; "metric_name" and "device" are
   * always accepted.
   */
  public static boolean isValidDimensionName(String namespace, String dimensionName) {
    List<String> list = NAMESPACE_SUPPORTED_DIMENSIONS.get(namespace);
    return list == null || list.isEmpty() || list.contains(dimensionName)
        || dimensionName.equals("metric_name") || dimensionName.equals("device");
  }

  /**
   * Returns whether the {@code dimensionValue} is valid for the {@code namespace} and
   * {@code dimensionName}. Unregistered namespaces accept any value.
   */
  public static boolean isValidDimensionValue(String namespace, String dimensionName,
      String dimensionValue) {
    NamespaceInfo info = NAMESPACE_INFO.get(namespace);
    if (info != null)
      return info.isValid(dimensionName, dimensionValue);
    return true;
  }

  // TODO remove this when 1.0 API is removed
  // Maps a legacy "subject" onto the namespace-appropriate dimension key.
  public static void putDimensionForSubject(String namespace, String subject,
      Map<String, String> dimensions) {
    if (COMPUTE_NAMESPACE.equals(namespace))
      dimensions.put("device", subject);
    else if (VOLUME_NAMESPACE.equals(namespace))
      dimensions.put("disk", subject);
    else
      dimensions.put("subject", subject);
  }

  /**
   * Returns whether the {@code metricName} is supported for the {@code namespace}. Namespaces
   * with no registered metric list accept any metric name.
   */
  public static boolean isValidMetricname(String namespace, String metricName) {
    List<String> list = NAMESPACE_METRICS.get(namespace);
    return list == null || list.isEmpty() || list.contains(metricName);
  }
}

View File

@ -0,0 +1,40 @@
package com.hpcloud.mon.common.model.alarm;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.hpcloud.util.stats.Statistic;
import com.hpcloud.util.stats.Statistics.Average;
import com.hpcloud.util.stats.Statistics.Count;
import com.hpcloud.util.stats.Statistics.Max;
import com.hpcloud.util.stats.Statistics.Min;
import com.hpcloud.util.stats.Statistics.Sum;
/**
 * Aggregate functions that may be applied to a metric within an alarm expression.
 *
 * @author Jonathan Halterman
 */
public enum AggregateFunction {
  MIN, MAX, SUM, COUNT, AVG;

  /**
   * Returns the AggregateFunction for the case-insensitive {@code text}.
   *
   * @throws IllegalArgumentException if {@code text} does not name a function
   * @throws NullPointerException if {@code text} is null
   */
  @JsonCreator
  public static AggregateFunction fromJson(String text) {
    // Locale.ROOT avoids locale-sensitive case mapping: under a Turkish default locale,
    // "min".toUpperCase() yields "MİN" (dotted capital I) and valueOf would throw.
    return valueOf(text.toUpperCase(java.util.Locale.ROOT));
  }

  @Override
  public String toString() {
    // Locale.ROOT for the symmetric reason: "MIN".toLowerCase() under tr-TR yields a
    // dotless ı, breaking round-tripping through fromJson.
    return name().toLowerCase(java.util.Locale.ROOT);
  }

  /**
   * Returns the Statistic implementation corresponding to {@code aggregateFunction}, else
   * {@code null} (including for a null argument).
   *
   * <p>Note: this operates on its argument rather than {@code this}, preserving the existing
   * call contract for callers.
   */
  public Class<? extends Statistic> toStatistic(AggregateFunction aggregateFunction) {
    // Enum identity comparison is null-safe and equivalent to the previous equals() chain.
    if (aggregateFunction == AVG)
      return Average.class;
    if (aggregateFunction == COUNT)
      return Count.class;
    if (aggregateFunction == SUM)
      return Sum.class;
    if (aggregateFunction == MIN)
      return Min.class;
    if (aggregateFunction == MAX)
      return Max.class;
    return null;
  }
}

View File

@ -0,0 +1,135 @@
package com.hpcloud.mon.common.model.alarm;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.hpcloud.mon.common.model.alarm.AlarmExpressionLexer;
import com.hpcloud.mon.common.model.alarm.AlarmExpressionParser;
import com.hpcloud.util.Stack;
/**
 * Alarm expression value object. Parses an alarm expression string into a postfix list of
 * sub-expressions and boolean operators, which can then be evaluated against per-sub-expression
 * boolean results.
 *
 * @author Todd Walk
 * @author Jonathan Halterman
 */
public class AlarmExpression {
  // Original expression text, retained for getExpression(); not part of equality.
  private final String expression;
  /** Postfix list of expression elements. */
  private final List<Object> elements;
  // Lazily-computed cache; volatile for safe publication. The benign race in
  // getSubExpressions() may compute the list more than once but always yields equal results.
  private volatile List<AlarmSubExpression> subExpressions;

  /**
   * Creates an AlarmExpression for the {@code expression} string.
   *
   * @throws IllegalArgumentException if the {@code expression} is invalid
   */
  public AlarmExpression(String expression) {
    this.expression = expression;
    // ANTLR wiring: removeErrorListeners() must precede addErrorListener() so only the
    // custom listener (which throws IllegalArgumentException) handles syntax errors.
    AlarmExpressionParser parser = new AlarmExpressionParser(new CommonTokenStream(
        new AlarmExpressionLexer(new ANTLRInputStream(expression))));
    parser.removeErrorListeners();
    parser.addErrorListener(new AlarmExpressionErrorListener());
    parser.setBuildParseTree(true);
    ParserRuleContext tree = parser.start();
    // false = compound expressions allowed (vs. a single "simple" sub-expression)
    AlarmSubExpressionListener listener = new AlarmSubExpressionListener(false);
    ParseTreeWalker walker = new ParseTreeWalker();
    walker.walk(listener, tree);
    elements = listener.getElements();
  }

  /**
   * Creates an AlarmExpression for the {@code expression} string.
   *
   * @throws IllegalArgumentException if the {@code expression} is invalid
   */
  @JsonCreator
  public static AlarmExpression of(String expression) {
    return new AlarmExpression(expression);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null)
      return false;
    if (getClass() != obj.getClass())
      return false;
    AlarmExpression other = (AlarmExpression) obj;
    // Equality compares only the parsed postfix elements, not the raw expression string, so
    // textually different but structurally identical expressions are equal. hashCode agrees.
    if (elements == null) {
      if (other.elements != null)
        return false;
    } else if (!elements.equals(other.elements))
      return false;
    return true;
  }

  /**
   * Evaluates the {@code subExpressionValues} against the expression, returning true if the values
   * evaluate to true for the expression, else false.
   *
   * @throws IllegalArgumentException if any of the expected sub-expressions cannot be found in
   *           {@code subExpressionValues}
   */
  public boolean evaluate(Map<AlarmSubExpression, Boolean> subExpressionValues) {
    // Standard postfix evaluation: push operand values, pop two per operator.
    Stack<Object> stack = new Stack<Object>();
    for (Object element : elements) {
      if (element instanceof AlarmSubExpression) {
        Boolean value = subExpressionValues.get(element);
        if (value == null)
          throw new IllegalArgumentException("Expected sub-expression was not found for " + element);
        stack.push(value);
      } else {
        BooleanOperator operator = (BooleanOperator) element;
        // NOTE(review): operandA is the top of the stack (pushed second); the operand order
        // is immaterial for commutative AND/OR operators.
        Boolean operandA = (Boolean) stack.pop();
        Boolean operandB = (Boolean) stack.pop();
        stack.push(operator.evaluate(operandA, operandB));
      }
    }
    return (Boolean) stack.pop();
  }

  /**
   * Returns the alarm's expression.
   */
  public String getExpression() {
    return expression;
  }

  /**
   * Returns the sub expressions for the expression in the order that they appear.
   */
  public List<AlarmSubExpression> getSubExpressions() {
    if (subExpressions != null)
      return subExpressions;
    List<AlarmSubExpression> subExpressions = new ArrayList<AlarmSubExpression>();
    for (Object element : elements)
      if (element instanceof AlarmSubExpression)
        subExpressions.add((AlarmSubExpression) element);
    this.subExpressions = subExpressions;
    return subExpressions;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((elements == null) ? 0 : elements.hashCode());
    return result;
  }

  @Override
  public String toString() {
    return String.format("AlarmExpression [elements=%s]", elements);
  }
}

View File

@ -0,0 +1,14 @@
package com.hpcloud.mon.common.model.alarm;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
/**
 * Converts ANTLR syntax errors into {@link IllegalArgumentException}s so that invalid alarm
 * expressions fail fast at parse time instead of being reported to the console.
 */
class AlarmExpressionErrorListener extends BaseErrorListener {
  @Override
  public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
      int charPositionInLine, String msg, RecognitionException e) {
    String message = String.format("Syntax Error [%d] %s: %s", charPositionInLine, msg,
        offendingSymbol);
    throw new IllegalArgumentException(message);
  }
}

View File

@ -0,0 +1,53 @@
package com.hpcloud.mon.common.model.alarm;
import com.fasterxml.jackson.annotation.JsonCreator;
/**
 * Alarm operator.
 *
 * @author Jonathan Halterman
 */
public enum AlarmOperator {
  LT("<"), LTE("<="), GT(">"), GTE(">=");

  private final String operatorSymbols;

  private AlarmOperator(String operatorSymbols) {
    this.operatorSymbols = operatorSymbols;
  }

  @JsonCreator
  public static AlarmOperator fromJson(String text) {
    return valueOf(text.toUpperCase());
  }

  /**
   * Returns the operator obtained by swapping the operands of {@code op}
   * (e.g. a &lt; b is equivalent to b &gt; a).
   */
  public static AlarmOperator reverseOperator(AlarmOperator op) {
    switch (op) {
      case LT:
        return GT;
      case GT:
        return LT;
      case LTE:
        return GTE;
      default:
        return LTE;
    }
  }

  /** Returns the result of comparing {@code lhs} against {@code rhs} with this operator. */
  public boolean evaluate(double lhs, double rhs) {
    if (this == LT)
      return lhs < rhs;
    if (this == LTE)
      return lhs <= rhs;
    if (this == GT)
      return lhs > rhs;
    return this == GTE && lhs >= rhs;
  }

  @Override
  public String toString() {
    return operatorSymbols;
  }
}

View File

@ -0,0 +1,15 @@
package com.hpcloud.mon.common.model.alarm;
import com.fasterxml.jackson.annotation.JsonCreator;
/**
 * States that an alarm can be in.
 *
 * @author Jonathan Halterman
 */
public enum AlarmState {
  UNDETERMINED, OK, ALARM;

  /**
   * Returns the AlarmState for the case-insensitive {@code text}.
   *
   * @throws IllegalArgumentException if {@code text} does not name a state
   * @throws NullPointerException if {@code text} is null
   */
  @JsonCreator
  public static AlarmState fromJson(String text) {
    // Locale.ROOT avoids locale-sensitive case mapping: under a Turkish default locale,
    // "undetermined".toUpperCase() yields "UNDETERMİNED" (dotted capital I) and valueOf
    // would throw.
    return valueOf(text.toUpperCase(java.util.Locale.ROOT));
  }
}

View File

@ -0,0 +1,180 @@
package com.hpcloud.mon.common.model.alarm;
import java.io.Serializable;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.hpcloud.mon.common.model.alarm.AlarmExpressionLexer;
import com.hpcloud.mon.common.model.alarm.AlarmExpressionParser;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
/**
* Alarm sub expression value object.
*
* @author Todd Walk
* @author Jonathan Halterman
*/
public class AlarmSubExpression implements Serializable {
private static final long serialVersionUID = -7458129503846747592L;
public static final int DEFAULT_PERIOD = 60;
public static final int DEFAULT_PERIODS = 1;
private AggregateFunction function;
private MetricDefinition metricDefinition;
private AlarmOperator operator;
private double threshold;
private int period;
private int periods;
public AlarmSubExpression(AggregateFunction function, MetricDefinition metricDefinition,
AlarmOperator operator, double threshold, int period, int periods) {
this.function = function;
this.metricDefinition = metricDefinition;
this.operator = operator;
this.threshold = threshold;
this.period = period;
this.periods = periods;
}
AlarmSubExpression() {
}
/**
* Returns an AlarmSubExpression for the {@code expression} string.
*
* @throws IllegalArgumentException if the {@code expression} is invalid
*/
@JsonCreator
public static AlarmSubExpression of(String expression) {
AlarmExpressionParser parser = new AlarmExpressionParser(new CommonTokenStream(
new AlarmExpressionLexer(new ANTLRInputStream(expression))));
parser.removeErrorListeners();
parser.addErrorListener(new AlarmExpressionErrorListener());
parser.setBuildParseTree(true);
ParserRuleContext tree = parser.start();
AlarmSubExpressionListener listener = new AlarmSubExpressionListener(true);
ParseTreeWalker walker = new ParseTreeWalker();
walker.walk(listener, tree);
return (AlarmSubExpression) listener.getElements().get(0);
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
AlarmSubExpression other = (AlarmSubExpression) obj;
if (function != other.function)
return false;
if (metricDefinition == null) {
if (other.metricDefinition != null)
return false;
} else if (!metricDefinition.equals(other.metricDefinition))
return false;
if (operator != other.operator)
return false;
if (period != other.period)
return false;
if (periods != other.periods)
return false;
if (Double.doubleToLongBits(threshold) != Double.doubleToLongBits(other.threshold))
return false;
return true;
}
/**
* Evaluates the {@code value} against the threshold and returns the result.
*/
public boolean evaluate(double value) {
return operator.evaluate(value, threshold);
}
/**
* Returns the sub-alarm's expression.
*/
public String getExpression() {
StringBuilder sb = new StringBuilder();
sb.append(function).append('(').append(metricDefinition.toExpression());
if (period != 60)
sb.append(", ").append(period);
sb.append(") ").append(operator).append(' ').append(threshold);
if (periods != 1)
sb.append(" times ").append(periods);
return sb.toString();
}
public AggregateFunction getFunction() {
return function;
}
public MetricDefinition getMetricDefinition() {
return metricDefinition;
}
public AlarmOperator getOperator() {
return operator;
}
public int getPeriod() {
return period;
}
public int getPeriods() {
return periods;
}
public double getThreshold() {
return threshold;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((function == null) ? 0 : function.hashCode());
result = prime * result + ((metricDefinition == null) ? 0 : metricDefinition.hashCode());
result = prime * result + ((operator == null) ? 0 : operator.hashCode());
result = prime * result + period;
result = prime * result + periods;
long temp;
temp = Double.doubleToLongBits(threshold);
result = prime * result + (int) (temp ^ (temp >>> 32));
return result;
}
/** Sets the aggregate function applied to the metric. */
public void setFunction(AggregateFunction function) {
  this.function = function;
}

/** Sets the metric definition that this sub-alarm evaluates. */
public void setMetricDefinition(MetricDefinition metricDefinition) {
  this.metricDefinition = metricDefinition;
}

/** Sets the relational operator applied against the threshold. */
public void setOperator(AlarmOperator operator) {
  this.operator = operator;
}

/** Sets the evaluation period. */
public void setPeriod(int period) {
  this.period = period;
}

/** Sets the number of periods the expression must hold for. */
public void setPeriods(int periods) {
  this.periods = periods;
}

/** Sets the threshold value. */
public void setThreshold(double threshold) {
  this.threshold = threshold;
}
@Override
public String toString() {
  // The string form is the parseable expression itself.
  return getExpression();
}
}

View File

@ -0,0 +1,158 @@
package com.hpcloud.mon.common.model.alarm;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
/**
 * Complex alarm parser listener for sub expression extraction.
 *
 * <p>As the parse tree is walked, the operands of the current relational expression (function,
 * namespace, dimensions, operator, threshold, period, periods) are captured, and each completed
 * relational expression is flushed to {@link #elements} in postfix order alongside
 * {@link BooleanOperator}s.
 *
 * @author Todd Walk
 */
class AlarmSubExpressionListener extends AlarmExpressionBaseListener {
  /** When true, only a single relational expression is permitted. */
  private final boolean simpleExpression;
  // Operands of the relational expression currently being parsed
  private AggregateFunction function;
  private String namespace;
  private Map<String, String> dimensions;
  private AlarmOperator operator;
  private double threshold;
  private int period = AlarmSubExpression.DEFAULT_PERIOD;
  private int periods = AlarmSubExpression.DEFAULT_PERIODS;
  /** Postfix-ordered elements of types AlarmSubExpression and BooleanOperator. */
  private List<Object> elements = new ArrayList<Object>();

  AlarmSubExpressionListener(boolean simpleExpression) {
    this.simpleExpression = simpleExpression;
  }

  /**
   * Builds an AlarmSubExpression from the captured operands, appends it to {@link #elements} and
   * resets the operands for the next relational expression.
   */
  private void saveSubExpression() {
    AlarmSubExpression subExpression = new AlarmSubExpression(function, new MetricDefinition(
        namespace, dimensions), operator, threshold, period, periods);
    elements.add(subExpression);

    function = null;
    namespace = null;
    dimensions = null;
    operator = null;
    threshold = 0;
    period = AlarmSubExpression.DEFAULT_PERIOD;
    periods = AlarmSubExpression.DEFAULT_PERIODS;
  }

  @Override
  public void exitRelationalExprFwd(AlarmExpressionParser.RelationalExprFwdContext ctx) {
    // This is *right now* basically the same as a min or max function, convert it
    if (operator == AlarmOperator.GT || operator == AlarmOperator.GTE)
      function = AggregateFunction.MAX;
    else
      function = AggregateFunction.MIN;
    saveSubExpression();
  }

  @Override
  public void exitRelationalExprFuncFwd(AlarmExpressionParser.RelationalExprFuncFwdContext ctx) {
    saveSubExpression();
  }

  @Override
  public void exitRelationalExprBwd(AlarmExpressionParser.RelationalExprBwdContext ctx) {
    // Operand order is reversed (threshold on the left), so flip the operator first
    operator = AlarmOperator.reverseOperator(operator);
    // This is *right now* basically the same as a min or max function, convert it
    if (operator == AlarmOperator.GT || operator == AlarmOperator.GTE)
      function = AggregateFunction.MAX;
    else
      function = AggregateFunction.MIN;
    saveSubExpression();
  }

  @Override
  public void exitRelationalExprFuncBwd(AlarmExpressionParser.RelationalExprFuncBwdContext ctx) {
    operator = AlarmOperator.reverseOperator(operator);
    saveSubExpression();
  }

  @Override
  public void enterFunctionType(AlarmExpressionParser.FunctionTypeContext ctx) {
    function = AggregateFunction.valueOf(ctx.getChild(0).getText().toUpperCase());
  }

  @Override
  public void enterNamespace(AlarmExpressionParser.NamespaceContext ctx) {
    namespace = ctx.getChild(0).getText();
  }

  @Override
  public void enterDimension(AlarmExpressionParser.DimensionContext ctx) {
    if (dimensions == null)
      dimensions = new HashMap<String, String>();
    // Children are name '=' value; reject duplicate dimension names
    String dimensionName = ctx.getChild(0).getText();
    if (dimensions.put(dimensionName, ctx.getChild(2).getText()) != null)
      throw new IllegalArgumentException("More than one value was given for dimension "
          + dimensionName);
  }

  @Override
  public void enterPeriod(AlarmExpressionParser.PeriodContext ctx) {
    period = Integer.parseInt(ctx.getChild(0).getText());
  }

  @Override
  public void enterRepeat(AlarmExpressionParser.RepeatContext ctx) {
    periods = Integer.parseInt(ctx.getChild(0).getText());
  }

  @Override
  public void enterLt(AlarmExpressionParser.LtContext ctx) {
    assertSimpleExpression();
    operator = AlarmOperator.LT;
  }

  @Override
  public void enterLte(AlarmExpressionParser.LteContext ctx) {
    assertSimpleExpression();
    operator = AlarmOperator.LTE;
  }

  @Override
  public void enterGt(AlarmExpressionParser.GtContext ctx) {
    assertSimpleExpression();
    operator = AlarmOperator.GT;
  }

  @Override
  public void enterGte(AlarmExpressionParser.GteContext ctx) {
    assertSimpleExpression();
    operator = AlarmOperator.GTE;
  }

  @Override
  public void exitLiteral(AlarmExpressionParser.LiteralContext ctx) {
    // The threshold is a double; Double.parseDouble accepts both integral and decimal
    // literals, whereas the previous Long.valueOf rejected decimals such as "1.5".
    threshold = Double.parseDouble(ctx.getChild(0).getText());
  }

  @Override
  public void exitOrExpr(AlarmExpressionParser.OrExprContext ctx) {
    elements.add(BooleanOperator.OR);
  }

  @Override
  public void exitAndExpr(AlarmExpressionParser.AndExprContext ctx) {
    elements.add(BooleanOperator.AND);
  }

  /**
   * Returns the operator and operand elements of the expression in postfix order. Elements will be
   * of types AlarmSubExpression and BooleanOperator.
   */
  List<Object> getElements() {
    return elements;
  }

  /** Fails fast when a boolean connective appears in a simple (single sub-expression) parse. */
  private void assertSimpleExpression() {
    if (simpleExpression && !elements.isEmpty())
      throw new IllegalArgumentException("Expected a simple expression");
  }
}

View File

@ -0,0 +1,17 @@
package com.hpcloud.mon.common.model.alarm;
/**
 * Boolean operator connecting two alarm sub-expressions.
 *
 * @author Todd Walk
 * @author Jonathan Halterman
 */
public enum BooleanOperator {
  AND, OR;

  /**
   * Applies this operator to the operands.
   *
   * @param lhs left-hand operand
   * @param rhs right-hand operand
   * @return {@code lhs && rhs} for AND, {@code lhs || rhs} for OR
   */
  public boolean evaluate(boolean lhs, boolean rhs) {
    return this == AND ? lhs && rhs : lhs || rhs;
  }
}

View File

@ -0,0 +1,111 @@
package com.hpcloud.mon.common.model.metric;
import java.util.Arrays;
/**
* Collectd Metric.
*
* @author Jonathan Halterman
*/
public class CollectdMetric {
public String host;
public String plugin;
public String pluginInstance;
public String type;
public String typeInstance;
public long time;
public long interval;
public String[] dsnames;
public String[] dstypes;
public double[] values;
public CollectdMetric() {
}
public CollectdMetric(String host, String plugin, String pluginInstance, String type,
String typeInstance, long time, long interval, String[] dsnames, String[] dstypes,
double[] values) {
this.host = host;
this.plugin = plugin;
this.pluginInstance = pluginInstance;
this.type = type;
this.typeInstance = typeInstance;
this.time = time;
this.interval = interval;
this.dsnames = dsnames;
this.dstypes = dstypes;
this.values = values;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
CollectdMetric other = (CollectdMetric) obj;
if (!Arrays.equals(dsnames, other.dsnames))
return false;
if (!Arrays.equals(dstypes, other.dstypes))
return false;
if (host == null) {
if (other.host != null)
return false;
} else if (!host.equals(other.host))
return false;
if (interval != other.interval)
return false;
if (plugin == null) {
if (other.plugin != null)
return false;
} else if (!plugin.equals(other.plugin))
return false;
if (pluginInstance == null) {
if (other.pluginInstance != null)
return false;
} else if (!pluginInstance.equals(other.pluginInstance))
return false;
if (time != other.time)
return false;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
if (typeInstance == null) {
if (other.typeInstance != null)
return false;
} else if (!typeInstance.equals(other.typeInstance))
return false;
if (!Arrays.equals(values, other.values))
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + Arrays.hashCode(dsnames);
result = prime * result + Arrays.hashCode(dstypes);
result = prime * result + ((host == null) ? 0 : host.hashCode());
result = prime * result + (int) (interval ^ (interval >>> 32));
result = prime * result + ((plugin == null) ? 0 : plugin.hashCode());
result = prime * result + ((pluginInstance == null) ? 0 : pluginInstance.hashCode());
result = prime * result + (int) (time ^ (time >>> 32));
result = prime * result + ((type == null) ? 0 : type.hashCode());
result = prime * result + ((typeInstance == null) ? 0 : typeInstance.hashCode());
result = prime * result + Arrays.hashCode(values);
return result;
}
@Override
public String toString() {
return String.format(
"CollectdMetric [host=%s, plugin=%s, pluginInstance=%s, type=%s, typeInstance=%s, time=%s, interval=%s, dsnames=%s, dstypes=%s, values=%s]",
host, plugin, pluginInstance, type, typeInstance, time, interval, Arrays.toString(dsnames),
Arrays.toString(dstypes), Arrays.toString(values));
}
}

View File

@ -0,0 +1,432 @@
package com.hpcloud.mon.common.model.metric;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Iterables;
import com.hpcloud.mon.common.model.Namespaces;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.util.Exceptions;
/**
 * Utilities for working with collectd metrics.
 *
 * <p>Converts collectd "putval" JSON into internal metric representations using per-plugin
 * decoders, and maps internal metric names back to collectd type/ds_name pairs using per-namespace
 * encoders.
 *
 * @author Jonathan Halterman
 */
public final class CollectdMetrics {
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
  private static final ObjectReader COLLECTD_METRIC_READER;
  private static final String PUTVAL_ELEMENT_KEY = "putval";
  private static final String HOST_ELEMENT_KEY = "host";
  private static final String PLUGIN_ELEMENT_KEY = "plugin";
  private static final String PLUGIN_INSTANCE_ELEMENT_KEY = "plugin_instance";
  private static final String TYPE_ELEMENT_KEY = "type";
  private static final String TYPE_INSTANCE_ELEMENT_KEY = "type_instance";
  private static final String VALUES_ELEMENT_KEY = "values";
  private static final String TIME_ELEMENT_KEY = "time";
  private static final String DSNAMES_ELEMENT_KEY = "dsnames";
  private static final String LIBVIRT_PLUGIN = "libvirt";
  private static final String BOCK_PLUGIN = "bock";
  public static final String METRIC_NAME_DIM = "metric_name";
  private static final String AZ_DIM = "az";
  public static final String DEVICE_DIM = "device";
  public static final String DISK_DIM = "disk";
  private static final Map<String, MetricDefinitionDecoder> METRIC_DEFINITION_DECODERS;
  private static final Map<String, MetricDefinitionEncoder> METRIC_DEFINITION_ENCODERS;
  private static final Pattern AZ_PATTERN = Pattern.compile("(az([1-3]){1})",
      Pattern.CASE_INSENSITIVE);
  private static final Joiner COLON_JOINER = Joiner.on(':');
  private static final Splitter COLON_SPLITTER = Splitter.on(':');

  static {
    OBJECT_MAPPER.setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
    COLLECTD_METRIC_READER = OBJECT_MAPPER.reader(CollectdMetric.class);
    METRIC_DEFINITION_DECODERS = new HashMap<String, MetricDefinitionDecoder>();
    METRIC_DEFINITION_ENCODERS = new HashMap<String, MetricDefinitionEncoder>();

    // Decodes libvirt metric definitions
    METRIC_DEFINITION_DECODERS.put(LIBVIRT_PLUGIN, new MetricDefinitionDecoder() {
      {
        // Keys are "type:ds_name" pairs; values are internal metric names
        metricNames.put("virt_cpu_total:value", "cpu_total_time");
        metricNames.put("disk_ops:read", "disk_read_ops_count");
        metricNames.put("disk_ops:write", "disk_write_ops_count");
        metricNames.put("disk_octets:read", "disk_read_bytes_count");
        metricNames.put("disk_octets:write", "disk_write_bytes_count");
        metricNames.put("if_octets:rx", "net_in_bytes_count");
        metricNames.put("if_octets:tx", "net_out_bytes_count");
        metricNames.put("if_packets:rx", "net_in_packets_count");
        metricNames.put("if_packets:tx", "net_out_packets_count");
        metricNames.put("if_dropped:rx", "net_in_dropped_count");
        metricNames.put("if_dropped:tx", "net_out_dropped_count");
        metricNames.put("if_errors:rx", "net_in_errors_count");
        metricNames.put("if_errors:tx", "net_out_errors_count");
      }

      @Override
      SortedMap<String, String> dimensionsFor(String host, String pluginInstance, String type,
          String typeInstance) {
        SortedMap<String, String> dimensions = super.dimensionsFor(host, pluginInstance, type,
            typeInstance);
        if (!Strings.isNullOrEmpty(type) && !Strings.isNullOrEmpty(typeInstance))
          dimensions.put(DEVICE_DIM, type.startsWith("if") ? "eth0" : typeInstance);
        return dimensions;
      }

      @Override
      String namespace() {
        return Namespaces.COMPUTE_NAMESPACE;
      }
    });

    METRIC_DEFINITION_ENCODERS.put(Namespaces.COMPUTE_NAMESPACE, new MetricDefinitionEncoder() {
      {
        // Inverse of the libvirt decoder mapping: internal name -> [type, ds_name]
        metricNames.put("cpu_total_time", new String[] { "virt_cpu_total", "value" });
        metricNames.put("disk_read_ops_count", new String[] { "disk_ops", "read" });
        metricNames.put("disk_write_ops_count", new String[] { "disk_ops", "write" });
        metricNames.put("disk_read_bytes_count", new String[] { "disk_octets", "read" });
        metricNames.put("disk_write_bytes_count", new String[] { "disk_octets", "write" });
        metricNames.put("net_in_bytes_count", new String[] { "if_octets", "rx" });
        metricNames.put("net_out_bytes_count", new String[] { "if_octets", "tx" });
        metricNames.put("net_in_packets_count", new String[] { "if_packets", "rx" });
        metricNames.put("net_out_packets_count", new String[] { "if_packets", "tx" });
        metricNames.put("net_in_dropped_count", new String[] { "if_dropped", "rx" });
        metricNames.put("net_out_dropped_count", new String[] { "if_dropped", "tx" });
        metricNames.put("net_in_errors_count", new String[] { "if_errors", "rx" });
        metricNames.put("net_out_errors_count", new String[] { "if_errors", "tx" });
      }
    });

    // Decodes bock metric definitions
    METRIC_DEFINITION_DECODERS.put(BOCK_PLUGIN, new MetricDefinitionDecoder() {
      {
        metricNames.put("disk_ops:read", "volume_read_ops");
        metricNames.put("disk_ops:write", "volume_write_ops");
        metricNames.put("disk_octets:read", "volume_read_bytes");
        metricNames.put("disk_octets:write", "volume_write_bytes");
        metricNames.put("disk_time:read", "volume_read_time");
        metricNames.put("disk_time:write", "volume_write_time");
        metricNames.put("counter:value", "volume_idle_time");
      }

      @Override
      SortedMap<String, String> dimensionsFor(String host, String pluginInstance, String type,
          String typeInstance) {
        SortedMap<String, String> dimensions = super.dimensionsFor(host, pluginInstance, type,
            typeInstance);
        if (pluginInstance != null)
          dimensions.put(DISK_DIM, pluginInstance);
        return dimensions;
      }

      @Override
      String namespace() {
        return Namespaces.VOLUME_NAMESPACE;
      }
    });

    METRIC_DEFINITION_ENCODERS.put(Namespaces.VOLUME_NAMESPACE, new MetricDefinitionEncoder() {
      {
        metricNames.put("volume_read_ops", new String[] { "disk_ops", "read" });
        metricNames.put("volume_write_ops", new String[] { "disk_ops", "write" });
        metricNames.put("volume_read_bytes", new String[] { "disk_octets", "read" });
        metricNames.put("volume_write_bytes", new String[] { "disk_octets", "write" });
        metricNames.put("volume_read_time", new String[] { "disk_time", "read" });
        metricNames.put("volume_write_time", new String[] { "disk_time", "write" });
        metricNames.put("volume_idle_time", new String[] { "counter", "value" });
      }
    });
  }

  private CollectdMetrics() {
  }

  /** Decodes metric types. */
  static abstract class MetricDefinitionDecoder {
    /** Maps "type:ds_name" keys to internal metric names. */
    protected final Map<String, String> metricNames = new HashMap<String, String>();

    /**
     * Returns a metricType intended to be composed of plugin, pluginInstance, type, typeInstance,
     * dsName - in that order, else {@code null} if no metric type could be decoded for the
     * {@code components}.
     */
    protected String decodeMetricTypeFor(String... components) {
      return metricNames.get(COLON_JOINER.join(components));
    }

    /**
     * Builds the base dimensions (az / instance_id) from the collectd host value. Subclasses add
     * plugin-specific dimensions on top.
     */
    SortedMap<String, String> dimensionsFor(String host, String pluginInstance, String type,
        String typeInstance) {
      SortedMap<String, String> dimensions = new TreeMap<String, String>();
      if (host.contains("instance-")) {
        String az = azForHost(host);
        if (az != null)
          dimensions.put(AZ_DIM, az);
        String instanceId = instanceIdForHost(host);
        if (instanceId != null)
          dimensions.put(INSTANCE_ID_DIM, instanceId);
      } else if (host.length() == 36) {
        try {
          // A 36-character host that parses as a UUID is treated as the instance id itself
          UUID.fromString(host);
          dimensions.put(INSTANCE_ID_DIM, host);
        } catch (IllegalArgumentException e) {
          // Not a UUID; the host carries no instance id
        }
      }
      return dimensions;
    }

    String metricNameFor(String pluginInstance, String type, String typeInstance, String dsName) {
      return decodeMetricTypeFor(type, dsName);
    }

    /** Returns the namespace that metrics decoded by this decoder belong to. */
    abstract String namespace();
  }

  /** Encodes metric types. */
  static abstract class MetricDefinitionEncoder {
    /** Maps internal metric names to [collectd type, ds_name] pairs. */
    protected final Map<String, String[]> metricNames = new HashMap<String, String[]>();
  }

  /**
   * Returns the collectd type and ds_name for the {@code namespace} and {@code metricName}, else
   * returns null.
   */
  public static String[] collectdNamesFor(String namespace, String metricName) {
    MetricDefinitionEncoder encoder = METRIC_DEFINITION_ENCODERS.get(namespace);
    return encoder == null ? null : encoder.metricNames.get(metricName);
  }

  /**
   * Returns a CollectdMetric instance for the {@code collectdMetricJson}.
   *
   * @throws RuntimeException if an error occurs while parsing the {@code collectdMetricJson}
   */
  public static CollectdMetric fromJson(byte[] collectdMetricJson) {
    try {
      JsonNode rootNode = OBJECT_MAPPER.readTree(collectdMetricJson);
      JsonNode putvalNode = rootNode.get(PUTVAL_ELEMENT_KEY);
      return COLLECTD_METRIC_READER.readValue(putvalNode);
    } catch (Exception e) {
      throw Exceptions.uncheck(e, "Failed to parse collectd metric json: %s", new String(
          collectdMetricJson));
    }
  }

  /**
   * Returns a host, consisting of the {@code instanceId} left padded with '0' characters to a
   * length of 8. Example: instance-000afabb
   *
   * <p>
   * Note: The 8 character length hex encoding is intended to match the encoding that nova uses as
   * per: https://noc-aw2az1-server01.uswest.hpcloud.net/tools/nova_lookup.php
   */
  public static String hostForInstanceId(int instanceId) {
    // instanceId is already an int; the previous Integer.valueOf(instanceId) only autoboxed
    String hex = Integer.toHexString(instanceId);
    String instancePrefix = "instance-";
    StringBuilder sb = new StringBuilder(instancePrefix.length() + 8);
    sb.append(instancePrefix);
    for (int i = hex.length(); i < 8; i++)
      sb.append('0');
    sb.append(hex);
    return sb.toString();
  }

  /**
   * Returns true if the {@code namespace} is supported by collectd, else false.
   */
  public static boolean isCollectdNamespace(String namespace) {
    return (namespace.equalsIgnoreCase(Namespaces.COMPUTE_NAMESPACE) || namespace.equalsIgnoreCase(Namespaces.VOLUME_NAMESPACE));
  }

  /**
   * Returns whether a reserved namespace dimension is supported.
   */
  public static boolean isSupportedDimension(String dimension) {
    return INSTANCE_ID_DIM.equals(dimension) || AZ_DIM.equals(dimension)
        || METRIC_NAME_DIM.equals(dimension) || DEVICE_DIM.equals(dimension)
        || DISK_DIM.equals(dimension);
  }

  /**
   * Returns true if the {@code dimension} for the {@code namespace} is supported by collectd, else
   * false.
   */
  public static boolean isSupportedDimension(String namespace, String dimension) {
    return !Namespaces.isReserved(namespace) || isSupportedDimension(dimension);
  }

  /**
   * Returns the collectd plugin for the namespace, else null.
   */
  public static String pluginForNamespace(String namespace) {
    if (Namespaces.COMPUTE_NAMESPACE.equals(namespace))
      return LIBVIRT_PLUGIN;
    if (Namespaces.VOLUME_NAMESPACE.equals(namespace))
      return BOCK_PLUGIN;
    return null;
  }

  /**
   * Removes dimensions from the {@code expression} that are not supported by collectd.
   */
  public static void removeUnsupportedDimensions(AlarmExpression expression) {
    for (AlarmSubExpression subExpression : expression.getSubExpressions())
      removeUnsupportedDimensions(subExpression.getMetricDefinition());
  }

  /**
   * Removes dimensions from the {@code metricDefinition} that are not supported by collectd.
   */
  public static void removeUnsupportedDimensions(MetricDefinition metricDefinition) {
    if (Namespaces.isReserved(metricDefinition.namespace) && metricDefinition.dimensions != null) {
      for (Iterator<String> it = metricDefinition.dimensions.keySet().iterator(); it.hasNext();) {
        String dim = it.next();
        if (!isSupportedDimension(dim))
          it.remove();
      }
      metricDefinition.setDimensions(metricDefinition.dimensions);
    }
  }

  /**
   * Returns the collectd subject that originated from the plugin_instance or type_instance for the
   * {@code namespace} and {@code dimensions}, else null.
   */
  public static String subjectFor(String namespace, Map<String, String> dimensions) {
    if (Namespaces.COMPUTE_NAMESPACE.equals(namespace))
      return dimensions.get(DEVICE_DIM);
    if (Namespaces.VOLUME_NAMESPACE.equals(namespace))
      return dimensions.get(DISK_DIM);
    return null;
  }

  /**
   * Returns flat metrics converted from the {@code collectdMetricJson} else {@code null} if the
   * metric type is not supported.
   */
  public static List<FlatMetric> toFlatMetrics(byte[] collectdMetricJson) {
    return toMetrics(collectdMetricJson, FlatMetric.class);
  }

  /**
   * Returns flat metrics JSON converted from the {@code collectdMetricJson} else {@code null} if
   * the metric type is not supported.
   */
  public static List<String> toFlatMetricsJson(byte[] collectdMetricJson) {
    List<FlatMetric> flatMetrics = toFlatMetrics(collectdMetricJson);
    // toFlatMetrics returns null (not an empty list) when no metric type matched; guard both
    // cases to avoid a NullPointerException
    if (flatMetrics == null || flatMetrics.isEmpty())
      return null;
    List<String> jsons = new ArrayList<String>(flatMetrics.size());
    for (FlatMetric flatMetric : flatMetrics) {
      String json = FlatMetrics.toJson(flatMetric);
      if (json != null)
        jsons.add(json);
    }
    return jsons;
  }

  /**
   * Returns metrics converted from the {@code collectdMetricJson} else {@code null} if the metric
   * type is not supported.
   */
  public static List<Metric> toMetrics(byte[] collectdMetricJson) {
    return toMetrics(collectdMetricJson, Metric.class);
  }

  /**
   * Returns the AZ for the collectd {@code host} value.
   */
  private static String azForHost(String host) {
    Matcher matcher = AZ_PATTERN.matcher(host);
    // NOTE(review): group(2) yields only the zone digit ("1".."3"), not the full "azN" token
    // (group(1)) - confirm this is the intended dimension value
    return matcher.find() ? matcher.group(2) : null;
  }

  /**
   * Returns the user-facing nova instance id for the collectd {@code host} value.
   */
  private static String instanceIdForHost(String host) {
    // Host may be "prefix:instance-<hex>" or "instance-<hex>"; take the hex after the first '-'
    // and render it as a decimal string
    String hosts[] = Iterables.toArray(COLON_SPLITTER.split(host), String.class);
    String novaInstanceId = hosts.length > 1 ? hosts[1] : hosts[0];
    novaInstanceId = novaInstanceId.substring(novaInstanceId.indexOf('-') + 1);
    return Integer.valueOf(novaInstanceId, 16).toString();
  }

  /**
   * Returns flat metrics converted from the {@code collectdMetricJson} else {@code null} if the
   * metric type is not supported.
   *
   * @throws RuntimeException if an error occurs while converting the {@code collectdMetricJson}
   */
  @SuppressWarnings("unchecked")
  private static <T> List<T> toMetrics(byte[] collectdMetricJson, Class<T> metricClass) {
    List<T> metrics = null;
    try {
      JsonNode rootNode = OBJECT_MAPPER.readTree(collectdMetricJson);
      JsonNode putvalNode = rootNode.get(PUTVAL_ELEMENT_KEY);
      String host = putvalNode.get(HOST_ELEMENT_KEY).asText();
      String plugin = putvalNode.get(PLUGIN_ELEMENT_KEY).asText();
      MetricDefinitionDecoder decoder = METRIC_DEFINITION_DECODERS.get(plugin);
      if (decoder == null)
        throw new IllegalArgumentException("No metric decoder could be found for the " + plugin);

      String pluginInstance = putvalNode.get(PLUGIN_INSTANCE_ELEMENT_KEY).asText();
      String type = putvalNode.get(TYPE_ELEMENT_KEY).asText();
      String typeInstance = putvalNode.get(TYPE_INSTANCE_ELEMENT_KEY).asText();
      ArrayNode dsNames = (ArrayNode) putvalNode.get(DSNAMES_ELEMENT_KEY);
      ArrayNode valuesNode = (ArrayNode) putvalNode.get(VALUES_ELEMENT_KEY);
      long timestamp = putvalNode.get(TIME_ELEMENT_KEY).asLong();
      String namespace = decoder.namespace();

      // One metric per entry in the values array; entries whose metric name cannot be decoded
      // are skipped
      for (int i = 0; i < valuesNode.size(); i++) {
        String dsName = dsNames.size() > 0 ? dsNames.get(i).asText() : null;
        String metricName = decoder.metricNameFor(pluginInstance, type, typeInstance, dsName);
        if (metricName == null)
          continue;
        SortedMap<String, String> dimensions = decoder.dimensionsFor(host, pluginInstance, type,
            typeInstance);
        dimensions.put(METRIC_NAME_DIM, metricName);
        long value = valuesNode.get(i).asLong();
        T metric = null;
        if (FlatMetric.class.equals(metricClass))
          metric = (T) new FlatMetric(namespace, dimensions, timestamp, value);
        else
          metric = (T) new Metric(new MetricDefinition(namespace, dimensions), timestamp, value);
        if (metrics == null)
          metrics = new ArrayList<T>(valuesNode.size());
        metrics.add(metric);
      }
    } catch (IOException e) {
      throw Exceptions.uncheck(e, "Failed to convert collectd metric json to %s: %s",
          metricClass.getSimpleName(), new String(collectdMetricJson));
    }
    return metrics;
  }
}

View File

@ -0,0 +1,131 @@
package com.hpcloud.mon.common.model.metric;
import java.util.Arrays;
import java.util.Map;
import java.util.SortedMap;
import javax.annotation.Nullable;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.hash.HashCode;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
/**
 * MaaS Metric with definition information flattened alongside value information.
 *
 * <p>Holds either a single {@code value} or a {@code timeValues} series (presumably pairs of
 * [timestamp, value] - see FlatMetrics' serializer), but not both.
 *
 * @author Jonathan Halterman
 */
public class FlatMetric {
  public String namespace;
  public SortedMap<String, String> dimensions;
  public long timestamp;
  public double value;
  public double[][] timeValues;

  /** No-arg constructor for deserialization frameworks. */
  public FlatMetric() {
  }

  public FlatMetric(String namespace, @Nullable SortedMap<String, String> dimensions,
      long timestamp, double value) {
    this.namespace = Preconditions.checkNotNull(namespace, "namespace");
    setDimensions(dimensions);
    // timestamp and value are primitives and can never be null; the previous checkNotNull
    // calls only autoboxed them to no effect
    this.timestamp = timestamp;
    this.value = value;
  }

  public FlatMetric(String namespace, @Nullable SortedMap<String, String> dimensions,
      long timestamp, double[][] timeValues) {
    this.namespace = Preconditions.checkNotNull(namespace, "namespace");
    setDimensions(dimensions);
    this.timestamp = timestamp;
    this.timeValues = Preconditions.checkNotNull(timeValues, "timeValues");
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null)
      return false;
    if (getClass() != obj.getClass())
      return false;
    FlatMetric other = (FlatMetric) obj;
    if (dimensions == null) {
      if (other.dimensions != null)
        return false;
    } else if (!dimensions.equals(other.dimensions))
      return false;
    if (namespace == null) {
      if (other.namespace != null)
        return false;
    } else if (!namespace.equals(other.namespace))
      return false;
    // Note - Deep Equals is used here
    if (!Arrays.deepEquals(timeValues, other.timeValues))
      return false;
    if (timestamp != other.timestamp)
      return false;
    if (Double.doubleToLongBits(value) != Double.doubleToLongBits(other.value))
      return false;
    return true;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((dimensions == null) ? 0 : dimensions.hashCode());
    result = prime * result + ((namespace == null) ? 0 : namespace.hashCode());
    // Note Deep hash code is used here
    result = prime * result + Arrays.deepHashCode(timeValues);
    result = prime * result + (int) (timestamp ^ (timestamp >>> 32));
    long temp;
    temp = Double.doubleToLongBits(value);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    return result;
  }

  /**
   * Create a string that is unique for the namespace + dimensions combo. Convert that string to a
   * UTF-8 character encoded language neutral UUID. This encoding must be reproduceable in other
   * languages. Using 128 bit MD5.
   *
   * @return MD5 128 bit hash of namespace + sorted dimensions
   */
  public HashCode definitionHashCode() {
    StringBuilder sb = new StringBuilder(namespace);
    sb.append('=');
    if (dimensions != null) {
      // dimensions is a SortedMap, so iteration order (and thus the hash) is deterministic
      for (Map.Entry<String, String> dimension : dimensions.entrySet()) {
        sb.append(dimension.getKey()).append(':').append(dimension.getValue()).append(':');
      }
    }
    HashFunction hf = Hashing.md5();
    HashCode hc = hf.newHasher().putString(sb.toString(), Charsets.UTF_8).hash();
    return hc;
  }

  /**
   * Sets or merges the dimensions. Called by Jackson during deserialization and by the
   * constructors.
   */
  @JsonProperty
  public void setDimensions(SortedMap<String, String> dimensions) {
    // A null argument is a no-op; previously a second call with null threw a
    // NullPointerException from putAll
    if (dimensions == null)
      return;
    if (this.dimensions == null)
      this.dimensions = dimensions;
    else
      this.dimensions.putAll(dimensions);
  }

  /** Returns a Metric for the FlatMetric. */
  public Metric toMetric() {
    MetricDefinition metricDef = new MetricDefinition(namespace, dimensions);
    return timeValues == null ? new Metric(metricDef, timestamp, value) : new Metric(metricDef,
        timestamp, timeValues);
  }

  @Override
  public String toString() {
    return String.format("FlatMetric [namespace=%s, dimensions=%s, timestamp=%s, value=%s]",
        namespace, dimensions, timestamp, timeValues == null ? value : Arrays.toString(timeValues));
  }
}

View File

@ -0,0 +1,93 @@
package com.hpcloud.mon.common.model.metric;
import java.io.IOException;
import org.apache.commons.lang3.StringEscapeUtils;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.hpcloud.util.Exceptions;
/**
 * Utilities for working with FlatMetrics.
 *
 * <p>Serializes FlatMetrics to JSON with lower_case_with_underscores property names and a custom
 * serializer that writes either a single "value" or a "time_values" array.
 *
 * @author Jonathan Halterman
 */
public final class FlatMetrics {
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  static {
    OBJECT_MAPPER.setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
    SimpleModule module = new SimpleModule();
    module.addSerializer(new FlatMetricSerializer());
    OBJECT_MAPPER.registerModule(module);
  }

  /** FlatMetric serializer */
  private static class FlatMetricSerializer extends JsonSerializer<FlatMetric> {
    @Override
    public Class<FlatMetric> handledType() {
      return FlatMetric.class;
    }

    // Missing @Override added; this implements JsonSerializer.serialize
    @Override
    public void serialize(FlatMetric value, JsonGenerator jgen, SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeStartObject();
      jgen.writeStringField("namespace", value.namespace);
      if (value.dimensions != null && !value.dimensions.isEmpty())
        jgen.writeObjectField("dimensions", value.dimensions);
      jgen.writeNumberField("timestamp", value.timestamp);
      if (value.timeValues == null)
        jgen.writeNumberField("value", value.value);
      else {
        jgen.writeArrayFieldStart("time_values");
        for (double[] timeValue : value.timeValues) {
          jgen.writeStartArray();
          jgen.writeNumber((long) timeValue[0]); // Write timestamp as a long
          jgen.writeNumber(timeValue[1]);
          jgen.writeEndArray();
        }
        jgen.writeEndArray();
      }
      jgen.writeEndObject();
    }
  }

  private FlatMetrics() {
  }

  /**
   * Returns the FlatMetric for the {@code flatMetricJson}.
   *
   * @throws RuntimeException if an error occurs while parsing {@code flatMetricJson}
   */
  public static FlatMetric fromJson(byte[] flatMetricJson) {
    try {
      // NOTE(review): unescapeJava is applied to the whole JSON document before parsing -
      // presumably the upstream producer escapes the payload; confirm against the producer
      String jsonStr = StringEscapeUtils.unescapeJava(new String(flatMetricJson, "UTF-8"));
      return OBJECT_MAPPER.readValue(jsonStr, FlatMetric.class);
    } catch (Exception e) {
      throw Exceptions.uncheck(e, "Failed to parse flat metric json: %s",
          new String(flatMetricJson));
    }
  }

  /**
   * Returns the JSON representation of the {@code flatMetric} else null if it could not be
   * converted to JSON.
   */
  public static String toJson(FlatMetric flatMetric) {
    try {
      return OBJECT_MAPPER.writeValueAsString(flatMetric);
    } catch (JsonProcessingException e) {
      return null;
    }
  }
}

View File

@ -0,0 +1,79 @@
package com.hpcloud.mon.common.model.metric;
import java.io.Serializable;
import java.util.Arrays;
import com.google.common.base.Preconditions;
/**
 * MaaS Metric.
 *
 * <p>Carries either a single {@code value} or a {@code timeValues} series (presumably pairs of
 * [timestamp, value] - see FlatMetrics' serializer), but not both, for a
 * {@link MetricDefinition}.
 *
 * @author Jonathan Halterman
 */
public class Metric implements Serializable {
  private static final long serialVersionUID = 5977725053565324274L;

  public MetricDefinition definition;
  public long timestamp;
  public double value;
  public double[][] timeValues;

  /** No-arg constructor for deserialization frameworks. */
  public Metric() {
  }

  public Metric(MetricDefinition definition, long timestamp, double value) {
    this.definition = Preconditions.checkNotNull(definition, "definition");
    // timestamp and value are primitives and can never be null; the previous checkNotNull
    // calls only autoboxed them to no effect
    this.timestamp = timestamp;
    this.value = value;
  }

  public Metric(MetricDefinition definition, long timestamp, double[][] timeValues) {
    this.definition = Preconditions.checkNotNull(definition, "definition");
    this.timestamp = timestamp;
    this.timeValues = Preconditions.checkNotNull(timeValues, "timeValues");
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null)
      return false;
    if (getClass() != obj.getClass())
      return false;
    Metric other = (Metric) obj;
    if (definition == null) {
      if (other.definition != null)
        return false;
    } else if (!definition.equals(other.definition))
      return false;
    // Note - deep equals is used here
    if (!Arrays.deepEquals(timeValues, other.timeValues))
      return false;
    if (timestamp != other.timestamp)
      return false;
    if (Double.doubleToLongBits(value) != Double.doubleToLongBits(other.value))
      return false;
    return true;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((definition == null) ? 0 : definition.hashCode());
    // Note - deep hash code is used here
    result = prime * result + Arrays.deepHashCode(timeValues);
    result = prime * result + (int) (timestamp ^ (timestamp >>> 32));
    long temp;
    temp = Double.doubleToLongBits(value);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    return result;
  }

  @Override
  public String toString() {
    return String.format("Metric [definition=%s, timestamp=%s, value=%s]", definition, timestamp,
        timeValues == null ? value : Arrays.toString(timeValues));
  }
}

View File

@ -0,0 +1,83 @@
package com.hpcloud.mon.common.model.metric;
import java.io.Serializable;
import java.util.Map;
import javax.annotation.Nullable;
import com.google.common.base.Preconditions;
/**
* Metric definition.
*
* @author Jonathan Halterman
*/
/**
 * Metric definition: a namespace plus an optional set of dimensions.
 * An empty dimension map is normalized to null.
 *
 * @author Jonathan Halterman
 */
public class MetricDefinition implements Serializable {
  private static final long serialVersionUID = -3074228641225201445L;

  public String namespace;
  public Map<String, String> dimensions;

  public MetricDefinition() {
  }

  public MetricDefinition(String namespace, @Nullable Map<String, String> dimensions) {
    this.namespace = Preconditions.checkNotNull(namespace, "namespace");
    setDimensions(dimensions);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null || getClass() != obj.getClass())
      return false;
    MetricDefinition that = (MetricDefinition) obj;
    boolean dimensionsEqual =
        dimensions == null ? that.dimensions == null : dimensions.equals(that.dimensions);
    boolean namespacesEqual =
        namespace == null ? that.namespace == null : namespace.equals(that.namespace);
    return dimensionsEqual && namespacesEqual;
  }

  @Override
  public int hashCode() {
    // Same accumulation as the conventional prime-31 recipe over (dimensions, namespace).
    int result = 31 + ((dimensions == null) ? 0 : dimensions.hashCode());
    return 31 * result + ((namespace == null) ? 0 : namespace.hashCode());
  }

  /** Stores {@code dimensions}, normalizing an empty map to null. */
  public void setDimensions(Map<String, String> dimensions) {
    this.dimensions = dimensions == null || dimensions.isEmpty() ? null : dimensions;
  }

  /**
   * Returns an expression representation of the metric definition.
   */
  public String toExpression() {
    return dimensions == null ? String.valueOf(namespace) : namespace + dimensions;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("MetricDefinition [").append(namespace);
    if (dimensions != null && !dimensions.isEmpty())
      sb.append(dimensions);
    return sb.append(']').toString();
  }
}

View File

@ -0,0 +1,22 @@
package com.hpcloud.mon.common.model.metric;
import java.util.Map;
/**
* Utilities for working with Metrics.
*
* @author Jonathan Halterman
*/
/**
 * Utilities for working with Metrics.
 *
 * @author Jonathan Halterman
 */
public final class Metrics {
  private Metrics() {
  }

  /**
   * Returns a metric for the {@code flatMetric} and {@code dimensions}. Builds a
   * time-series metric when the flat metric carries timeValues, a scalar metric
   * otherwise.
   */
  public static Metric of(FlatMetric flatMetric, Map<String, String> dimensions) {
    MetricDefinition definition = new MetricDefinition(flatMetric.namespace, dimensions);
    if (flatMetric.timeValues == null)
      return new Metric(definition, flatMetric.timestamp, flatMetric.value);
    return new Metric(definition, flatMetric.timestamp, flatMetric.timeValues);
  }
}

View File

@ -0,0 +1,44 @@
package com.hpcloud.mon.domain.common;
import java.io.Serializable;
/**
* Defines an entity with a surrogate key.
*
* @author Jonathan Halterman
* @see http://domaindrivendesign.org/search/node/Entity
*/
public abstract class AbstractEntity implements Serializable {
private static final long serialVersionUID = -7055330640094842914L;
protected String id;
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
AbstractEntity other = (AbstractEntity) obj;
if (id == null) {
if (other.id != null)
return false;
} else if (!id.equals(other.id))
return false;
return true;
}
public String getId() {
return id;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((id == null) ? 0 : id.hashCode());
return result;
}
}

View File

@ -0,0 +1,236 @@
/*
 * Alarm expression parser (for maas-api support)
 * Todd Walk, Hewlett Packard Cloud Services, 2013
 */
grammar AlarmExpression;

// ---------------- Parser rules ----------------

// Entry point: a single boolean expression followed by end of input.
start
    : expression EOF
    ;

// Relational comparisons (either operand order, with or without an aggregate
// function and an optional 'times' repeat count), combined with and/or and
// parentheses. Labels (#...) name the generated visitor/listener contexts.
expression
    : compoundIdentifier relational_operator literal # relationalExprFwd
    | function relational_operator literal ('times' repeat)? # relationalExprFuncFwd
    | literal relational_operator compoundIdentifier # relationalExprBwd
    | literal relational_operator function ('times' repeat)? # relationalExprFuncBwd
    | expression and expression # andExpr
    | expression or expression # orExpr
    | '(' expression ')' # parenExpr
    ;

// Aggregate call, e.g. avg(ns{dim=val}, period) — the period is optional.
function
    : functionType '(' compoundIdentifier (',' period)? ')'
    ;

relational_operator
    : lt
    | lte
    | gt
    | gte
    ;

// Each operator accepts both a word form (lt) and a symbol form (<).
lt
    : LT
    | LT_S
    ;

lte
    : LTE
    | LTE_S
    ;

gt
    : GT
    | GT_S
    ;

gte
    : GTE
    | GTE_S
    ;

and
    : AND
    | AND_S
    ;

or
    : OR
    | OR_S
    ;

functionType
    : MIN
    | MAX
    | SUM
    | CNT
    | AVG
    ;

// NOTE(review): 'primary' is not referenced by any other rule in this grammar —
// apparently dead; confirm before removing.
primary
    : literal
    | compoundIdentifier
    ;

// Metric reference: namespace with an optional {dim=val, ...} dimension block.
compoundIdentifier
    : namespace ('{' (dimensionList)? '}')?
    ;

namespace
    : identifier
    ;

dimensionList
    : dimension (',' dimension)*
    ;

dimension
    : identifier '=' ext_identifier
    ;

// Keywords are allowed as identifiers so names like 'avg' remain usable.
identifier
    : IDENTIFIER
    | keyword
    ;

// Dimension values additionally admit extended identifiers and bare integers.
ext_identifier
    : IDENTIFIER
    | EXT_IDENTIFIER
    | INTEGER
    | keyword
    ;

keyword
    : LT
    | LTE
    | GT
    | GTE
    | AND
    | OR
    | MIN
    | MAX
    | SUM
    | CNT
    | AVG
    ;

literal
    : INTEGER
    ;

period
    : INTEGER
    ;

repeat
    : INTEGER
    ;

// ---------------- Lexer rules ----------------
// Word-form tokens are case-insensitive via per-letter character classes.

LT
    : [lL][tT]
    ;

LT_S
    : '<'
    ;

LTE
    : [lL][tT][eE]
    ;

LTE_S
    : '<='
    ;

GT
    : [gG][tT]
    ;

GT_S
    : '>'
    ;

GTE
    : [gG][tT][eE]
    ;

GTE_S
    : '>='
    ;

AND
    : [aA][nN][dD]
    ;

AND_S
    : '&&'
    ;

OR
    : [oO][rR]
    ;

OR_S
    : '||'
    ;

MIN
    : [mM][iI][nN]
    ;

MAX
    : [mM][aA][xX]
    ;

SUM
    : [sS][uU][mM]
    ;

// NOTE(review): token is named CNT but matches the word 'count'.
CNT
    : [cC][oO][uU][nN][tT]
    ;

AVG
    : [aA][vV][gG]
    ;

INTEGER
    : DIGIT+
    ;

// Identifiers must start with a letter or underscore.
IDENTIFIER
    : (LETTER|UNDERSCORE) (LETTER|DIGIT|UNDERSCORE|DASH|PERIOD)*
    ;

// Extended identifiers may also start with a digit, dash or period.
EXT_IDENTIFIER
    : (LETTER|DIGIT|UNDERSCORE|DASH|PERIOD)+
    ;

fragment
LETTER
    : '\u0041'..'\u005a' // A-Z
    | '\u0061'..'\u007a' // a-z
    ;

fragment
DIGIT
    : '\u0030'..'\u0039' // 0-9
    ;

fragment
UNDERSCORE
    : '\u005f' // _
    ;

fragment
DASH
    : '-'
    ;

fragment
PERIOD
    : '.'
    ;

// Whitespace is ignored.
WS : [ \t\r\n]+ -> skip ;

View File

@ -0,0 +1,29 @@
package com.hpcloud.mon.common.model;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import java.util.List;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.model.Namespaces;
@Test
public class NamespacesTest {
  // A namespace with no required user-defined dimensions yields an empty list
  // (never null).
  public void shouldReturnEmptyForRequiredUserDefinedDimensions() {
    List<String> list = Namespaces.getRequiredDimensions("userdefined", null);
    assertTrue(list.isEmpty());
  }

  // A UUID-shaped instance_id is accepted; a same-length non-UUID string is not.
  public void shouldValidateComputeInstanceIds() {
    assertTrue(Namespaces.isValidDimensionValue("hpcs.compute", "instance_id",
        "1830d423-83cb-4958-b273-e84bafebf14e"));
    assertFalse(Namespaces.isValidDimensionValue("hpcs.compute", "instance_id",
        "aaaaaaaaaaaaaaaaaaab273ddddddddddddd"));
  }

  public void shouldValidateObjectStoreMetricName() {
    assertTrue(Namespaces.isValidMetricname("hpcs.object-store", "project_write_bytes"));
  }
}

View File

@ -0,0 +1,133 @@
package com.hpcloud.mon.common.model.alarm;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertTrue;
import java.util.List;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
/**
 * Tests compound alarm-expression parsing and boolean evaluation.
 *
 * @author Jonathan Halterman
 */
@Test
public class AlarmExpressionTest {
  // Parses "A and B" and verifies both sub-expressions: function, namespace,
  // dimensions, operator, threshold, period and periods.
  public void shouldParseExpression() {
    AlarmExpression expr = new AlarmExpression(
        "avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 and avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
    List<AlarmSubExpression> alarms = expr.getSubExpressions();
    AlarmSubExpression expected1 = new AlarmSubExpression(AggregateFunction.AVG,
        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
            .put("instance_id", "5")
            .put("metric_name", "cpu")
            .put("device", "1")
            .build()), AlarmOperator.GT, 5, 1, 3);
    AlarmSubExpression expected2 = new AlarmSubExpression(AggregateFunction.AVG,
        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
            .put("flavor_id", "3")
            .put("metric_name", "mem")
            .build()), AlarmOperator.LT, 4, 2, 3);
    assertEquals(alarms.get(0), expected1);
    assertEquals(alarms.get(1), expected2);
  }

  // NOTE(review): byte-for-byte identical to shouldParseExpression — same input
  // and same expectations. Looks like a copy/paste leftover; confirm whether a
  // "without type" input was intended here.
  public void shouldParseExpressionWithoutType() {
    AlarmExpression expr = new AlarmExpression(
        "avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 and avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
    List<AlarmSubExpression> alarms = expr.getSubExpressions();
    AlarmSubExpression expected1 = new AlarmSubExpression(AggregateFunction.AVG,
        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
            .put("instance_id", "5")
            .put("metric_name", "cpu")
            .put("device", "1")
            .build()), AlarmOperator.GT, 5, 1, 3);
    AlarmSubExpression expected2 = new AlarmSubExpression(AggregateFunction.AVG,
        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
            .put("flavor_id", "3")
            .put("metric_name", "mem")
            .build()), AlarmOperator.LT, 4, 2, 3);
    assertEquals(alarms.get(0), expected1);
    assertEquals(alarms.get(1), expected2);
  }

  // Evaluates "a or (b and c)" under several truth assignments for the three
  // sub-expressions.
  public void shouldEvaluateExpression() {
    AlarmExpression expr = new AlarmExpression(
        "sum(hpcs.compute{instance_id=5,metric_name=disk}, 1) > 33 or (avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3 and avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3)");
    List<AlarmSubExpression> alarms = expr.getSubExpressions();
    AlarmSubExpression alarm1 = alarms.get(0);
    AlarmSubExpression alarm2 = alarms.get(1);
    AlarmSubExpression alarm3 = alarms.get(2);
    // a true -> whole expression true
    assertTrue(expr.evaluate(ImmutableMap.<AlarmSubExpression, Boolean>builder()
        .put(alarm1, true)
        .put(alarm2, false)
        .put(alarm3, false)
        .build()));
    // b and c true -> true
    assertTrue(expr.evaluate(ImmutableMap.<AlarmSubExpression, Boolean>builder()
        .put(alarm1, false)
        .put(alarm2, true)
        .put(alarm3, true)
        .build()));
    // only c true -> false
    assertFalse(expr.evaluate(ImmutableMap.<AlarmSubExpression, Boolean>builder()
        .put(alarm1, false)
        .put(alarm2, false)
        .put(alarm3, true)
        .build()));
    // only b true -> false
    assertFalse(expr.evaluate(ImmutableMap.<AlarmSubExpression, Boolean>builder()
        .put(alarm1, false)
        .put(alarm2, true)
        .put(alarm3, false)
        .build()));
  }

  // When period and 'times' are omitted, period defaults to 60 and periods to 1.
  public void shouldDefaultPeriodAndPeriods() {
    AlarmExpression expr = new AlarmExpression("avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}) > 5");
    AlarmSubExpression alarm = expr.getSubExpressions().get(0);
    assertEquals(alarm.getPeriod(), 60);
    assertEquals(alarm.getPeriods(), 1);
  }

  // Evaluating with a sub-expression that is not part of the expression must fail.
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void shouldThrowOnEvaluateInvalidSubExpressions() {
    AlarmExpression expr = new AlarmExpression(
        "avg(hpcs.compute{instance_id=5,metric_name=cpu,device=2}, 1) > 5 times 3 and avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
    expr.evaluate(ImmutableMap.<AlarmSubExpression, Boolean>builder()
        .put(
            new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute",
                ImmutableMap.<String, String>builder()
                    .put("flavor_id", "3")
                    .put("metric_name", "mem")
                    .build()), AlarmOperator.LT, 4, 2, 3), true)
        .build());
  }

  // NOTE(review): disabled test — asserts that logically-equivalent expressions
  // (reordered operands, word vs. symbol operators) compare equal. Confirm
  // whether expression equality is still an intended feature before deleting.
  @Test(enabled = false)
  public void testExpressionEquality() {
    AlarmExpression expr1 = new AlarmExpression(
        "avg(hpcs.compute{instance_id=5,metric_name=cpu,device=a}, 1) lt 5 times 3 and avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
    AlarmExpression expr2 = new AlarmExpression(
        "avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) gt 3 times 3 && avg(hpcs.compute{instance_id=5,metric_name=cpu,device=a}, 1) lt 5 times 3");
    assertEquals(expr1, expr2);
    AlarmExpression expr3 = new AlarmExpression(
        "avg(hpcs.compute{instance_id=5,metric_name=cpu,device=a}, 1) lt 5 times 444 and avg(hpcs.compute{flavor_id=3,metric_name=mem}, 2) < 4 times 3");
    assertNotEquals(expr1, expr3);
  }
}

View File

@ -0,0 +1,152 @@
package com.hpcloud.mon.common.model.alarm;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.hpcloud.mon.common.model.alarm.AggregateFunction;
import com.hpcloud.mon.common.model.alarm.AlarmExpression;
import com.hpcloud.mon.common.model.alarm.AlarmOperator;
import com.hpcloud.mon.common.model.alarm.AlarmSubExpression;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
/**
 * Tests parsing and evaluation of single (non-compound) alarm sub-expressions.
 *
 * @author Jonathan Halterman
 */
@Test
public class AlarmSubExpressionTest {
  // Dimensions may appear in any order and with whitespace after commas.
  public void shouldParseExpression() {
    AlarmSubExpression expr = AlarmSubExpression.of("avg(hpcs.compute{metric_name=cpu, device=1, instance_id=5}, 1) > 5 times 3");
    AlarmSubExpression expected = new AlarmSubExpression(AggregateFunction.AVG,
        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
            .put("instance_id", "5")
            .put("metric_name", "cpu")
            .put("device", "1")
            .build()), AlarmOperator.GT, 5, 1, 3);
    assertEquals(expr, expected);
  }

  public void shouldParseExpressionNoType() {
    AlarmSubExpression expr = AlarmSubExpression.of("avg(hpcs.compute{instance_id=5,metric_name=cpu,device=1}, 1) > 5 times 3");
    AlarmSubExpression expected = new AlarmSubExpression(AggregateFunction.AVG,
        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
            .put("instance_id", "5")
            .put("metric_name", "cpu")
            .put("device", "1")
            .build()), AlarmOperator.GT, 5, 1, 3);
    assertEquals(expr, expected);
  }

  // Without an explicit function, '>' defaults to MAX with period 60, periods 1.
  public void shouldParseExpressionWithoutFunctionGT() {
    AlarmSubExpression expr = AlarmSubExpression.of("hpcs.compute{metric_name=cpu, device=1, instance_id=5} > 5");
    AlarmSubExpression expected = new AlarmSubExpression(AggregateFunction.MAX,
        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
            .put("instance_id", "5")
            .put("metric_name", "cpu")
            .put("device", "1")
            .build()), AlarmOperator.GT, 5, 60, 1);
    assertEquals(expr, expected);
  }

  // Without an explicit function, '<' defaults to MIN with period 60, periods 1.
  public void shouldParseExpressionWithoutFunctionLT() {
    AlarmSubExpression expr = AlarmSubExpression.of("hpcs.compute{metric_name=cpu, device=1, instance_id=5} < 5");
    AlarmSubExpression expected = new AlarmSubExpression(AggregateFunction.MIN,
        new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
            .put("instance_id", "5")
            .put("metric_name", "cpu")
            .put("device", "1")
            .build()), AlarmOperator.LT, 5, 60, 1);
    assertEquals(expr, expected);
  }

  // evaluate() applies the operator to a concrete measurement.
  public void shouldEvaluateExpression() {
    AlarmSubExpression expr = AlarmSubExpression.of("avg(hpcs.compute{metric_name=cpu, device=1, instance_id=5}, 1) > 5 times 3");
    assertTrue(expr.evaluate(6));
    assertFalse(expr.evaluate(4));
  }

  public void shouldParseExpressionWithoutSubject() {
    AlarmSubExpression expr = AlarmSubExpression.of("avg(hpcs.compute{metric_name=cpu, instance_id=5}, 1) > 5 times 3");
    assertEquals(expr,
        new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute",
            ImmutableMap.<String, String>builder()
                .put("instance_id", "5")
                .put("metric_name", "cpu")
                .build()), AlarmOperator.GT, 5, 1, 3));
  }

  // Function names are case-insensitive ("AvG").
  public void shouldParseExpressionCaseInsensitiveFunc() {
    AlarmSubExpression expr = AlarmSubExpression.of("AvG(hpcs.compute{metric_name=cpu, instance_id=5}, 1) > 5 times 3");
    assertEquals(expr,
        new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute",
            ImmutableMap.<String, String>builder()
                .put("instance_id", "5")
                .put("metric_name", "cpu")
                .build()), AlarmOperator.GT, 5, 1, 3));
  }

  // Word-form operators are case-insensitive ("Gt").
  public void shouldParseExpressionCaseInsensitiveOp() {
    AlarmSubExpression expr = AlarmSubExpression.of("avg(hpcs.compute{metric_name=cpu, instance_id=5}, 1) Gt 5 times 3");
    assertEquals(expr,
        new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute",
            ImmutableMap.<String, String>builder()
                .put("instance_id", "5")
                .put("metric_name", "cpu")
                .build()), AlarmOperator.GT, 5, 1, 3));
  }

  // Grammar keywords (e.g. "avg") are usable as a namespace...
  public void shouldParseExpressionKeywordNamespace() {
    AlarmSubExpression expr = AlarmSubExpression.of("avg(avg{metric_name=cpu, instance_id=5}, 1) > 5 times 3");
    assertEquals(expr, new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("avg",
        ImmutableMap.<String, String>builder()
            .put("instance_id", "5")
            .put("metric_name", "cpu")
            .build()), AlarmOperator.GT, 5, 1, 3));
  }

  // ...and as a dimension value.
  public void shouldParseExpressionKeywordMetricType() {
    AlarmSubExpression expr = AlarmSubExpression.of("avg(hpcs.compute{metric_name=avg, instance_id=5}, 1) > 5 times 3");
    assertEquals(expr,
        new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute",
            ImmutableMap.<String, String>builder()
                .put("instance_id", "5")
                .put("metric_name", "avg")
                .build()), AlarmOperator.GT, 5, 1, 3));
  }

  public void shouldDefaultPeriodAndPeriods() {
    AlarmExpression expr = new AlarmExpression("avg(hpcs.compute{metric_name=cpu, device=1, instance_id=5}) > 5");
    AlarmSubExpression alarm = expr.getSubExpressions().get(0);
    assertEquals(alarm.getPeriod(), 60);
    assertEquals(alarm.getPeriods(), 1);
  }

  // instance_id appears twice -> rejected.
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void shouldThrowOnDuplicateDimensions() {
    AlarmSubExpression.of("avg(hpcs.compute{metric_name=cpu, device=1, instance_id=5, instance_uuid=4, instance_id=4}) > 5");
  }

  // of() accepts only a single sub-expression; compound expressions are rejected.
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void shouldThrowOnCompoundExpressions() {
    AlarmSubExpression.of("avg(hpcs.compute{metric_name=cpu, device=1, instance_id=5}) > 5 or avg(hpcs.compute{metric_name=mem, instance_id=5}) > 5");
  }

  // getExpression() produces a canonical form: sorted dimensions, decimal threshold.
  public void shouldGetExpression() {
    assertEquals(AlarmSubExpression.of("avg(hpcs.compute{metric_name=cpu, device=1}) > 5").getExpression(),
        "avg(hpcs.compute{device=1, metric_name=cpu}) > 5.0");
    assertEquals(AlarmSubExpression.of("avg(hpcs.compute{metric_name=cpu, device=1}, 45) > 5 times 4").getExpression(),
        "avg(hpcs.compute{device=1, metric_name=cpu}, 45) > 5.0 times 4");
  }
}

View File

@ -0,0 +1,166 @@
package com.hpcloud.mon.common.model.metric;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import java.util.Arrays;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.model.metric.CollectdMetric;
import com.hpcloud.mon.common.model.metric.CollectdMetrics;
import com.hpcloud.mon.common.model.metric.FlatMetric;
/**
 * Tests conversion of collectd "putval" JSON payloads into FlatMetrics.
 *
 * @author Jonathan Halterman
 */
@Test
public class CollectdMetricsTest {
  // Sample collectd payloads. Hostnames of the form "...az2...:instance-XXXXXXXX"
  // encode availability zone and a hex instance id; one sample uses a bare UUID host.
  private String interfaceMetric = "{\"putval\":{\"values\":[287122343,345751104],\"dstypes\":[\"derive\",\"derive\"],\"dsnames\":[\"rx\",\"tx\"],\"time\":1364251708.702,\"interval\":60.000,\"host\":\"dx-aw1rdb1-manage0001.rndb.az2.hpcloud.net:instance-0003472\",\"plugin\":\"libvirt\",\"plugin_instance\":\"\",\"type\":\"if_octets\",\"type_instance\":\"vnet0\"}}";
  private String cpuMetric = "{\"putval\":{\"values\":[50308210000000],\"dstypes\":[\"derive\"],\"dsnames\":[\"value\"],\"time\":1364251768.654,\"interval\":60.000,\"host\":\"dx-aw1rdb1-manage0001.rndb.az2.hpcloud.net:instance-0003472\",\"plugin\":\"libvirt\",\"plugin_instance\":\"\",\"type\":\"virt_cpu_total\",\"type_instance\":\"\"}}";
  private String diskMetric = "{\"putval\":{\"values\":[36184,51182963],\"dstypes\":[\"derive\",\"derive\"],\"dsnames\":[\"read\",\"write\"],\"time\":1365802618.809,\"interval\":60.000,\"host\":\"instance-000d65f3\",\"plugin\":\"libvirt\",\"plugin_instance\":\"\",\"type\":\"disk_ops\",\"type_instance\":\"vda\"}}";
  private String unsupportedMetricType = "{\"putval\":{\"values\":[36184,51182963],\"dstypes\":[\"derive\",\"derive\"],\"dsnames\":[\"read\",\"write\"],\"time\":1365802618.809,\"interval\":60.000,\"host\":\"instance-000d65f3\",\"plugin\":\"libvirt\",\"plugin_instance\":\"\",\"type\":\"virt_vcpu\",\"type_instance\":\"1\"}}";
  private String bockMetric = "{\"putval\":{\"values\":[287122343,345751104],\"dstypes\":[\"derive\",\"derive\"],\"dsnames\":[\"read\",\"write\"],\"time\":1364251708.702,\"interval\":60.000,\"host\":\"dx-aw1rdb1-manage0001.rndb.az2.hpcloud.net:instance-0003472\",\"plugin\":\"bock\",\"plugin_instance\":\"vda\",\"type\":\"disk_octets\",\"type_instance\":\"\"}}";
  private String bockMetric2 = "{\"putval\":{\"values\":[72120],\"dstypes\":[\"counter\"],\"dsnames\":[\"value\"],\"time\":1375300095.416,\"interval\":60.000,\"host\":\"nv-aw2az3-compute0254:instance-000f50e7\",\"plugin\":\"bock\",\"plugin_instance\":\"vdf\",\"type\":\"counter\",\"type_instance\":\"\"}}";
  private String uuidHost = "{\"putval\":{\"values\":[287122343,345751104],\"dstypes\":[\"derive\",\"derive\"],\"dsnames\":[\"rx\",\"tx\"],\"time\":1364251708.702,\"interval\":60.000,\"host\":\"125ddf5e-79bb-4ebc-ab3e-a8539be799ff\",\"plugin\":\"libvirt\",\"plugin_instance\":\"\",\"type\":\"if_octets\",\"type_instance\":\"vnet0\"}}";

  // 67361 == 0x10721: the host name embeds the instance id in zero-padded hex.
  public void testHostForInstanceId() {
    assertEquals(CollectdMetrics.hostForInstanceId(67361), "instance-00010721");
  }

  // libvirt if_octets / virt_cpu_total / disk_ops payloads map to hpcs.compute
  // metrics; rx/tx and read/write data sources each become a separate FlatMetric.
  @SuppressWarnings("serial")
  public void testNovaMetricsToFlatMetrics() {
    SortedMap<String, String> expectedDimensions1 = new TreeMap<String, String>() {
      {
        put("metric_name", "net_in_bytes_count");
        put("device", "eth0");
        put("az", "2");
        put("instance_id", "13426");
      }
    };
    SortedMap<String, String> expectedDimensions2 = new TreeMap<String, String>() {
      {
        put("metric_name", "net_out_bytes_count");
        put("device", "eth0");
        put("az", "2");
        put("instance_id", "13426");
      }
    };
    List<FlatMetric> expected = Arrays.asList(new FlatMetric("hpcs.compute", expectedDimensions1,
        1364251708, 287122343), new FlatMetric("hpcs.compute", expectedDimensions2, 1364251708,
        345751104));
    List<FlatMetric> metrics = CollectdMetrics.toFlatMetrics(interfaceMetric.getBytes());
    assertEquals(metrics, expected);
    expectedDimensions1 = new TreeMap<String, String>() {
      {
        put("metric_name", "cpu_total_time");
        put("az", "2");
        put("instance_id", "13426");
      }
    };
    expected = Arrays.asList(new FlatMetric("hpcs.compute", expectedDimensions1, 1364251768,
        50308210000000L));
    metrics = CollectdMetrics.toFlatMetrics(cpuMetric.getBytes());
    assertEquals(metrics, expected);
    expectedDimensions1 = new TreeMap<String, String>() {
      {
        put("metric_name", "disk_read_ops_count");
        put("device", "vda");
        put("instance_id", "878067");
      }
    };
    expectedDimensions2 = new TreeMap<String, String>() {
      {
        put("metric_name", "disk_write_ops_count");
        put("device", "vda");
        put("instance_id", "878067");
      }
    };
    expected = Arrays.asList(
        new FlatMetric("hpcs.compute", expectedDimensions1, 1365802618, 36184), new FlatMetric(
            "hpcs.compute", expectedDimensions2, 1365802618, 51182963));
    metrics = CollectdMetrics.toFlatMetrics(diskMetric.getBytes());
    assertEquals(metrics, expected);
  }

  // A bare-UUID host becomes the instance_id verbatim, and no az dimension is set.
  @SuppressWarnings("serial")
  public void shouldConvertUUIDHostMetrics() {
    SortedMap<String, String> expectedDimensions1 = new TreeMap<String, String>() {
      {
        put("metric_name", "net_in_bytes_count");
        put("device", "eth0");
        put("instance_id", "125ddf5e-79bb-4ebc-ab3e-a8539be799ff");
      }
    };
    SortedMap<String, String> expectedDimensions2 = new TreeMap<String, String>() {
      {
        put("metric_name", "net_out_bytes_count");
        put("device", "eth0");
        put("instance_id", "125ddf5e-79bb-4ebc-ab3e-a8539be799ff");
      }
    };
    List<FlatMetric> expected = Arrays.asList(new FlatMetric("hpcs.compute", expectedDimensions1,
        1364251708, 287122343), new FlatMetric("hpcs.compute", expectedDimensions2, 1364251708,
        345751104));
    List<FlatMetric> metrics = CollectdMetrics.toFlatMetrics(uuidHost.getBytes());
    assertEquals(metrics, expected);
  }

  // Unsupported collectd types (virt_vcpu) convert to null, not an empty list.
  public void toFlatMetricsShouldReturnNullForUnsupportedMetricTypes() {
    assertNull(CollectdMetrics.toFlatMetrics(unsupportedMetricType.getBytes()));
  }

  // fromJson preserves all raw collectd fields without namespace mapping.
  public void testFromJson() {
    assertEquals(CollectdMetrics.fromJson(interfaceMetric.getBytes()), new CollectdMetric(
        "dx-aw1rdb1-manage0001.rndb.az2.hpcloud.net:instance-0003472", "libvirt", "", "if_octets",
        "vnet0", 1364251708, 60, new String[] { "rx", "tx" }, new String[] { "derive", "derive" },
        new double[] { 287122343, 345751104 }));
  }

  // The bock plugin maps to the hpcs.volume namespace with a "disk" dimension.
  @SuppressWarnings("serial")
  public void testBockMetricsToFlatMetrics() {
    SortedMap<String, String> expectedDimensions1 = new TreeMap<String, String>() {
      {
        put("metric_name", "volume_read_bytes");
        put("az", "2");
        put("disk", "vda");
        put("instance_id", "13426");
      }
    };
    SortedMap<String, String> expectedDimensions2 = new TreeMap<String, String>() {
      {
        put("metric_name", "volume_write_bytes");
        put("az", "2");
        put("disk", "vda");
        put("instance_id", "13426");
      }
    };
    List<FlatMetric> expected = Arrays.asList(new FlatMetric("hpcs.volume", expectedDimensions1,
        1364251708, 287122343), new FlatMetric("hpcs.volume", expectedDimensions2, 1364251708,
        345751104));
    List<FlatMetric> metrics = CollectdMetrics.toFlatMetrics(bockMetric.getBytes());
    assertEquals(metrics, expected);
  }

  // bock "counter" type maps to volume_idle_time.
  @SuppressWarnings("serial")
  public void testBockMetricToFlatMetrics2() {
    SortedMap<String, String> expectedDimensions = new TreeMap<String, String>() {
      {
        put("metric_name", "volume_idle_time");
        put("az", "3");
        put("disk", "vdf");
        put("instance_id", "1003751");
      }
    };
    List<FlatMetric> expected = Arrays.asList(new FlatMetric("hpcs.volume", expectedDimensions,
        1375300095, 72120));
    List<FlatMetric> metrics = CollectdMetrics.toFlatMetrics(bockMetric2.getBytes());
    assertEquals(metrics, expected);
  }
}

View File

@ -0,0 +1,113 @@
package com.hpcloud.mon.common.model.metric;
import static org.testng.Assert.assertEquals;
import java.io.UnsupportedEncodingException;
import java.util.SortedMap;
import java.util.TreeMap;
import org.testng.annotations.Test;
import com.hpcloud.mon.common.model.metric.FlatMetric;
import com.hpcloud.mon.common.model.metric.FlatMetrics;
/**
 * Tests JSON round-tripping of FlatMetrics, including UTF-8 and escaped-unicode
 * dimension values.
 *
 * @author Jonathan Halterman
 */
@Test
public class FlatMetricsTest {
  // Scalar value serializes with dimensions in sorted key order and value as a double.
  public void shouldSerializeValue() {
    SortedMap<String, String> dimensions = new TreeMap<String, String>();
    dimensions.put("metric_name", "cpu");
    dimensions.put("instance_id", "123");
    FlatMetric metric = new FlatMetric("hpcs.compute", dimensions, 123345, 5);
    String json = FlatMetrics.toJson(metric);
    assertEquals(
        json,
        "{\"namespace\":\"hpcs.compute\",\"dimensions\":{\"instance_id\":\"123\",\"metric_name\":\"cpu\"},\"timestamp\":123345,\"value\":5.0}");
  }

  // A time series serializes as "time_values": [[ts, v], ...] with integer timestamps.
  public void shouldSerializeTimeValues() {
    SortedMap<String, String> dimensions = new TreeMap<String, String>();
    dimensions.put("metric_name", "cpu");
    dimensions.put("device", "2");
    dimensions.put("instance_id", "123");
    FlatMetric metric = new FlatMetric("hpcs.compute", dimensions, 123345, new double[][] {
        { 123, 5 }, { 456, 6 } });
    String json = FlatMetrics.toJson(metric);
    assertEquals(
        json,
        "{\"namespace\":\"hpcs.compute\",\"dimensions\":{\"device\":\"2\",\"instance_id\":\"123\",\"metric_name\":\"cpu\"},\"timestamp\":123345,\"time_values\":[[123,5.0],[456,6.0]]}");
  }

  // toJson followed by fromJson yields an equal metric.
  public void shouldSerializeAndDeserialize() {
    SortedMap<String, String> dimensions = new TreeMap<String, String>();
    dimensions.put("metric_name", "cpu");
    dimensions.put("device", "2");
    dimensions.put("instance_id", "123");
    FlatMetric expected = new FlatMetric("hpcs.compute", dimensions, 123345, new double[][] {
        { 123, 5 }, { 456, 6 } });
    FlatMetric metric = FlatMetrics.fromJson(FlatMetrics.toJson(expected).getBytes());
    assertEquals(metric, expected);
  }

  // Non-ASCII dimension values serialize unescaped.
  public void shouldSerializeValueUTF() {
    SortedMap<String, String> dimensions = new TreeMap<String, String>();
    dimensions.put("metric_name", "foôbár");
    dimensions.put("instance_id", "123");
    FlatMetric metric = new FlatMetric("hpcs.compute", dimensions, 123345, 5);
    String json = FlatMetrics.toJson(metric);
    assertEquals(
        json,
        "{\"namespace\":\"hpcs.compute\",\"dimensions\":{\"instance_id\":\"123\",\"metric_name\":\"foôbár\"},\"timestamp\":123345,\"value\":5.0}");
  }

  // Round trip with explicit UTF-8 bytes.
  public void shouldSerializeAndDeserializeUTF8() throws UnsupportedEncodingException {
    SortedMap<String, String> dimensions = new TreeMap<String, String>();
    dimensions.put("metric_name", "foôbár");
    dimensions.put("device", "2");
    dimensions.put("instance_id", "123");
    FlatMetric expected = new FlatMetric("hpcs.compute", dimensions, 123345, new double[][] {
        { 123, 5 }, { 456, 6 } });
    FlatMetric metric;
    metric = FlatMetrics.fromJson(FlatMetrics.toJson(expected).getBytes("UTF-8"));
    assertEquals(metric, expected);
  }

  // Same as above with \\uXXXX source escapes for the same characters.
  public void shouldSerializeAndDeserializeUTF8_2() throws UnsupportedEncodingException {
    SortedMap<String, String> dimensions = new TreeMap<String, String>();
    dimensions.put("metric_name", "fo\u00f4b\u00e1r");
    dimensions.put("device", "2");
    dimensions.put("instance_id", "123");
    FlatMetric expected = new FlatMetric("hpcs.compute", dimensions, 123345, new double[][] {
        { 123, 5 }, { 456, 6 } });
    FlatMetric metric;
    metric = FlatMetrics.fromJson(FlatMetrics.toJson(expected).getBytes("UTF-8"));
    assertEquals(metric, expected);
  }

  // Escaped and literal spellings of the same characters round-trip to equal metrics.
  public void shouldSerializeAndDeserializeUTF8_3() throws UnsupportedEncodingException {
    SortedMap<String, String> dimensions = new TreeMap<String, String>();
    dimensions.put("metric_name", "fo\u00f4b\u00e1r");
    dimensions.put("device", "2");
    dimensions.put("instance_id", "123");
    SortedMap<String, String> dimensions2 = new TreeMap<String, String>();
    dimensions2.put("metric_name", "foôbár");
    dimensions2.put("device", "2");
    dimensions2.put("instance_id", "123");
    FlatMetric expected_escaped = new FlatMetric("hpcs.compute", dimensions, 123345,
        new double[][] { { 123, 5 }, { 456, 6 } });
    FlatMetric expected_nonescaped = new FlatMetric("hpcs.compute", dimensions2, 123345,
        new double[][] { { 123, 5 }, { 456, 6 } });
    FlatMetric metric;
    metric = FlatMetrics.fromJson(FlatMetrics.toJson(expected_escaped).getBytes("UTF-8"));
    assertEquals(metric, expected_nonescaped);
  }
}

1
java/mon-persistence/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

View File

@ -0,0 +1,33 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <!-- Version is inherited from mon-common; ${computedVersion} is supplied by the parent build. -->
  <parent>
    <groupId>com.hpcloud</groupId>
    <artifactId>mon-common</artifactId>
    <version>${computedVersion}</version>
  </parent>

  <!-- JDBI-based persistence helpers shared by the mon-* modules. -->
  <artifactId>mon-persistence</artifactId>
  <packaging>jar</packaging>

  <dependencies>
    <dependency>
      <groupId>com.hpcloud</groupId>
      <artifactId>mon-util</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.jdbi</groupId>
      <artifactId>jdbi</artifactId>
      <version>2.48.2</version>
    </dependency>

    <!-- Test dependencies -->
    <dependency>
      <groupId>com.hpcloud</groupId>
      <artifactId>mon-testing</artifactId>
      <version>${project.version}</version>
      <scope>test</scope>
    </dependency>
  </dependencies>
</project>

View File

@ -0,0 +1,23 @@
package com.hpcloud.persistence;
import org.skife.jdbi.v2.DBI;
import com.google.inject.AbstractModule;
import com.google.inject.Provider;
/**
 * Support module for binding persistent types.
 *
 * @author Jonathan Halterman
 */
public abstract class AbstractPersistenceModule extends AbstractModule {
  /**
   * Binds {@code sqlType} so that each injection resolves lazily to an on-demand
   * JDBI implementation obtained from the injector's {@link DBI} binding.
   */
  protected <T> void bindSqlType(final Class<T> sqlType) {
    // Defer DBI resolution until injection time via a provider.
    final Provider<DBI> dbiProvider = getProvider(DBI.class);
    Provider<T> onDemandProvider = new Provider<T>() {
      @Override
      public T get() {
        DBI dbi = dbiProvider.get();
        return dbi.onDemand(sqlType);
      }
    };
    bind(sqlType).toProvider(onDemandProvider);
  }
}

View File

@ -0,0 +1,147 @@
package com.hpcloud.persistence;
/*
* Copyright 2004 - 2011 Brian McCallister
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.InvocationTargetException;
import java.math.BigDecimal;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.ResultSetMapper;
/**
* A result set mapper which maps the fields in a statement into a JavaBean. This uses the JDK's
* built in bean mapping facilities, so it does not support nested properties.
*
* <p>
* Additionally this bean mapper maps pascal case named columns to camel case named bean properties.
*/
public class BeanMapper<T> implements ResultSetMapper<T> {
  private final Class<T> type;
  private final Map<String, PropertyDescriptor> properties = new HashMap<String, PropertyDescriptor>();

  /**
   * Creates a mapper that maps result set rows to new instances of {@code type}.
   *
   * @throws IllegalArgumentException if {@code type} cannot be introspected
   */
  public BeanMapper(Class<T> type) {
    this.type = type;
    try {
      BeanInfo info = Introspector.getBeanInfo(type);
      for (PropertyDescriptor descriptor : info.getPropertyDescriptors())
        properties.put(descriptor.getName(), descriptor);
    } catch (IntrospectionException e) {
      throw new IllegalArgumentException(e);
    }
  }

  /** Converts an underscore-delimited name, such as SOME_FIELD, to camel case (someField). */
  static String pascalCaseToCamelCase(String str) {
    StringBuilder sb = new StringBuilder();
    String[] tokens = str.split("_");
    for (int i = 0; i < tokens.length; i++) {
      String s = tokens[i];
      char c = s.charAt(0);
      // First token starts lower case; subsequent tokens are capitalized.
      sb.append(i == 0 ? Character.toLowerCase(c) : Character.toUpperCase(c));
      if (s.length() > 1)
        sb.append(s.substring(1, s.length()).toLowerCase());
    }
    return sb.toString();
  }

  /**
   * Maps the current row of {@code rs} to a new {@code T}, writing each column whose camel-cased
   * label matches a bean property name.
   *
   * @throws IllegalArgumentException if the bean cannot be instantiated or a property cannot be
   *           written
   */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public T map(int row, ResultSet rs, StatementContext ctx) throws SQLException {
    T bean;
    try {
      bean = type.newInstance();
    } catch (Exception e) {
      throw new IllegalArgumentException(String.format("A bean, %s, was mapped "
          + "which was not instantiable", type.getName()), e);
    }

    ResultSetMetaData metadata = rs.getMetaData();
    for (int i = 1; i <= metadata.getColumnCount(); ++i) {
      String name = pascalCaseToCamelCase(metadata.getColumnLabel(i).toLowerCase());
      PropertyDescriptor descriptor = properties.get(name);
      if (descriptor != null) {
        Class<?> type = descriptor.getPropertyType();
        Object value;
        // Select the typed accessor matching the property type; both wrapper and primitive
        // property types are handled by each branch.
        if (type.isAssignableFrom(Boolean.class) || type.isAssignableFrom(boolean.class)) {
          value = rs.getBoolean(i);
        } else if (type.isAssignableFrom(Byte.class) || type.isAssignableFrom(byte.class)) {
          value = rs.getByte(i);
        } else if (type.isAssignableFrom(Short.class) || type.isAssignableFrom(short.class)) {
          value = rs.getShort(i);
        } else if (type.isAssignableFrom(Integer.class) || type.isAssignableFrom(int.class)) {
          value = rs.getInt(i);
        } else if (type.isAssignableFrom(Long.class) || type.isAssignableFrom(long.class)) {
          value = rs.getLong(i);
        } else if (type.isAssignableFrom(Float.class) || type.isAssignableFrom(float.class)) {
          value = rs.getFloat(i);
        } else if (type.isAssignableFrom(Double.class) || type.isAssignableFrom(double.class)) {
          value = rs.getDouble(i);
        } else if (type.isAssignableFrom(BigDecimal.class)) {
          value = rs.getBigDecimal(i);
        } else if (type.isAssignableFrom(Timestamp.class)) {
          value = rs.getTimestamp(i);
        } else if (type.isAssignableFrom(Time.class)) {
          value = rs.getTime(i);
        } else if (type.isAssignableFrom(Date.class)) {
          value = rs.getDate(i);
        } else if (type.isAssignableFrom(String.class)) {
          value = rs.getString(i);
        } else {
          value = rs.getObject(i);
        }

        // Primitive accessors return defaults for SQL NULL; restore null for wrapper types.
        if (rs.wasNull() && !type.isPrimitive()) {
          value = null;
        }

        if (type.isEnum() && value != null) {
          value = Enum.valueOf((Class) type, (String) value);
        }

        try {
          descriptor.getWriteMethod().invoke(bean, value);
        } catch (IllegalAccessException e) {
          throw new IllegalArgumentException(String.format("Unable to access setter for "
              + "property, %s", name), e);
        } catch (InvocationTargetException e) {
          throw new IllegalArgumentException(String.format("Invocation target exception trying to "
              + "invoke setter for the %s property", name), e);
        } catch (NullPointerException e) {
          // getWriteMethod() is null for read-only properties. String.valueOf avoids a secondary
          // NPE here when value itself is null (the original called value.toString()).
          throw new IllegalArgumentException(String.format("No appropriate method to "
              + "write value %s ", String.valueOf(value)), e);
        }
      }
    }

    return bean;
  }
}

View File

@ -0,0 +1,23 @@
package com.hpcloud.persistence;
import org.skife.jdbi.v2.ResultSetMapperFactory;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.ResultSetMapper;
/**
* Produces {@link BeanMapper instances}.
*
* @author Jonathan Halterman
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
public class BeanMapperFactory implements ResultSetMapperFactory {
  /** Accepts every type; all result sets are mapped via {@link BeanMapper}. */
  @Override
  public boolean accepts(Class type, StatementContext ctx) {
    return true;
  }

  /** Returns a new {@link BeanMapper} for the {@code type}. */
  @Override
  public ResultSetMapper mapperFor(Class type, StatementContext ctx) {
    return new BeanMapper(type);
  }
}

View File

@ -0,0 +1,32 @@
package com.hpcloud.persistence;
import java.io.Reader;
import java.sql.Clob;
/**
* Utilities for working with SQL data types.
*/
public final class DataTypes {
  private DataTypes() {
  }

  /**
   * Returns the String read from the {@code clob}, else null if the String could not be read
   * (including a null {@code clob}).
   */
  public static String toString(Clob clob) {
    try {
      Reader reader = clob.getCharacterStream();
      StringBuilder sb = new StringBuilder((int) clob.length());
      char[] buffer = new char[8192];
      int count;
      // Append only the chars actually read. The original appended the entire buffer on every
      // read, duplicating/garbling content whenever a read returned fewer chars than the buffer.
      while ((count = reader.read(buffer)) != -1)
        sb.append(buffer, 0, count);
      return sb.toString();
    } catch (Exception e) {
      // Best-effort contract: any failure yields null rather than an exception.
      return null;
    }
  }
}

View File

@ -0,0 +1,24 @@
package com.hpcloud.persistence;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.tweak.ResultSetMapper;
/**
* Mapper that accumulates key/value results in a map.
*
* @author Jonathan Halterman
*/
public class KeyValueMapper implements ResultSetMapper<Object> {
  /** Accumulated key/value pairs, populated as rows are mapped. */
  public final Map<String, String> map = new HashMap<String, String>();

  /**
   * Records column 1 as the key and column 2 as the value in {@link #map}. Always returns null
   * since results are accumulated rather than returned per row.
   */
  @Override
  public Object map(int index, ResultSet rs, StatementContext ctx) throws SQLException {
    String key = rs.getString(1);
    String value = rs.getString(2);
    map.put(key, value);
    return null;
  }
}

View File

@ -0,0 +1,40 @@
package com.hpcloud.persistence;
import java.util.Iterator;
import java.util.Map;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
/**
* Utilities for building and performing queries.
*
* @author Jonathan Halterman
*/
public final class SqlQueries {
  private SqlQueries() {
  }

  /**
   * Returns a map of key/value pairs for the given {@code keyValueSelectingSql} with the
   * {@code orderedParameters}.
   *
   * @param handle to execute statement against
   * @param keyValueSelectingSql statement that selects a key and value
   * @param orderedParameters ordered parameters to set against the {@code keyValueSelectingSql}
   * @return a map of key value pairs
   */
  public static Map<String, String> keyValuesFor(Handle handle, String keyValueSelectingSql,
      Object... orderedParameters) {
    Query<Map<String, Object>> query = handle.createQuery(keyValueSelectingSql);
    int position = 0;
    for (Object parameter : orderedParameters)
      query.bind(position++, parameter);

    // Draining the iterator forces every row through the mapper, which accumulates the pairs.
    KeyValueMapper mapper = new KeyValueMapper();
    Iterator<Object> rows = query.map(mapper).iterator();
    while (rows.hasNext())
      rows.next();
    return mapper.map;
  }
}

View File

@ -0,0 +1,64 @@
package com.hpcloud.persistence;
import java.util.List;
import java.util.Map;
/**
* Utilities for producing SQL statements.
*
* @author Jonathan Halterman
*/
public final class SqlStatements {
  private SqlStatements() {
  }

  /** Escapes {@code value} for embedding in a single-quoted SQL string literal. */
  private static String escaped(String value) {
    return value.replace("'", "''");
  }

  /**
   * Build a select statement that produces a dataset for the given {@code keyValues}. This
   * statement can be used to join expected values with these actual dataset values. Embedded
   * single quotes are doubled so the generated SQL remains valid. Example result:
   *
   * <pre>
   * select 'flavor_id' dimension_name, '123' value union all select 'image_id' dimension_name, '456' value
   * </pre>
   */
  public static String unionAllStatementFor(Map<String, String> keyValues, String keyFieldName,
      String valueFieldName) {
    StringBuilder sb = new StringBuilder();
    int propertyCount = 0;
    for (Map.Entry<String, String> kvEntry : keyValues.entrySet()) {
      if (propertyCount != 0)
        sb.append(" union all ");
      sb.append("select '")
          .append(escaped(kvEntry.getKey()))
          .append("' ")
          .append(keyFieldName)
          .append(", '")
          .append(escaped(kvEntry.getValue()))
          .append("' ")
          .append(valueFieldName);
      propertyCount++;
    }
    return sb.toString();
  }

  /**
   * Build a select statement that produces a dataset for the given {@code values}. This statement
   * can be used to join expected values with these actual dataset values. Embedded single quotes
   * are doubled so the generated SQL remains valid. Example result:
   *
   * <pre>
   * select '123' action_id union select '234' action_id
   * </pre>
   */
  public static String unionStatementFor(List<String> values, String valueFieldName) {
    StringBuilder sb = new StringBuilder();
    int valueCount = 0;
    for (String value : values) {
      if (valueCount != 0)
        sb.append(" union ");
      sb.append("select '").append(escaped(value)).append("' ").append(valueFieldName);
      valueCount++;
    }
    return sb.toString();
  }
}

View File

@ -0,0 +1,5 @@
/**
* Persistence related types.
*/
package com.hpcloud.persistence;

View File

@ -0,0 +1,47 @@
package com.hpcloud.persistence;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import org.skife.jdbi.v2.DBI;
import org.testng.annotations.Test;
import com.google.inject.AbstractModule;
import com.google.inject.Module;
import com.hpcloud.util.Injector;
@Test
public class AbstractPersistenceModuleTest {
  static class FakeDAO {
  }

  // Shared mock returned by the mocked DBI for any on-demand SQL object request.
  FakeDAO dao = mock(FakeDAO.class);

  // Binds a DBI mock whose onDemand(...) always yields the mock DAO above.
  Module dbModule = new AbstractModule() {
    @Override
    @SuppressWarnings("unchecked")
    protected void configure() {
      DBI db = mock(DBI.class);
      when(db.onDemand(any(Class.class))).thenReturn(dao);
      bind(DBI.class).toInstance(db);
    }
  };

  // Module under test: binds FakeDAO through AbstractPersistenceModule.bindSqlType.
  Module persistenceModule = new AbstractPersistenceModule() {
    @Override
    protected void configure() {
      bindSqlType(FakeDAO.class);
    }
  };

  /**
   * Asserts that instances provided via a persistence module make use of a provided Database
   * instance.
   */
  public void shouldGetSqlType() {
    // NOTE(review): Injector keeps static state; if other tests register modules this could
    // interact — confirm test isolation via Injector.reset() if flakiness appears.
    Injector.registerModules(dbModule, persistenceModule);
    assertEquals(Injector.getInstance(FakeDAO.class), dao);
  }
}

View File

@ -0,0 +1,13 @@
package com.hpcloud.persistence;
import static org.testng.Assert.*;
import org.testng.annotations.Test;
@Test
public class BeanMappeTest {
  // NOTE(review): class name appears to be a typo for "BeanMapperTest"; renaming requires
  // renaming the source file as well, so it is only flagged here.

  /** Verifies upper-snake-case column labels are converted to camelCase property names. */
  public void shouldConvertPascalCaseToCamelCase() {
    assertEquals(BeanMapper.pascalCaseToCamelCase("SOME_TEST"), "someTest");
    assertEquals(BeanMapper.pascalCaseToCamelCase("TEST"), "test");
    assertEquals(BeanMapper.pascalCaseToCamelCase("test"), "test");
  }
}

View File

@ -0,0 +1,33 @@
package com.hpcloud.persistence;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import static org.testng.Assert.*;
import org.testng.annotations.Test;
/**
* @author Jonathan Halterman
*/
@Test
public class SqlStatementsTest {
  /** Verifies key/value pairs render as a union-all select, in insertion order. */
  public void testUnionAllStatementFor() {
    // LinkedHashMap guarantees iteration order matches insertion order; the expected string
    // below depends on that order, which HashMap does not guarantee across JDKs.
    Map<String, String> dimensions = new LinkedHashMap<String, String>();
    dimensions.put("flavor_id", "937");
    dimensions.put("image_id", "12");

    assertEquals(
        SqlStatements.unionAllStatementFor(dimensions, "dimension_name", "value"),
        "select 'flavor_id' dimension_name, '937' value union all select 'image_id' dimension_name, '12' value");
  }

  /** Verifies values render as a union select in list order. */
  public void testUnionStatementFor() {
    List<String> actions = new ArrayList<String>();
    actions.add("123");
    actions.add("234");

    assertEquals(SqlStatements.unionStatementFor(actions, "action_id"),
        "select '123' action_id union select '234' action_id");
  }
}

1
java/mon-testing/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

65
java/mon-testing/pom.xml Normal file
View File

@ -0,0 +1,65 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.hpcloud</groupId>
<artifactId>mon-common</artifactId>
<version>${computedVersion}</version>
</parent>
<artifactId>mon-testing</artifactId>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.8.7</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<version>1.9.5</version>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>1.3.175</version>
</dependency>
<dependency>
<groupId>org.jodah</groupId>
<artifactId>concurrentunit</artifactId>
<version>0.3.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<groups>foo</groups>
<excludedGroups>performance,functional,integration,database,slow</excludedGroups>
</configuration>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>integration</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<groups>performance,functional,integration,database,slow</groups>
<excludedGroups>foo</excludedGroups>
</configuration>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@ -0,0 +1,42 @@
package com.hpcloud.testing;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.fail;
public class Assert {
  /**
   * Asserts that {@code actual} equals {@code expected} element-by-element. Two identical
   * references (including both null) pass immediately.
   *
   * NOTE: elements are compared with !=, so a NaN element never matches — even another NaN.
   */
  public static void assertArraysEqual(final double[] actual, final double[] expected) {
    if (expected == actual)
      return;
    if (null == expected)
      fail("expected a null array, but not null found.");
    if (null == actual)
      fail("expected not null array, but null found.");

    assertEquals(actual.length, expected.length, "arrays don't have the same size.");
    for (int i = 0; i < expected.length; i++) {
      if (expected[i] != actual[i]) {
        fail("arrays differ firstly at element [" + i + "]; " + "expected value is <" + expected[i]
            + "> but was <" + actual[i] + ">.");
      }
    }
  }

  /**
   * Asserts that {@code actual} equals {@code expected} element-by-element. Two identical
   * references (including both null) pass immediately.
   */
  public static void assertArraysEqual(final long[] actual, final long[] expected) {
    if (expected == actual)
      return;
    if (null == expected)
      fail("expected a null array, but not null found.");
    if (null == actual)
      fail("expected not null array, but null found.");

    assertEquals(actual.length, expected.length, "arrays don't have the same size.");
    for (int i = 0; i < expected.length; i++) {
      if (expected[i] != actual[i]) {
        fail("arrays differ firstly at element [" + i + "]; " + "expected value is <" + expected[i]
            + "> but was <" + actual[i] + ">.");
      }
    }
  }
}

View File

@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd">
<suite verbose="2" name="ps-common suite">
<test name="All" preserve-order="false" verbose="1">
<groups>
<run>
<exclude name="performance" />
<exclude name="functional" />
<exclude name="integration" />
<exclude name="database" />
</run>
</groups>
<packages>
<package name="com.hpcloud.*" />
</packages>
</test>
<listeners>
<!-- <listener class-name="com.hpcloud.LoggingListener" /> -->
</listeners>
</suite>

View File

@ -0,0 +1 @@
name: test

1
java/mon-util/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/target

104
java/mon-util/pom.xml Normal file
View File

@ -0,0 +1,104 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.hpcloud</groupId>
<artifactId>mon-common</artifactId>
<version>${computedVersion}</version>
</parent>
<artifactId>mon-util</artifactId>
<packaging>jar</packaging>
<properties>
<jackson.version>2.3.1</jackson.version>
<logback.version>1.1.1</logback.version>
</properties>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.6</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
<version>3.0</version>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>2.3</version>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>16.0.1</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-joda</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-yaml</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>4.3.0.Final</version>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-testing</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib</artifactId>
<version>2.2.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>javassist</groupId>
<artifactId>javassist</artifactId>
<version>3.12.1.GA</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,59 @@
package com.hpcloud.util;
import com.google.common.base.Preconditions;
/**
* Utilities for working with exceptions.
*
* @author Jonathan Halterman
*/
public final class Exceptions {
  /**
   * A marker exception class that we look for in order to unwrap the exception into the user
   * exception, to provide a cleaner stack trace.
   */
  @SuppressWarnings("serial")
  private static class UnhandledCheckedUserException extends RuntimeException {
    public UnhandledCheckedUserException(Exception ex, String msg, Object... args) {
      super(String.format(msg, args), ex);
    }

    public UnhandledCheckedUserException(Throwable cause) {
      super(cause);
    }
  }

  private Exceptions() {
  }

  /** Throw <b>any</b> exception as a RuntimeException. */
  public static RuntimeException sneakyThrow(Throwable throwable) {
    Preconditions.checkNotNull(throwable, "throwable");
    // sneakyThrow0's erased generic cast rethrows the throwable without the compiler requiring
    // a throws clause, so this call always throws.
    Exceptions.<RuntimeException>sneakyThrow0(throwable);
    // Unreachable; declared so callers can write "throw sneakyThrow(t)" for definite return flow.
    return null;
  }

  /**
   * Throws a new unchecked exception wrapping the {@code cause}.
   *
   * @throws RuntimeException wrapping the {@code cause}
   */
  public static void throwUnchecked(Throwable cause) {
    throw new UnhandledCheckedUserException(cause);
  }

  /**
   * Returns a new unchecked exception wrapping {@code ex} with a {@link String#format} style
   * message built from {@code msg} and {@code args}.
   */
  public static RuntimeException uncheck(Exception ex, String msg, Object... args) {
    return new UnhandledCheckedUserException(ex, msg, args);
  }

  /** Returns a new unchecked exception wrapping the {@code cause}. */
  public static RuntimeException uncheck(Throwable cause) {
    return new UnhandledCheckedUserException(cause);
  }

  @SuppressWarnings("unchecked")
  private static <T extends Throwable> void sneakyThrow0(Throwable t) throws T {
    // T is inferred as RuntimeException at the call site; the cast is erased at runtime, so a
    // checked throwable escapes undeclared.
    throw (T) t;
  }
}

View File

@ -0,0 +1,171 @@
package com.hpcloud.util;
import java.io.Serializable;
import com.google.common.base.Preconditions;
import com.google.inject.AbstractModule;
import com.google.inject.ConfigurationException;
import com.google.inject.Guice;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.ProvisionException;
import com.google.inject.TypeLiteral;
import com.google.inject.matcher.AbstractMatcher;
import com.google.inject.matcher.Matcher;
import com.google.inject.name.Names;
/**
* Provides fully injected instances.
*
* @author Jonathan Halterman
*/
public final class Injector {
  // Volatile so readers see the injector most recently published by the synchronized
  // register* methods without extra locking.
  private static volatile com.google.inject.Injector injector;

  /**
   * Supplementary factory methods for producing type literal based Guice Matchers.
   */
  public static class TypeLiteralMatchers {
    /** Matches type literals equal to, or raw-type-assignable to, a given supertype. */
    private static class SubtypeOf extends AbstractMatcher<TypeLiteral<?>> implements Serializable {
      private static final long serialVersionUID = 1239939466206498961L;
      private final TypeLiteral<?> supertype;

      /**
       * @param superType supertype to match subtypes of; must not be null
       */
      public SubtypeOf(TypeLiteral<?> superType) {
        super();
        this.supertype = Preconditions.checkNotNull(superType, "supertype");
      }

      @Override
      public boolean equals(Object other) {
        return other instanceof SubtypeOf && ((SubtypeOf) other).supertype.equals(supertype);
      }

      @Override
      public int hashCode() {
        return 37 * supertype.hashCode();
      }

      @Override
      public boolean matches(TypeLiteral<?> subtype) {
        // Equal type literals match; otherwise fall back to raw-type assignability.
        return (subtype.equals(supertype) || supertype.getRawType().isAssignableFrom(
            subtype.getRawType()));
      }

      @Override
      public String toString() {
        return "subtypeOf(" + supertype.getRawType() + ".class)";
      }
    }

    public static Matcher<? super TypeLiteral<?>> subtypeOf(final Class<?> superclass) {
      return new SubtypeOf(TypeLiteral.get(superclass));
    }

    public static Matcher<? super TypeLiteral<?>> subtypeOf(final TypeLiteral<?> supertype) {
      return new SubtypeOf(supertype);
    }
  }

  private Injector() {
  }

  /**
   * Checks to see if the {@code type} is injectable.
   *
   * @throws ConfigurationException if {@code type} is not injectable
   */
  public static void checkInjectable(Class<?> type) {
    initInjector();
    // getBinding throws ConfigurationException when no binding can be created for the type.
    injector.getBinding(type);
  }

  /**
   * Returns an instance of the {@code type} according to the registered modules.
   *
   * @throws ConfigurationException if this injector cannot find or create the provider.
   * @throws ProvisionException if there was a runtime failure while providing an instance.
   */
  public static <T> T getInstance(Class<T> type) {
    initInjector();
    return injector.getInstance(type);
  }

  /**
   * Returns an instance of the {@code type} for the {@code name} according to the registered
   * modules.
   *
   * @throws ConfigurationException if this injector cannot find or create the provider.
   * @throws ProvisionException if there was a runtime failure while providing an instance.
   */
  public static <T> T getInstance(Class<T> type, String name) {
    initInjector();
    return injector.getInstance(Key.get(type, Names.named(name)));
  }

  /**
   * Returns an instance of <T> for the {@code key} according to the registered modules.
   *
   * @throws ConfigurationException if this injector cannot find or create the provider.
   * @throws ProvisionException if there was a runtime failure while providing an instance.
   */
  public static <T> T getInstance(Key<T> key) {
    initInjector();
    return injector.getInstance(key);
  }

  /**
   * Injects dependencies into the fields and methods of the {@code object}.
   */
  public static void injectMembers(Object object) {
    initInjector();
    injector.injectMembers(object);
  }

  /**
   * Returns true of a binding exists for the {@code type}, else false.
   */
  public static boolean isBound(Class<?> type) {
    // getExistingBinding never creates just-in-time bindings, so only bindings already
    // registered or created count as "bound".
    return injector != null && injector.getExistingBinding(Key.get(type)) != null;
  }

  /**
   * Returns true of a binding exists for the {@code type} and {@code name}, else false.
   */
  public static boolean isBound(Class<?> type, String name) {
    return injector != null
        && injector.getExistingBinding(Key.get(type, Names.named(name))) != null;
  }

  /** Registers the {@code modules} if the {@code type} is not bound, and does so atomically. */
  public static synchronized void registerIfNotBound(Class<?> type, Module... modules) {
    if (!isBound(type))
      registerModules(modules);
  }

  public static synchronized void registerModules(Module... modules) {
    // Subsequent registrations layer child injectors over the original, so earlier bindings
    // stay visible while new modules extend them.
    if (injector == null)
      injector = Guice.createInjector(modules);
    else
      injector = injector.createChildInjector(modules);
  }

  /**
   * Resets the injector's internal module-based configuration.
   */
  public static void reset() {
    injector = null;
  }

  /** Initializes the injector with an empty module. */
  private static void initInjector() {
    // NOTE(review): unsynchronized null check; racing callers may each enter the synchronized
    // registerModules, at worst adding an extra empty child injector — confirm acceptable.
    if (injector == null)
      registerModules(new AbstractModule() {
        @Override
        protected void configure() {
        }
      });
  }
}

View File

@ -0,0 +1,35 @@
package com.hpcloud.util;
import java.util.ArrayList;
/**
* LIFO Stack semantics around an ArrayList for random access support. Indexed access via
* {@link #get(int)} and iteration via {@link #iterator()} is in reverse order (FIFO). Not
* threadsafe.
*
* @author Jonathan Halterman
*/
public class Stack<T> extends ArrayList<T> {
  private static final long serialVersionUID = 0L;

  /** Adds {@code element} to the top of the stack. */
  public void push(T element) {
    add(element);
  }

  /** Removes and returns the element at the top of the stack. */
  public T pop() {
    int top = size() - 1;
    return remove(top);
  }

  /** Returns, without removing, the element at the top of the stack. */
  public T peek() {
    int top = size() - 1;
    return get(top);
  }
}

View File

@ -0,0 +1,271 @@
package com.hpcloud.util.stats;
import javax.annotation.concurrent.NotThreadSafe;
import com.hpcloud.util.Exceptions;
import com.hpcloud.util.time.TimeResolution;
/**
* A time based sliding window containing statistics for a fixed number of slots of a fixed length.
* The window provides a fixed size view over the total number of slots in the window.
*
* @author Jonathan Halterman
*/
@NotThreadSafe
public class SlidingWindowStats {
  private final TimeResolution timescale; // normalizes raw timestamps via adjust()
  private final long slotWidth; // time span covered by one slot
  private final int numViewSlots; // slots in the viewable portion of the window
  private final long windowLength; // (view + future slots) * slotWidth
  private final Slot[] slots; // circular buffer; oldest slot is at windowBeginIndex
  private int windowBeginIndex; // physical index of the oldest slot
  private long viewEndTimestamp; // end of the view, non-inclusive
  private long slotEndTimestamp; // boundary used to compute how many slots to advance
  private long windowEndTimestamp; // end of the whole window (view + future), non-inclusive

  /** One slot of the window: its start timestamp and the statistic accumulated for it. */
  private static class Slot {
    private long timestamp;
    private Statistic stat;

    private Slot(long timestamp, Statistic stat) {
      this.timestamp = timestamp;
      this.stat = stat;
    }

    @Override
    public String toString() {
      return timestamp + "=" + stat;
    }
  }
  /**
   * Creates a time based SlidingWindowStats containing a fixed {@code numViewSlots} representing a
   * view up to the {@code viewEndTimestamp} (non-inclusive), and an additional
   * {@code numFutureSlots} for timestamps beyond the window view.
   *
   * It is recommended to make the {@code viewEndTimestamp} one time unit more than the current time
   * intended for the last view slot, so that as the window slides to the right any added values
   * will slide all the way across the view.
   *
   * @param statType to calculate values for
   * @param timeResolution to adjust timestamps with
   * @param slotWidth time-based width of the slot
   * @param numViewSlots the number of viewable slots
   * @param numFutureSlots the number of future slots to allow values for
   * @param viewEndTimestamp timestamp to end view at, non-inclusive
   */
  public SlidingWindowStats(Class<? extends Statistic> statType, TimeResolution timeResolution,
      long slotWidth, int numViewSlots, int numFutureSlots, long viewEndTimestamp) {
    this.timescale = timeResolution;
    this.slotWidth = slotWidth;
    this.numViewSlots = numViewSlots;
    this.windowLength = (numViewSlots + numFutureSlots) * slotWidth;
    // Normalize the caller's timestamp to the window's time resolution.
    this.viewEndTimestamp = timeResolution.adjust(viewEndTimestamp);
    slotEndTimestamp = this.viewEndTimestamp;
    windowEndTimestamp = this.viewEndTimestamp + (numFutureSlots * slotWidth);

    // Initialize slots from newest (rightmost) to oldest, each slotWidth apart.
    slots = new Slot[numViewSlots + numFutureSlots];
    long timestamp = windowEndTimestamp - slotWidth;
    for (int i = numViewSlots + numFutureSlots - 1; i > -1; i--, timestamp -= slotWidth)
      slots[i] = createSlot(timestamp, statType);
  }
  /** Returns a new slot for the {@code timestamp} and {@code statType}. */
  private static Slot createSlot(long timestamp, Class<? extends Statistic> statType) {
    try {
      // statType must expose a no-arg constructor; reflection failures surface unchecked.
      return new Slot(timestamp, statType.newInstance());
    } catch (Exception e) {
      throw Exceptions.uncheck(e, "Failed to initialize slot");
    }
  }
  /**
   * Adds the {@code value} to the statistics for the slot associated with the {@code timestamp} and
   * returns true, else returns false if the {@code timestamp} is outside of the window.
   *
   * @param value to add
   * @param timestamp to add value for
   * @return true if the value was added else false if it the {@code timestamp} was outside the
   *         window
   */
  public boolean addValue(double value, long timestamp) {
    timestamp = timescale.adjust(timestamp);
    int index = indexOfTime(timestamp);
    // indexOfTime returns -1 for timestamps before the window start or at/after its end.
    if (index == -1)
      return false;
    slots[index].stat.addValue(value);
    return true;
  }
  /** Returns the total number of slots in the window (view plus future slots). */
  public int getSlotCount() {
    return slots.length;
  }
  /** Returns the window's slot width, in units of the window's time resolution. */
  public long getSlotWidth() {
    return slotWidth;
  }
  /**
   * Returns the timestamps represented by the current position of the sliding window increasing
   * from oldest to newest.
   */
  public long[] getTimestamps() {
    long[] timestamps = new long[numViewSlots];
    // NOTE(review): starts at windowEnd - (slots.length - 1) * slotWidth, which is one slotWidth
    // after the oldest slot's start timestamp set by the constructor — confirm whether these are
    // intended to be slot-end rather than slot-start timestamps.
    long timestamp = windowEndTimestamp - ((slots.length - 1) * slotWidth);
    for (int i = 0; i < numViewSlots; i++, timestamp += slotWidth)
      timestamps[i] = timestamp;
    return timestamps;
  }
  /**
   * Returns the value for the window slot associated with {@code timestamp}.
   *
   * @param timestamp to get value for
   * @throws IllegalStateException if no value is within the window for the {@code timestamp}
   */
  public double getValue(long timestamp) {
    timestamp = timescale.adjust(timestamp);
    int index = indexOfTime(timestamp);
    if (index == -1)
      throw new IllegalStateException(timestamp + " is outside of the window");
    return slots[index].stat.value();
  }
  /**
   * Returns the values for the window up to and including the {@code timestamp}. Values for
   * uninitialized slots will be Double.NaN.
   *
   * @param timestamp to get value for
   * @throws IllegalStateException if no value is within the window for the {@code timestamp}
   */
  public double[] getValuesUpTo(long timestamp) {
    timestamp = timescale.adjust(timestamp);
    int endIndex = indexOfTime(timestamp);
    if (endIndex == -1)
      throw new IllegalStateException(timestamp + " is outside of the window");
    // Walk the circular buffer from the oldest slot up to the slot for the timestamp.
    double[] values = new double[lengthToIndex(endIndex)];
    for (int i = 0, index = windowBeginIndex; i < values.length; i++, index = indexAfter(index))
      if (slots[index] != null)
        values[i] = slots[index].stat.value();
    return values;
  }
  /**
   * Returns the values of the sliding view increasing from oldest to newest.
   */
  public double[] getViewValues() {
    double[] values = new double[numViewSlots];
    // Oldest slot is at windowBeginIndex; indexAfter wraps around the circular buffer.
    for (int i = 0, index = windowBeginIndex; i < numViewSlots; i++, index = indexAfter(index))
      if (slots[index] != null)
        values[i] = slots[index].stat.value();
    return values;
  }
  /**
   * Returns the values of the sliding window increasing from oldest to newest, including future
   * slots beyond the view.
   */
  public double[] getWindowValues() {
    double[] values = new double[slots.length];
    for (int i = 0, index = windowBeginIndex; i < slots.length; i++, index = indexAfter(index))
      if (slots[index] != null)
        values[i] = slots[index].stat.value();
    return values;
  }
  /**
   * Slides window's view to the slot for the {@code timestamp}, erasing values for any slots along
   * the way.
   *
   * @param timestamp slide view to
   */
  public void slideViewTo(long timestamp) {
    timestamp = timescale.adjust(timestamp);
    // The view never slides backwards.
    if (timestamp <= viewEndTimestamp)
      return;
    long timeDiff = timestamp - slotEndTimestamp;
    // Round up for any partial slot so the view fully covers the timestamp.
    int slotsToAdvance = (int) (timeDiff / slotWidth);
    slotsToAdvance += timeDiff % slotWidth == 0 ? 0 : 1;
    for (int i = 0; i < slotsToAdvance; i++) {
      // Recycle the oldest slot as the newest: restamp it with the window's new end and
      // reset its statistic.
      windowBeginIndex = indexAfter(windowBeginIndex);
      Slot slot = slots[indexOf(slots.length - 1)];
      slot.timestamp = windowEndTimestamp;
      slot.stat.reset();

      slotEndTimestamp += slotWidth;
      windowEndTimestamp += slotWidth;
    }

    viewEndTimestamp = timestamp;
  }
  /**
   * Returns a logical view of the sliding window with increasing timestamps from left to right.
   */
  @Override
  public String toString() {
    // At most this many view slots are rendered; older view slots collapse to "... "
    final int viewSlotsToDisplay = 3;

    StringBuilder b = new StringBuilder();
    b.append("SlidingWindowStats [(");
    // Start rendering at the oldest view slot that fits in the display budget
    int startIndex = numViewSlots > viewSlotsToDisplay ? numViewSlots - viewSlotsToDisplay : 0;
    if (startIndex != 0)
      b.append("... ");
    int index = indexOf(startIndex);
    for (int i = startIndex; i < slots.length; i++, index = indexAfter(index)) {
      // The ")" closes the view portion; slots past numViewSlots are window-only
      if (i == numViewSlots)
        b.append("), ");
      else if (i != startIndex)
        b.append(", ");
      b.append(slots[index]);
    }
    return b.append(']').toString();
  }
/**
* Returns the physical index of the logical {@code slotIndex} calculated from the
* {@code windowBeginIndex}.
*/
int indexOf(int slotIndex) {
int offset = windowBeginIndex + slotIndex;
if (offset >= slots.length)
offset -= slots.length;
return offset;
}
/**
* Returns physical index of the slot associated with the {@code timestamp}, else -1 if the
* {@code timestamp} is outside of the window. Slots increase in time from left to right,
* wrapping.
*/
int indexOfTime(long timestamp) {
if (timestamp < windowEndTimestamp) {
long windowStartTimestamp = windowEndTimestamp - windowLength;
int timeDiff = (int) (timestamp - windowStartTimestamp);
if (timeDiff >= 0) {
int logicalIndex = (int) (timeDiff / slotWidth);
return indexOf(logicalIndex);
}
}
return -1;
}
/** Returns the length of the window up to and including the physical {@code slotIndex}. */
int lengthToIndex(int slotIndex) {
if (windowBeginIndex <= slotIndex)
return slotIndex - windowBeginIndex + 1;
else
return slotIndex + slots.length - windowBeginIndex + 1;
}
/** Returns the physical index for the slot logically positioned after the {@code index}. */
private int indexAfter(int index) {
return ++index == slots.length ? 0 : index;
}
}

View File

@ -0,0 +1,20 @@
package com.hpcloud.util.stats;
/**
 * Statistic. A single accumulating measurement that values can be added to and that can be reset
 * for reuse.
 *
 * @author Jonathan Halterman
 */
public interface Statistic {
  /** Adds the {@code value} to the statistic. */
  void addValue(double value);
  /** Returns true if the statistic has been initialized with a value, else false. */
  boolean isInitialized();
  /** Resets the value of the statistic. Implementations become uninitialized again. */
  void reset();
  /** Returns the value of the statistic. NOTE(review): implementations appear to return Double.NaN when uninitialized — confirm callers rely on this. */
  double value();
}

View File

@ -0,0 +1,96 @@
package com.hpcloud.util.stats;
/**
 * Statistic implementations.
 *
 * @author Jonathan Halterman
 */
public final class Statistics {
  /**
   * Base implementation holding the initialization flag and the accumulated value. Uninitialized
   * statistics report Double.NaN.
   */
  public static abstract class AbstractStatistic implements Statistic {
    protected boolean initialized;
    protected double value;

    @Override
    public boolean isInitialized() {
      return initialized;
    }

    @Override
    public void reset() {
      value = 0;
      initialized = false;
    }

    @Override
    public String toString() {
      return String.valueOf(value());
    }

    @Override
    public double value() {
      return initialized ? value : Double.NaN;
    }
  }

  /** Arithmetic mean of the added values. */
  public static class Average extends Sum {
    protected int count;

    @Override
    public void addValue(double value) {
      super.addValue(value);
      count++;
    }

    @Override
    public void reset() {
      count = 0;
      super.reset();
    }

    @Override
    public double value() {
      if (!initialized)
        return Double.NaN;
      return count == 0 ? 0 : value / count;
    }
  }

  /** Number of values added; the values themselves are ignored. */
  public static class Count extends AbstractStatistic {
    @Override
    public void addValue(double value) {
      initialized = true;
      this.value = this.value + 1;
    }
  }

  /** Largest value added. */
  public static class Max extends AbstractStatistic {
    @Override
    public void addValue(double value) {
      // The first value always becomes the max; afterwards only larger values do
      if (!initialized || value > this.value) {
        initialized = true;
        this.value = value;
      }
    }
  }

  /** Smallest value added. */
  public static class Min extends AbstractStatistic {
    @Override
    public void addValue(double value) {
      // The first value always becomes the min; afterwards only smaller values do
      if (!initialized || value < this.value) {
        initialized = true;
        this.value = value;
      }
    }
  }

  /** Running total of the added values. */
  public static class Sum extends AbstractStatistic {
    @Override
    public void addValue(double value) {
      initialized = true;
      this.value += value;
    }
  }

  private Statistics() {
  }
}

View File

@ -0,0 +1,32 @@
package com.hpcloud.util.time;
/**
 * Time resolution.
 *
 * @author Jonathan Halterman
 */
public enum TimeResolution {
  /** Timestamps are used as given, without adjustment. */
  ABSOLUTE,
  /** Timestamps are rounded down to the nearest second via {@link Times#roundDownToNearestSecond}. */
  SECONDS,
  /** Timestamps are rounded down to the nearest minute via {@link Times#roundDownToNearestMinute}. */
  MINUTES;

  /**
   * Returns the {@code timestamp} adjusted for the resolution.
   */
  public long adjust(long timestamp) {
    switch (this) {
      case SECONDS:
        return Times.roundDownToNearestSecond(timestamp);
      case MINUTES:
        return Times.roundDownToNearestMinute(timestamp);
      default:
        return timestamp;
    }
  }
}

View File

@ -0,0 +1,27 @@
package com.hpcloud.util.time;
/**
 * Utilities for working with times.
 *
 * @author Jonathan Halterman
 */
public final class Times {
  private Times() {
  }

  /**
   * Returns a timestamp in seconds for the given {@code seconds} which is rounded down to the
   * nearest minute.
   *
   * @param seconds timestamp in seconds (Unix time)
   */
  public static long roundDownToNearestMinute(long seconds) {
    long rounded = seconds / 60 * 60;
    // Integer division truncates toward zero, so a negative (pre-epoch) timestamp
    // would otherwise be rounded UP; step back one minute to truly round down.
    if (rounded > seconds)
      rounded -= 60;
    return rounded;
  }

  /**
   * Returns a timestamp in milliseconds for the given {@code milliseconds} which is rounded down to
   * the nearest second.
   *
   * @param milliseconds timestamp in milliseconds (Java time)
   */
  public static long roundDownToNearestSecond(long milliseconds) {
    long rounded = milliseconds / 1000 * 1000;
    // Same floor semantics for negative millisecond timestamps
    if (rounded > milliseconds)
      rounded -= 1000;
    return rounded;
  }
}

View File

@ -0,0 +1,272 @@
package com.hpcloud.util.stats;
import static com.hpcloud.testing.Assert.assertArraysEqual;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.fail;
import org.testng.annotations.Test;
import com.hpcloud.util.time.TimeResolution;
/**
 * Tests for SlidingWindowStats covering slot indexing, sliding, and value retrieval.
 *
 * @author Jonathan Halterman
 */
@Test
public class SlidingWindowStatsTest {
  public void testIndexOf() {
    SlidingWindowStats window = new SlidingWindowStats(Statistics.Sum.class,
        TimeResolution.ABSOLUTE, 1, 5, 2, 5);
    // Window 1, 2, 3, 4, 5, 6, 7
    assertEquals(window.indexOf(0), 0);
    assertEquals(window.indexOf(1), 1);
    assertEquals(window.indexOf(2), 2);
    assertEquals(window.indexOf(4), 4);
    assertEquals(window.indexOf(6), 6);
    // Window 8, 9, 10, 4, 5, 6, 7
    window.slideViewTo(8);
    assertEquals(window.indexOf(0), 3);
    assertEquals(window.indexOf(1), 4);
    assertEquals(window.indexOf(2), 5);
    assertEquals(window.indexOf(4), 0);
    assertEquals(window.indexOf(6), 2);
    // Window 8, 9, 10, 11, 12, 6, 7
    window.slideViewTo(10);
    assertEquals(window.indexOf(0), 5);
    assertEquals(window.indexOf(1), 6);
    assertEquals(window.indexOf(2), 0);
    assertEquals(window.indexOf(4), 2);
    assertEquals(window.indexOf(6), 4);
    // Window 15, 9, 10, 11, 12, 13, 14
    window.slideViewTo(13);
    assertEquals(window.indexOf(0), 1);
    assertEquals(window.indexOf(1), 2);
    assertEquals(window.indexOf(2), 3);
    assertEquals(window.indexOf(4), 5);
    assertEquals(window.indexOf(6), 0);
  }

  public void shouldGetTimestamps() {
    SlidingWindowStats window = new SlidingWindowStats(Statistics.Sum.class,
        TimeResolution.ABSOLUTE, 1, 5, 2, 10);
    assertArraysEqual(window.getTimestamps(), new long[] { 6, 7, 8, 9, 10 });
    window.slideViewTo(14);
    assertArraysEqual(window.getTimestamps(), new long[] { 10, 11, 12, 13, 14 });
    window = new SlidingWindowStats(Statistics.Average.class, TimeResolution.ABSOLUTE, 3, 3, 2, 6);
    assertArraysEqual(window.getTimestamps(), new long[] { 0, 3, 6 });
    window.slideViewTo(14);
    assertArraysEqual(window.getTimestamps(), new long[] { 9, 12, 15 });
  }

  public void shouldSlideViewTo() {
    SlidingWindowStats window = new SlidingWindowStats(Statistics.Average.class,
        TimeResolution.ABSOLUTE, 3, 3, 2, 6);
    window.slideViewTo(2);
    window.slideViewTo(7);
    // Use assertArraysEqual consistently for long[] comparisons (was assertEquals)
    assertArraysEqual(window.getTimestamps(), new long[] { 3, 6, 9 });
    window.slideViewTo(9);
    assertArraysEqual(window.getTimestamps(), new long[] { 3, 6, 9 });
    window.slideViewTo(12);
    assertArraysEqual(window.getTimestamps(), new long[] { 6, 9, 12 });
    window.slideViewTo(14);
    assertArraysEqual(window.getTimestamps(), new long[] { 9, 12, 15 });
    window.slideViewTo(18);
    assertArraysEqual(window.getTimestamps(), new long[] { 12, 15, 18 });
    // Attempt to slide backwards - Noop
    window.slideViewTo(10);
    assertArraysEqual(window.getTimestamps(), new long[] { 12, 15, 18 });
  }

  public void shouldAddValueAndGetWindowValues() {
    SlidingWindowStats window = new SlidingWindowStats(Statistics.Average.class,
        TimeResolution.ABSOLUTE, 3, 3, 2, 9);
    for (int i = 0; i < 5; i++)
      window.addValue(999, i * 3);
    assertEquals(window.getWindowValues(), new double[] { 999, 999, 999, 999, 999 });
    window.slideViewTo(12);
    assertEquals(window.getWindowValues(), new double[] { 999, 999, 999, 999, Double.NaN });
    window.addValue(888, 17);
    assertEquals(window.getWindowValues(), new double[] { 999, 999, 999, 999, 888 });
  }

  public void shouldAddValueAndGetViewValues() {
    SlidingWindowStats window = new SlidingWindowStats(Statistics.Average.class,
        TimeResolution.ABSOLUTE, 3, 3, 2, 9);
    for (int i = 0; i < 5; i++)
      window.addValue(999, i * 3);
    assertEquals(window.getViewValues(), new double[] { 999, 999, 999 });
    window.slideViewTo(15);
    assertEquals(window.getViewValues(), new double[] { 999, 999, 999 });
    window.addValue(777, 15);
    window.addValue(888, 18);
    // Values outside the view are not visible until the view slides over them
    assertEquals(window.getViewValues(), new double[] { 999, 999, 999 });
    window.slideViewTo(21);
    assertEquals(window.getViewValues(), new double[] { 999, 777, 888 });
  }

  public void testIndexOfTime() {
    SlidingWindowStats window = new SlidingWindowStats(Statistics.Average.class,
        TimeResolution.ABSOLUTE, 3, 3, 2, 15);
    // Slots look like 6 9 12 15 18
    assertEquals(window.indexOfTime(5), -1);
    assertEquals(window.indexOfTime(9), 1);
    assertEquals(window.indexOfTime(10), 1);
    assertEquals(window.indexOfTime(12), 2);
    assertEquals(window.indexOfTime(13), 2);
    assertEquals(window.indexOfTime(15), 3);
    assertEquals(window.indexOfTime(17), 3);
    assertEquals(window.indexOfTime(20), 4);
    assertEquals(window.indexOfTime(21), -1);
    window.slideViewTo(19);
    // Slots like 21 24 12 15 18
    assertEquals(window.indexOfTime(11), -1);
    assertEquals(window.indexOfTime(12), 2);
    assertEquals(window.indexOfTime(15), 3);
    assertEquals(window.indexOfTime(17), 3);
    assertEquals(window.indexOfTime(20), 4);
    assertEquals(window.indexOfTime(22), 0);
    assertEquals(window.indexOfTime(26), 1);
    assertEquals(window.indexOfTime(28), -1);
    window.slideViewTo(22);
    // Slots like 21 24 27 15 18
    assertEquals(window.indexOfTime(14), -1);
    assertEquals(window.indexOfTime(19), 4);
    assertEquals(window.indexOfTime(20), 4);
    assertEquals(window.indexOfTime(22), 0);
    assertEquals(window.indexOfTime(26), 1);
    assertEquals(window.indexOfTime(28), 2);
    assertEquals(window.indexOfTime(31), -1);
  }

  public void shouldGetValue() {
    SlidingWindowStats window = new SlidingWindowStats(Statistics.Sum.class,
        TimeResolution.ABSOLUTE, 5, 3, 2, 20);
    // Logical window is 5 10 15
    window.addValue(2, 5);
    window.addValue(3, 10);
    window.addValue(4, 15);
    assertEquals(window.getValue(5), 2.0);
    assertEquals(window.getValue(10), 3.0);
    assertEquals(window.getValue(15), 4.0);
    // Slide logical window to 10 15 20
    window.slideViewTo(25);
    window.addValue(5, 24);
    assertEquals(window.getValue(10), 3.0);
    assertEquals(window.getValue(15), 4.0);
    assertEquals(window.getValue(20), 5.0);
  }

  public void testLengthToIndex() {
    SlidingWindowStats window = new SlidingWindowStats(Statistics.Sum.class,
        TimeResolution.ABSOLUTE, 1, 5, 2, 6);
    // Window 1, 2, 3, 4, 5, 6, 7
    assertEquals(window.lengthToIndex(6), 7);
    assertEquals(window.lengthToIndex(4), 5);
    assertEquals(window.lengthToIndex(2), 3);
    assertEquals(window.lengthToIndex(1), 2);
    assertEquals(window.lengthToIndex(0), 1);
    // Window 8, 2, 3, 4, 5, 6, 7
    window.slideViewTo(7);
    assertEquals(window.lengthToIndex(6), 6);
    assertEquals(window.lengthToIndex(4), 4);
    assertEquals(window.lengthToIndex(2), 2);
    assertEquals(window.lengthToIndex(1), 1);
    assertEquals(window.lengthToIndex(0), 7);
    // Window 8, 9, 10, 4, 5, 6, 7
    window.slideViewTo(9);
    assertEquals(window.lengthToIndex(6), 4);
    assertEquals(window.lengthToIndex(4), 2);
    assertEquals(window.lengthToIndex(2), 7);
    assertEquals(window.lengthToIndex(1), 6);
    assertEquals(window.lengthToIndex(0), 5);
    // Window 8, 9, 10, 11, 12, 13, 7
    window.slideViewTo(12);
    assertEquals(window.lengthToIndex(6), 1);
    assertEquals(window.lengthToIndex(4), 6);
    assertEquals(window.lengthToIndex(2), 4);
    assertEquals(window.lengthToIndex(1), 3);
    assertEquals(window.lengthToIndex(0), 2);
  }

  public void shouldGetValuesUpTo() {
    SlidingWindowStats window = new SlidingWindowStats(Statistics.Sum.class,
        TimeResolution.ABSOLUTE, 5, 3, 2, 20);
    // Window is 5 10 15 20 25
    window.addValue(2, 5);
    window.addValue(3, 10);
    window.addValue(4, 15);
    assertEquals(window.getValuesUpTo(20), new double[] { 2, 3, 4, Double.NaN });
    assertEquals(window.getValuesUpTo(18), new double[] { 2, 3, 4 });
    assertEquals(window.getValuesUpTo(12), new double[] { 2, 3 });
    assertEquals(window.getValuesUpTo(9), new double[] { 2 });
    // Window is 30 10 15 20 25
    window.slideViewTo(22);
    window.addValue(5, 22);
    assertEquals(window.getValuesUpTo(22), new double[] { 3, 4, 5 });
    assertEquals(window.getValuesUpTo(15), new double[] { 3, 4 });
    assertEquals(window.getValuesUpTo(12), new double[] { 3 });
    // Window is 30 35 15 20 25
    window.slideViewTo(27);
    window.addValue(6, 26);
    assertEquals(window.getValuesUpTo(27), new double[] { 4, 5, 6 });
    assertEquals(window.getValuesUpTo(24), new double[] { 4, 5 });
    assertEquals(window.getValuesUpTo(18), new double[] { 4 });
    // Assert out of bounds below the window start
    try {
      assertEquals(window.getValuesUpTo(9), new double[] {});
      fail();
    } catch (Exception expected) {
    }
    // Assert out of bounds past the window end
    try {
      assertEquals(window.getValuesUpTo(41), new double[] {});
      fail();
    } catch (Exception expected) {
    }
  }

  public void testToString() {
    SlidingWindowStats smallWindow = new SlidingWindowStats(Statistics.Sum.class,
        TimeResolution.ABSOLUTE, 5, 3, 2, 20);
    assertEquals(smallWindow.toString(),
        "SlidingWindowStats [(5=NaN, 10=NaN, 15=NaN), 20=NaN, 25=NaN]");
    SlidingWindowStats bigWindow = new SlidingWindowStats(Statistics.Sum.class,
        TimeResolution.ABSOLUTE, 10, 10, 2, 50);
    assertEquals(bigWindow.toString(),
        "SlidingWindowStats [(... 20=NaN, 30=NaN, 40=NaN), 50=NaN, 60=NaN]");
  }
}

View File

@ -0,0 +1,31 @@
package com.hpcloud.util.stats;
import static org.testng.Assert.assertEquals;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import com.hpcloud.util.stats.Statistics.Average;
import com.hpcloud.util.stats.Statistics.Count;
import com.hpcloud.util.stats.Statistics.Max;
import com.hpcloud.util.stats.Statistics.Min;
import com.hpcloud.util.stats.Statistics.Sum;
/**
 * @author Jonathan Halterman
 */
@Test
public class StatisticsTest {
  @DataProvider(name = "metricTypes")
  public Object[][] statisticsWithExpectedValues() {
    // Each statistic is fed the samples 2 and 4 by testStat; the second element
    // is the expected resulting value
    Object[][] data = new Object[][] {
        { new Average(), 3 },
        { new Sum(), 6 },
        { new Min(), 2 },
        { new Max(), 4 },
        { new Count(), 2 } };
    return data;
  }

  @Test(dataProvider = "metricTypes")
  public void testStat(Statistic stat, double expectedValue) {
    stat.addValue(2);
    stat.addValue(4);
    assertEquals(stat.value(), expectedValue, stat.getClass().getName());
  }
}

View File

@ -0,0 +1,42 @@
package com.hpcloud.util.time;
import static org.testng.Assert.assertEquals;
import org.joda.time.DateTime;
import org.testng.annotations.Test;
/**
 * @author Jonathan Halterman
 */
@Test
public class TimesTest {
  public void shouldRoundDownToNearestMinute() {
    long unixTime = 1367971679L; // Unix time
    long roundedTime = Times.roundDownToNearestMinute(unixTime);
    DateTime original = new DateTime(unixTime * 1000);
    DateTime rounded = new DateTime(roundedTime * 1000);
    // Everything down to the minute is unchanged
    assertEquals(original.getYear(), rounded.getYear());
    assertEquals(original.getMonthOfYear(), rounded.getMonthOfYear());
    assertEquals(original.getDayOfYear(), rounded.getDayOfYear());
    assertEquals(original.getHourOfDay(), rounded.getHourOfDay());
    assertEquals(original.getMinuteOfHour(), rounded.getMinuteOfHour());
    // Seconds and milliseconds are zeroed
    assertEquals(rounded.getSecondOfMinute(), 0);
    assertEquals(rounded.getMillisOfSecond(), 0);
  }

  public void shouldRoundDownToNearestSecond() {
    long javaTime = 1363982335257L; // Java time
    long roundedTime = Times.roundDownToNearestSecond(javaTime);
    DateTime original = new DateTime(javaTime);
    DateTime rounded = new DateTime(roundedTime);
    // Everything down to the second is unchanged
    assertEquals(original.getYear(), rounded.getYear());
    assertEquals(original.getMonthOfYear(), rounded.getMonthOfYear());
    assertEquals(original.getDayOfYear(), rounded.getDayOfYear());
    assertEquals(original.getHourOfDay(), rounded.getHourOfDay());
    assertEquals(original.getMinuteOfHour(), rounded.getMinuteOfHour());
    assertEquals(original.getSecondOfMinute(), rounded.getSecondOfMinute());
    // Milliseconds are zeroed
    assertEquals(rounded.getMillisOfSecond(), 0);
  }
}

View File

@ -0,0 +1,12 @@
<configuration debug="true" scan="true">
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%-5p [%d{ISO8601}] [%.18thread] %c: %m\n%ex
</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="STDOUT" />
</root>
</configuration>

211
java/pom.xml Normal file
View File

@ -0,0 +1,211 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.hpcloud</groupId>
<artifactId>mon-common</artifactId>
<version>${computedVersion}</version>
<url>http://hpcloud.net</url>
<packaging>pom</packaging>
<prerequisites>
<maven>3.0</maven>
</prerequisites>
<properties>
<!-- Versioning -->
<versionNumber>1.0.0</versionNumber>
<computedVersion>${versionNumber}-SNAPSHOT</computedVersion>
<skipITs>true</skipITs>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
</properties>
<scm>
<connection>scm:git:git@git.hpcloud.net:mon/mon-common.git</connection>
<developerConnection>scm:git:git@git.hpcloud.net:mon/mon-common.git</developerConnection>
</scm>
<modules>
<module>mon-collectd</module>
<module>mon-dropwizard</module>
<module>mon-model</module>
<module>mon-persistence</module>
<module>mon-testing</module>
<module>mon-util</module>
</modules>
<repositories>
<repository>
      <id>nexus-releases</id>
<url>http://nexus.paas.hpcloud.net:8081/nexus/content/repositories/releases</url>
</repository>
<repository>
      <id>nexus-3rd-party</id>
<url>http://nexus.paas.hpcloud.net:8081/nexus/content/repositories/thirdparty</url>
</repository>
<repository>
<id>nexus-snapshots</id>
<name>nexus snapshots</name>
<url>http://nexus.paas.hpcloud.net:8081/nexus/content/repositories/snapshots</url>
</repository>
</repositories>
<distributionManagement>
<repository>
<id>nexus</id>
<name>Internal Releases</name>
<url>http://nexus.paas.hpcloud.net:8081/nexus/content/repositories/releases</url>
</repository>
<snapshotRepository>
<id>nexus</id>
<name>Snapshots</name>
<uniqueVersion>false</uniqueVersion>
<url>http://nexus.paas.hpcloud.net:8081/nexus/content/repositories/snapshots</url>
</snapshotRepository>
</distributionManagement>
<profiles>
<profile>
<id>release-deploy-url-override</id>
<activation>
<property>
<name>BUILD_NUM</name>
</property>
</activation>
<properties>
<computedVersion>${versionNumber}.${BUILD_NUM}</computedVersion>
</properties>
</profile>
</profiles>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludedGroups>performance,functional,integration,database,slow</excludedGroups>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration>
<groups>performance,functional,integration,database,slow</groups>
<skipTests>${skipITs}</skipTests>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
</goals>
<configuration>
<includes>
<include>**/*.class</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-scm-plugin</artifactId>
<configuration>
<tag>${project.version}</tag>
</configuration>
</plugin>
<!-- Resources, Install and Deploy configuration enable variable replacement on deployment. From
http://www.axelfontaine.com/2011/01/maven-releases-on-steroids-2-preparing.html -->
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>replace-pom-placeholder</id>
<phase>package</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<resources>
<resource>
<directory>${basedir}</directory>
<includes>
<include>pom.xml</include>
</includes>
<filtering>true</filtering>
</resource>
</resources>
<outputDirectory>${project.build.directory}/pom-install-deploy-fix</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-install-plugin</artifactId>
<executions>
<execution>
<id>overwrite-pom</id>
<phase>install</phase>
<goals>
<goal>install-file</goal>
</goals>
<configuration>
<packaging>pom</packaging>
<file>target/pom-install-deploy-fix/pom.xml</file>
<pomFile>target/pom-install-deploy-fix/pom.xml</pomFile>
<version>${project.version}</version>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-deploy-plugin</artifactId>
<configuration>
<updateReleaseInfo>${isRelease}</updateReleaseInfo>
</configuration>
<executions>
<execution>
<id>overwrite-pom</id>
<phase>deploy</phase>
<goals>
<goal>deploy-file</goal>
</goals>
<configuration>
<packaging>pom</packaging>
<file>target/pom-install-deploy-fix/pom.xml</file>
<pomFile>target/pom-install-deploy-fix/pom.xml</pomFile>
<repositoryId>${deployRepoId}</repositoryId>
<url>${deployRepoUrl}</url>
<version>${project.version}</version>
<updateReleaseInfo>${isRelease}</updateReleaseInfo>
<uniqueVersion>false</uniqueVersion>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>