Add InfluxDB support.

Deklan Dieterly 2014-06-16 13:26:59 -06:00
parent f6783a2db6
commit 04d2d540b2
11 changed files with 1115 additions and 567 deletions

pom.xml

@@ -1,355 +1,363 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.hpcloud</groupId>
<artifactId>mon-api</artifactId>
<version>0.1.0</version>
<url>http://github.com/hpcloud-mon/mon-api</url>
<packaging>jar</packaging>
<prerequisites>
<maven>3.0</maven>
</prerequisites>
<properties>
<computedVersion>${project.version}-${timestamp}-${buildNumber}</computedVersion>
<computedName>${project.artifactId}-${computedVersion}</computedName>
<mon.common.version>1.0.0-SNAPSHOT</mon.common.version>
<dropwizard.version>0.7.0</dropwizard.version>
<skipITs>true</skipITs>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
</properties>
<scm>
<connection>scm:git:git@github.com:hpcloud-mon/mon-api</connection>
<developerConnection>scm:git:git@github.com:hpcloud-mon/mon-api</developerConnection>
</scm>
<repositories>
<repository>
<id>nexus releases</id>
<url>http://nexus.paas.hpcloud.net:8081/nexus/content/repositories/releases</url>
</repository>
<repository>
<id>nexus 3rd party</id>
<url>http://nexus.paas.hpcloud.net:8081/nexus/content/repositories/thirdparty</url>
</repository>
<repository>
<id>nexus-snapshots</id>
<name>nexus snapshots</name>
<url>http://nexus.paas.hpcloud.net:8081/nexus/content/repositories/snapshots</url>
</repository>
</repositories>
<profiles>
<profile>
<id>release-deploy-url-override</id>
<activation>
<property>
<name>BUILD_NUM</name>
</property>
</activation>
<properties>
<computedVersion>${versionNumber}.${BUILD_NUM}</computedVersion>
</properties>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-model</artifactId>
<version>${mon.common.version}</version>
</dependency>
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-persistence</artifactId>
<version>${mon.common.version}</version>
</dependency>
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-util</artifactId>
<version>${mon.common.version}</version>
</dependency>
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-kafka</artifactId>
<version>${mon.common.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
<artifactId>dropwizard-core</artifactId>
<version>${dropwizard.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
<artifactId>dropwizard-db</artifactId>
<version>${dropwizard.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
<artifactId>dropwizard-jdbi</artifactId>
<version>${dropwizard.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
<artifactId>dropwizard-assets</artifactId>
<version>${dropwizard.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
<artifactId>dropwizard-jersey</artifactId>
<version>${dropwizard.version}</version>
</dependency>
<dependency>
<groupId>com.vertica</groupId>
<artifactId>vertica-jdbc</artifactId>
<version>6.1.0</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.26</version>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>com.hp.csbu.cc</groupId>
<artifactId>CsMiddleware</artifactId>
<version>3.34.0</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>2.2.0-incubating</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.9.2</artifactId>
<version>0.8.0</version>
<exclusions>
<exclusion>
<groupId>com.sun.jdmk</groupId>
<artifactId>jmxtools</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun.jmx</groupId>
<artifactId>jmxri</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.wordnik</groupId>
<artifactId>swagger-jaxrs_2.9.1</artifactId>
<version>1.3.1</version>
</dependency>
<dependency>
<groupId>org.influxdb</groupId>
<artifactId>influxdb-java</artifactId>
<version>1.0</version>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-testing</artifactId>
<version>${mon.common.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.hpcloud</groupId>
<artifactId>mon-dropwizard</artifactId>
<version>${mon.common.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
<artifactId>dropwizard-testing</artifactId>
<version>${dropwizard.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-clean-plugin</artifactId>
<version>2.5</version>
<configuration>
<filesets>
<fileset>
<directory>${project.basedir}/debs</directory>
</fileset>
</filesets>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>buildnumber-maven-plugin</artifactId>
<version>1.1</version>
<executions>
<execution>
<phase>validate</phase>
<goals>
<goal>create</goal>
</goals>
</execution>
</executions>
<configuration>
<doCheck>false</doCheck>
<shortRevisionLength>6</shortRevisionLength>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludedGroups>performance,functional,integration,database,slow
</excludedGroups>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration>
<groups>performance,functional,integration,database,slow</groups>
<skipTests>${skipITs}</skipTests>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
</goals>
<configuration>
<includes>
<include>**/*.class</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.1</version>
<configuration>
<finalName>${computedName}</finalName>
<createDependencyReducedPom>true</createDependencyReducedPom>
<filters>
<filter>
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>com.hpcloud.mon.MonApiApplication</mainClass>
</transformer>
</transformers>
<shadedArtifactAttached>true</shadedArtifactAttached>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.4</version>
<configuration>
<archive>
<manifest>
<addDefaultImplementationEntries>true</addDefaultImplementationEntries>
</manifest>
</archive>
</configuration>
</plugin>
<plugin>
<artifactId>jdeb</artifactId>
<groupId>org.vafer</groupId>
<version>1.0</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jdeb</goal>
</goals>
<configuration>
<deb>${project.basedir}/debs/binaries/${computedName}.deb</deb>
<dataSet>
<data>
<type>file</type>
<src>${project.build.directory}/${computedName}.jar</src>
<dst>/opt/mon/mon-api.jar</dst>
</data>
<data>
<type>file</type>
<src>${project.basedir}/src/deb/init/mon-api.conf</src>
<dst>/etc/init/mon-api.conf</dst>
</data>
<data>
<type>file</type>
<src>${project.basedir}/src/deb/etc/mon-api-config.yml-sample
</src>
<dst>/etc/mon/mon-api-config.yml-sample</dst>
</data>
</dataSet>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-scm-plugin</artifactId>
<configuration>
<tag>${project.version}</tag>
</configuration>
</plugin>
</plugins>
</build>
</project>

src/main/java/com/hpcloud/mon/Config/DatabaseConfiguration.java

@@ -0,0 +1,11 @@
package com.hpcloud.mon.Config;
import com.fasterxml.jackson.annotation.JsonProperty;
public class DatabaseConfiguration {
@JsonProperty
String databaseType;
public String getDatabaseType() { return databaseType; }
}

src/main/java/com/hpcloud/mon/Config/InfluxDBConfig.java

@@ -0,0 +1,44 @@
package com.hpcloud.mon.Config;
import com.fasterxml.jackson.annotation.JsonProperty;
public class InfluxDBConfig {
@JsonProperty
String name;
public String getName() {
return name;
}
@JsonProperty
int replicationFactor;
public int getReplicationFactor() {
return replicationFactor;
}
@JsonProperty
String url;
public String getUrl() {
return url;
}
@JsonProperty
String user;
public String getUser() {
return user;
}
@JsonProperty
String password;
public String getPassword() {
return password;
}
}

src/main/java/com/hpcloud/mon/MonApiConfiguration.java

@@ -16,26 +16,48 @@
*/
package com.hpcloud.mon;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.hpcloud.messaging.kafka.KafkaConfiguration;
import com.hpcloud.mon.Config.DatabaseConfiguration;
import com.hpcloud.mon.Config.InfluxDBConfig;
import com.hpcloud.mon.infrastructure.middleware.MiddlewareConfiguration;
import io.dropwizard.Configuration;
import io.dropwizard.db.DataSourceFactory;
import org.hibernate.validator.constraints.NotEmpty;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
public class MonApiConfiguration extends Configuration {
@NotEmpty
public String region;
@NotNull
public Boolean accessedViaHttps;
@NotEmpty
public String metricsTopic = "metrics";
@NotEmpty
public String eventsTopic = "events";
@NotEmpty
public String alarmStateTransitionsTopic = "alarm-state-transitions";
@Valid
@NotNull
public DataSourceFactory mysql;
@Valid
@NotNull
public DataSourceFactory vertica;
@Valid
@NotNull
public KafkaConfiguration kafka;
@Valid
@NotNull
public MiddlewareConfiguration middleware;
@Valid
@NotNull
public InfluxDBConfig influxDB;
@Valid
@NotNull
@JsonProperty
public DatabaseConfiguration databaseConfiguration;
}
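The two new configuration blocks are populated from the API's YAML config file. A minimal sketch of the corresponding section, assuming the standard Jackson mapping of the field names above (values are illustrative, not from this commit):

databaseConfiguration:
  databaseType: influxdb

influxDB:
  name: mon
  replicationFactor: 1
  url: http://localhost:8086
  user: mon_api
  password: password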

src/main/java/com/hpcloud/mon/MonApiModule.java

@@ -61,7 +61,7 @@ public class MonApiModule extends AbstractModule {
install(new ApplicationModule());
install(new DomainModule());
install(new InfrastructureModule(this.config));
}
@Provides

src/main/java/com/hpcloud/mon/infrastructure/InfrastructureModule.java

@@ -16,35 +16,51 @@
*/
package com.hpcloud.mon.infrastructure;
import com.google.inject.AbstractModule;
import com.hpcloud.mon.MonApiConfiguration;
import com.hpcloud.mon.domain.model.alarm.AlarmRepository;
import com.hpcloud.mon.domain.model.alarmstatehistory.AlarmStateHistoryRepository;
import com.hpcloud.mon.domain.model.measurement.MeasurementRepository;
import com.hpcloud.mon.domain.model.metric.MetricDefinitionRepository;
import com.hpcloud.mon.domain.model.notificationmethod.NotificationMethodRepository;
import com.hpcloud.mon.domain.model.statistic.StatisticRepository;
import com.hpcloud.mon.infrastructure.persistence.*;
import javax.inject.Singleton;
/**
* Infrastructure layer bindings.
*/
public class InfrastructureModule extends AbstractModule {
private MonApiConfiguration config;
public InfrastructureModule(MonApiConfiguration config) {
this.config = config;
}
@Override
protected void configure() {
// Bind repositories
bind(AlarmRepository.class).to(AlarmRepositoryImpl.class).in(Singleton.class);
if (config.databaseConfiguration.getDatabaseType().trim().toLowerCase().equals("vertica")) {
bind(AlarmStateHistoryRepository.class).to(AlarmStateHistoryRepositoryImpl.class).in(Singleton.class);
bind(MetricDefinitionRepository.class).to(MetricDefinitionRepositoryImpl.class).in(Singleton.class);
bind(MeasurementRepository.class).to(MeasurementRepositoryImpl.class).in(Singleton.class);
bind(StatisticRepository.class).to(StatisticRepositoryImpl.class).in(Singleton.class);
} else if (config.databaseConfiguration.getDatabaseType().trim().toLowerCase().equals("influxdb")) {
bind(AlarmStateHistoryRepository.class).to(AlarmStateHistoryInfluxDBRepositoryImpl.class).in(Singleton.class);
bind(MetricDefinitionRepository.class).to(MetricDefinitionInfluxDBRepositoryImpl.class).in(Singleton.class);
bind(MeasurementRepository.class).to(MeasurementInfluxDBRepositoryImpl.class).in(Singleton.class);
bind(StatisticRepository.class).to(StatisticInfluxDBRepositoryImpl.class).in(Singleton.class);
} else {
System.err.println("Unknown database type encountered: " + config.databaseConfiguration.getDatabaseType());
System.err.println("Supported databases are 'vertica' and 'influxdb'");
System.err.println("Check your config file.");
System.exit(1);
}
bind(NotificationMethodRepository.class).to(NotificationMethodRepositoryImpl.class).in(
Singleton.class);
}
}

src/main/java/com/hpcloud/mon/infrastructure/persistence/AlarmStateHistoryInfluxDBRepositoryImpl.java

@@ -0,0 +1,145 @@
package com.hpcloud.mon.infrastructure.persistence;
import com.google.inject.Inject;
import com.hpcloud.mon.MonApiConfiguration;
import com.hpcloud.mon.common.model.alarm.AlarmState;
import com.hpcloud.mon.domain.model.alarmstatehistory.AlarmStateHistory;
import com.hpcloud.mon.domain.model.alarmstatehistory.AlarmStateHistoryRepository;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Serie;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import org.skife.jdbi.v2.util.StringMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import javax.inject.Named;
import java.util.*;
import java.util.concurrent.TimeUnit;
public class AlarmStateHistoryInfluxDBRepositoryImpl implements AlarmStateHistoryRepository {
private static final Logger logger = LoggerFactory.getLogger(AlarmStateHistoryInfluxDBRepositoryImpl.class);
private final MonApiConfiguration config;
private final InfluxDB influxDB;
private final DBI mysql;
private static final String FIND_ALARMS_SQL = "select distinct a.id from alarm as a "
+ "join sub_alarm sa on a.id = sa.alarm_id "
+ "left outer join sub_alarm_dimension dim on sa.id = dim.sub_alarm_id%s "
+ "where a.tenant_id = :tenantId and a.deleted_at is NULL";
@Inject
public AlarmStateHistoryInfluxDBRepositoryImpl(@Named("mysql") DBI mysql, MonApiConfiguration config) {
this.mysql = mysql;
this.config = config;
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(), this.config.influxDB.getUser(),
this.config.influxDB.getPassword());
}
@Override
public List<AlarmStateHistory> findById(String tenantId, String alarmId) {
// InfluxDB orders queries by time stamp desc by default.
String query = String.format("select alarm_id, old_state, new_state, reason, reason_data " +
"from alarm_state_history " +
"where tenant_id = '%1$s' and alarm_id = '%2$s'", tenantId, alarmId);
return queryInfluxDBForAlarmStateHistory(query);
}
@Override
public Collection<AlarmStateHistory> find(String tenantId, Map<String, String> dimensions, DateTime startTime, @Nullable DateTime endTime) {
List<String> alarmIds = null;
// Find alarm Ids for dimensions
try (Handle h = mysql.open()) {
String sql = String.format(FIND_ALARMS_SQL, SubAlarmQueries.buildJoinClauseFor(dimensions));
Query<Map<String, Object>> query = h.createQuery(sql).bind("tenantId", tenantId);
DimensionQueries.bindDimensionsToQuery(query, dimensions);
alarmIds = query.map(StringMapper.FIRST).list();
}
if (alarmIds == null || alarmIds.isEmpty()) {
return Collections.emptyList();
}
String timePart = buildTimePart(startTime, endTime);
String alarmsPart = buildAlarmsPart(alarmIds);
String query = String.format("select alarm_id, old_state, new_state, reason, reason_data " +
"from alarm_state_history " +
"where tenant_id = '%1$s' %2$s %3$s", tenantId, timePart, alarmsPart);
return queryInfluxDBForAlarmStateHistory(query);
}
private String buildAlarmsPart(List<String> alarmIds) {
String s = "";
for (String alarmId : alarmIds) {
if (s.length() > 0) {
s += " or ";
}
s += String.format(" alarm_id = '%1$s' ", alarmId);
}
if (s.length() > 0) {
s = String.format(" and (%1$s)", s);
}
return s;
}
private String buildTimePart(DateTime startTime, DateTime endTime) {
String s = "";
if (startTime != null) {
s += String.format(" and time > %1$ds", startTime.getMillis() / 1000);
}
if (endTime != null) {
s += String.format(" and time < %1$ds", endTime.getMillis() / 1000);
}
return s;
}
private List<AlarmStateHistory> queryInfluxDBForAlarmStateHistory(String query) {
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.SECONDS);
List<AlarmStateHistory> alarmStateHistoryList = new LinkedList<>();
// Should only be one serie -- alarm_state_history.
for (Serie serie : result) {
Object[][] valObjArryArry = serie.getPoints();
for (int i = 0; i < valObjArryArry.length; i++) {
AlarmStateHistory alarmStateHistory = new AlarmStateHistory();
// Time is always in position 0.
alarmStateHistory.setTimestamp(new DateTime(new Long((Integer) valObjArryArry[i][0]) * 1000, DateTimeZone.UTC));
// Sequence_number is always in position 1.
alarmStateHistory.setAlarmId((String) valObjArryArry[i][2]);
alarmStateHistory.setNewState(AlarmState.valueOf((String) valObjArryArry[i][3]));
alarmStateHistory.setOldState(AlarmState.valueOf((String) valObjArryArry[i][4]));
alarmStateHistory.setReason((String) valObjArryArry[i][5]);
alarmStateHistory.setReasonData((String) valObjArryArry[i][6]);
alarmStateHistoryList.add(alarmStateHistory);
}
}
return alarmStateHistoryList;
}
}
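To make the string building above concrete: for tenant 't1' and alarm 'a1', findById sends InfluxDB a query of the form

select alarm_id, old_state, new_state, reason, reason_data from alarm_state_history where tenant_id = 't1' and alarm_id = 'a1'

while find appends the clauses produced by buildTimePart and buildAlarmsPart, e.g. " and time > 1402876800s and time < 1402963200s and ( alarm_id = 'a1'  or  alarm_id = 'a2' )" (timestamps illustrative). Note the epoch timestamps are in seconds and the tenant and alarm ids are interpolated into the query string rather than bound as parameters.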

src/main/java/com/hpcloud/mon/infrastructure/persistence/MeasurementInfluxDBRepositoryImpl.java

@@ -0,0 +1,99 @@
package com.hpcloud.mon.infrastructure.persistence;
import com.google.inject.Inject;
import com.hpcloud.mon.MonApiConfiguration;
import com.hpcloud.mon.domain.model.measurement.MeasurementRepository;
import com.hpcloud.mon.domain.model.measurement.Measurements;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Serie;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.util.*;
import java.util.concurrent.TimeUnit;
public class MeasurementInfluxDBRepositoryImpl implements MeasurementRepository {
private static final Logger logger = LoggerFactory.getLogger(MeasurementInfluxDBRepositoryImpl.class);
private final MonApiConfiguration config;
private final InfluxDB influxDB;
public static final DateTimeFormatter DATETIME_FORMATTER = ISODateTimeFormat.dateTimeNoMillis();
@Inject
public MeasurementInfluxDBRepositoryImpl(MonApiConfiguration config) {
this.config = config;
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(), this.config.influxDB.getUser(),
this.config.influxDB.getPassword());
}
@Override
public Collection<Measurements> find(String tenantId, String name, Map<String, String> dimensions, DateTime startTime, @Nullable DateTime endTime) {
String dimWhereClause = "";
if (dimensions != null) {
for (String colName : dimensions.keySet()) {
dimWhereClause += String.format(" and %1$s = '%2$s'", colName, dimensions.get(colName));
}
}
String timePart = buildTimePart(startTime, endTime);
String query = String.format("select value " +
"from %1$s " +
"where tenant_id = '%2$s' %3$s %4$s",
name, tenantId, timePart, dimWhereClause);
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.MILLISECONDS);
Measurements measurements = new Measurements();
measurements.setName(name);
measurements.setDimensions(dimensions);
List<Object[]> valObjArryList = new LinkedList<>();
for (Serie serie : result) {
Object[][] valObjArry = serie.getPoints();
for (int i = 0; i < valObjArry.length; i++) {
Object[] objArry = new Object[3];
// sequence_number
objArry[0] = valObjArry[i][1];
// time
objArry[1] = DATETIME_FORMATTER.print((long) valObjArry[i][0]);
// value
objArry[2] = valObjArry[i][2];
valObjArryList.add(objArry);
}
}
measurements.setMeasurements(valObjArryList);
return Arrays.asList(measurements);
}
private String buildTimePart(DateTime startTime, DateTime endTime) {
String s = "";
if (startTime != null) {
s += String.format(" and time > %1$ds", startTime.getMillis() / 1000);
}
if (endTime != null) {
s += String.format(" and time < %1$ds", endTime.getMillis() / 1000);
}
return s;
}
}
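As an example of the generated query, find("t1", "cpu_user_perc", {"hostname": "h1"}, startTime, endTime) yields roughly (values illustrative)

select value from cpu_user_perc where tenant_id = 't1' and time > 1402876800s and time < 1402963200s and hostname = 'h1'

with results requested in TimeUnit.MILLISECONDS so the point timestamp in position 0 can be printed directly by DATETIME_FORMATTER.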

src/main/java/com/hpcloud/mon/infrastructure/persistence/MetricDefinitionInfluxDBRepositoryImpl.java

@@ -0,0 +1,70 @@
package com.hpcloud.mon.infrastructure.persistence;
import com.google.inject.Inject;
import com.hpcloud.mon.MonApiConfiguration;
import com.hpcloud.mon.common.model.metric.MetricDefinition;
import com.hpcloud.mon.domain.model.metric.MetricDefinitionRepository;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Serie;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class MetricDefinitionInfluxDBRepositoryImpl implements MetricDefinitionRepository {
private static final Logger logger = LoggerFactory.getLogger(MetricDefinitionInfluxDBRepositoryImpl.class);
private final MonApiConfiguration config;
private final InfluxDB influxDB;
@Inject
public MetricDefinitionInfluxDBRepositoryImpl(MonApiConfiguration config) {
this.config = config;
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(), this.config.influxDB.getUser(),
this.config.influxDB.getPassword());
}
@Override
public List<MetricDefinition> find(String tenantId, String name, Map<String, String> dimensions) {
String dimWhereClause = "";
String dimColNames = "";
boolean first = true;
if (dimensions != null) {
for (String colName : dimensions.keySet()) {
if (first) {
first = false;
} else {
dimWhereClause += " and";
dimColNames += ",";
}
dimWhereClause += String.format(" %1$s = '%2$s'", colName, dimensions.get(colName));
dimColNames += colName;
}
if (dimWhereClause.length() > 0) {
dimWhereClause = String.format(" and %1$s", dimWhereClause);
}
}
String query = String.format("select %1$s from /.*/ where tenant_id = '%2$s' %3$s", dimColNames, tenantId, dimWhereClause);
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.SECONDS);
List<MetricDefinition> metricDefinitionList = new ArrayList<>();
for (Serie serie : result) {
MetricDefinition metricDefinition = new MetricDefinition();
metricDefinition.name = serie.getName();
metricDefinition.setDimensions(dimensions);
metricDefinitionList.add(metricDefinition);
}
return metricDefinitionList;
}
}
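For dimensions {"hostname": "h1", "region": "r1"} the generated query has the form (values illustrative)

select hostname,region from /.*/ where tenant_id = 't1' and hostname = 'h1' and region = 'r1'

i.e. it matches every series via the /.*/ regex, presumably relying on InfluxDB to return only series satisfying the dimension columns; each returned serie name then becomes a MetricDefinition.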

src/main/java/com/hpcloud/mon/infrastructure/persistence/StatisticInfluxDBRepositoryImpl.java

@@ -0,0 +1,140 @@
package com.hpcloud.mon.infrastructure.persistence;
import com.google.inject.Inject;
import com.hpcloud.mon.MonApiConfiguration;
import com.hpcloud.mon.domain.model.statistic.StatisticRepository;
import com.hpcloud.mon.domain.model.statistic.Statistics;
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Serie;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.util.*;
import java.util.concurrent.TimeUnit;
public class StatisticInfluxDBRepositoryImpl implements StatisticRepository {
private static final Logger logger = LoggerFactory.getLogger(StatisticInfluxDBRepositoryImpl.class);
private final MonApiConfiguration config;
private final InfluxDB influxDB;
public static final DateTimeFormatter DATETIME_FORMATTER = ISODateTimeFormat.dateTimeNoMillis();
@Inject
public StatisticInfluxDBRepositoryImpl(MonApiConfiguration config) {
this.config = config;
this.influxDB = InfluxDBFactory.connect(this.config.influxDB.getUrl(), this.config.influxDB.getUser(),
this.config.influxDB.getPassword());
}
@Override
public List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, @Nullable DateTime endTime,
List<String> statistics, int period) {
String statsPart = buildStatsPart(statistics);
String timePart = buildTimePart(startTime, endTime);
String dimsPart = buildDimPart(dimensions);
String periodPart = buildPeriodPart(period);
String query = String.format("select time %1$s from %2$s where tenant_id = '%3$s' %4$s %5$s %6$s",
statsPart, name, tenantId, timePart, dimsPart, periodPart);
logger.debug("Query string: {}", query);
List<Serie> result = this.influxDB.Query(this.config.influxDB.getName(), query, TimeUnit.MILLISECONDS);
List<Statistics> statisticsList = new LinkedList<Statistics>();
// Should only be one serie -- name.
for (Serie serie : result) {
Statistics stat = new Statistics();
stat.setName(serie.getName());
List<String> colNamesList = new LinkedList<>(statistics);
colNamesList.add(0, "timestamp");
stat.setColumns(colNamesList);
stat.setDimensions(dimensions);
List<List<Object>> valObjArryArry = new LinkedList<List<Object>>();
stat.setStatistics(valObjArryArry);
Object[][] pointsArryArry = serie.getPoints();
for (int i = 0; i < pointsArryArry.length; i++) {
List<Object> valObjArry = new ArrayList<>();
// First column is always time.
valObjArry.add(DATETIME_FORMATTER.print((long) pointsArryArry[i][0]));
for (int j = 1; j < statistics.size() + 1; j++) {
valObjArry.add(pointsArryArry[i][j]);
}
valObjArryArry.add(valObjArry);
}
statisticsList.add(stat);
}
return statisticsList;
}
private String buildPeriodPart(int period) {
String s = "";
if (period >= 1) {
s += String.format("group by time(%1$ds)", period);
}
return s;
}
private String buildDimPart(Map<String, String> dims) {
String s = "";
if (dims != null) {
for (String colName : dims.keySet()) {
if (s.length() > 0) {
s += " and";
}
s += String.format(" %1$s = '%2$s'", colName, dims.get(colName));
}
if (s.length() > 0) {
s = " and " + s;
}
}
return s;
}
private String buildTimePart(DateTime startTime, DateTime endTime) {
String s = "";
if (startTime != null) {
s += String.format(" and time > %1$ds", startTime.getMillis() / 1000);
}
if (endTime != null) {
s += String.format(" and time < %1$ds", endTime.getMillis() / 1000);
}
return s;
}
private String buildStatsPart(List<String> statistics) {
String s = "";
for (String statistic : statistics) {
s += ",";
if (statistic.trim().toLowerCase().equals("avg")) {
s += " mean(value)";
} else {
s += " " + statistic + "(value)";
}
}
return s;
}
}
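Putting the four builder methods together: statistics = ["avg"], period = 300 and one dimension produce a query of the form (values illustrative)

select time , mean(value) from cpu_user_perc where tenant_id = 't1' and time > 1402876800s and hostname = 'h1' group by time(300s)

where buildStatsPart maps the API's "avg" onto InfluxDB's mean(value) and passes any other statistic name through as name(value).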

src/main/java/com/hpcloud/mon/infrastructure/persistence/StatisticRepositoryImpl.java

@@ -16,17 +16,8 @@
*/
package com.hpcloud.mon.infrastructure.persistence;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import com.hpcloud.mon.domain.model.statistic.StatisticRepository;
import com.hpcloud.mon.domain.model.statistic.Statistics;
import javax.inject.Inject;
import javax.inject.Named;
import java.sql.Timestamp;
import java.util.*;
/**
* Vertica statistic repository implementation.
*/
public class StatisticRepositoryImpl implements StatisticRepository {
public static final DateTimeFormatter DATETIME_FORMATTER = ISODateTimeFormat.dateTimeNoMillis()
.withZoneUTC();
private static final String FIND_BY_METRIC_DEF_SQL = "select dd.id, def.name, d.name as dname, d.value as dvalue "
+ "from MonMetrics.Definitions def, MonMetrics.DefinitionDimensions dd "
+ "left outer join MonMetrics.Dimensions d on d.dimension_set_id = dd.dimension_set_id%s "
+ "where def.id = dd.definition_id and def.tenant_id = :tenantId%s order by dd.id";
private final DBI db;
@Inject
public StatisticRepositoryImpl(@Named("vertica") DBI db) {
this.db = db;
}
@Override
public List<Statistics> find(String tenantId, String name, Map<String, String> dimensions,
DateTime startTime, DateTime endTime, List<String> statistics, int period) {
List<Statistics> listStats = new ArrayList<>();
List<String> copyStatistics = createColumns(statistics);
try (Handle h = db.open()) {
Map<byte[], Statistics> byteMap = findDefIds(h, tenantId, name, dimensions, startTime, endTime);
for (byte[] bufferId : byteMap.keySet()) {
Query<Map<String, Object>> query = h.createQuery(
createQuery(period, startTime, endTime, statistics))
.bind("definition_id", bufferId)
.bind("start_time", startTime)
.bind("end_time", endTime);
// Execute
List<Map<String, Object>> rows = query.list();
List<Object> statisticsRow = new ArrayList<Object>();
for (Map<String, Object> row : rows) {
Double sum = (Double) row.get("sum");
Double average = (Double) row.get("avg");
Double min = (Double) row.get("min");
Double max = (Double) row.get("max");
Long count = (Long) row.get("count");
Timestamp time_stamp = (Timestamp) row.get("time_interval");
if (time_stamp != null) {
statisticsRow.add(DATETIME_FORMATTER.print(time_stamp.getTime()));
}
if (average != null) {
statisticsRow.add(average);
}
if (count != null) {
statisticsRow.add(count);
}
if (max != null) {
statisticsRow.add(max);
}
if (min != null) {
statisticsRow.add(min);
}
if (sum != null) {
statisticsRow.add(sum);
}
byteMap.get(bufferId).addValues(statisticsRow);
statisticsRow = new ArrayList<>();
}
byteMap.get(bufferId).setColumns(copyStatistics);
listStats.add(byteMap.get(bufferId));
}
}
return listStats;
}
private Map<byte[], Statistics> findDefIds(Handle h, String tenantId, String name,
Map<String, String> dimensions, DateTime startTime, DateTime endTime) {
List<byte[]> bytes = new ArrayList<>();
// Build query
StringBuilder sbWhere = new StringBuilder();
if (name != null)
sbWhere.append(" and def.name = :name");
String sql = String.format(FIND_BY_METRIC_DEF_SQL,
MetricQueries.buildJoinClauseFor(dimensions), sbWhere);
Query<Map<String, Object>> query = h.createQuery(sql)
.bind("tenantId", tenantId)
.bind("startTime", startTime);
if (name != null) {
query.bind("name", name);
}
if (endTime != null) {
query.bind("endTime", new Timestamp(endTime.getMillis()));
}
DimensionQueries.bindDimensionsToQuery(query, dimensions);
// Execute
List<Map<String, Object>> rows = query.list();
Map<byte[], Statistics> byteIdMap = new HashMap<>();
// Build results
byte[] currentId = null;
Map<String, String> dims = null;
for (Map<String, Object> row : rows) {
byte[] defId = (byte[]) row.get("id");
String defName = (String) row.get("name");
String demName = (String) row.get("dname");
String demValue = (String) row.get("dvalue");
if (defId == null || !Arrays.equals(currentId, defId)) {
currentId = defId;
dims = new HashMap<>();
dims.put(demName, demValue);
Statistics statistics = new Statistics();
statistics.setName(defName);
statistics.setDimensions(dims);
byteIdMap.put(currentId, statistics);
} else
dims.put(demName, demValue);
}
bytes.add(currentId);
return byteIdMap;
}
List<String> createColumns(List<String> list) {
List<String> copy = new ArrayList<>();
for (String string : list) {
copy.add(string);
}
Collections.sort(copy);
copy.add(0, "timestamp");
return copy;
}
private String createQuery(int period, DateTime startTime, DateTime endTime,
List<String> statistics) {
StringBuilder builder = new StringBuilder();
builder.append("SELECT " + getColumns(statistics));
if (period >= 1) {
builder.append(",MIN(time_stamp) as time_interval ");
builder.append(" FROM (Select FLOOR((EXTRACT('epoch' from time_stamp) - ");
builder.append(createOffset(period, startTime, endTime));
builder.append(" AS time_slice, time_stamp, value ");
}
builder.append(" FROM MonMetrics.Measurements ");
builder.append("WHERE definition_dimensions_id = :definition_id ");
builder.append(createWhereClause(startTime, endTime));
if (period >= 1) {
builder.append(") as TimeSlices group by time_slice order by time_slice");
}
return builder.toString();
}
private String createWhereClause(DateTime startTime, DateTime endTime) {
String clause = "";
if (startTime != null && endTime != null) {
clause = "AND time_stamp >= :start_time AND time_stamp <= :end_time ";
} else if (startTime != null) {
clause = "AND time_stamp >= :start_time ";
}
return clause;
}
private String createOffset(int period, DateTime startTime, DateTime endTime) {
StringBuilder offset = new StringBuilder();
offset.append("(select mod((select extract('epoch' from time_stamp) from MonMetrics.Measurements ");
offset.append("WHERE definition_dimensions_id = :definition_id ");
offset.append(createWhereClause(startTime, endTime));
offset.append("order by time_stamp limit 1");
offset.append("),");
offset.append(period + ")))/" + period + ")");
return offset.toString();
}
private String getColumns(List<String> statistics) {
StringBuilder buildColumns = new StringBuilder();
int size = statistics.size();
int count = 0;
for (String statistic : statistics) {
if (statistic.equals("average")) {
buildColumns.append("avg(value) as average ");
} else {
buildColumns.append(statistic + "(value) as " + statistic + " ");
}
if (size - 1 > count) {
buildColumns.append(",");
}
count++;
}
return buildColumns.toString();
}
}
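For reference, with a single requested statistic of "average" and period = 300, createQuery assembles (before JDBI binds :definition_id and :start_time) a Vertica query of the form

SELECT avg(value) as average ,MIN(time_stamp) as time_interval FROM (Select FLOOR((EXTRACT('epoch' from time_stamp) - (select mod((select extract('epoch' from time_stamp) from MonMetrics.Measurements WHERE definition_dimensions_id = :definition_id AND time_stamp >= :start_time order by time_stamp limit 1),300)))/300) AS time_slice, time_stamp, value FROM MonMetrics.Measurements WHERE definition_dimensions_id = :definition_id AND time_stamp >= :start_time ) as TimeSlices group by time_slice order by time_slice

i.e. rows are bucketed into period-sized slices aligned to the first measurement's timestamp, then aggregated per slice.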